[Files Refactor] Performance tuning (#2865)

* Don't load image files by default
* Don't load gallery files by default
* Don't load scene files by default
* Retry locked transactions forever
* Don't show release notes if config not loaded
* Don't translate path slashes in export
This commit is contained in:
WithoutPants
2022-09-01 17:54:34 +10:00
parent 0b534d89c6
commit 273cf0383d
94 changed files with 2611 additions and 981 deletions

View File

@@ -5,6 +5,10 @@
//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio //go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio
//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag //go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag
//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie //go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie
//go:generate go run -mod=vendor github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/file.ID github.com/stashapp/stash/pkg/file.File
//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
package loaders package loaders
@@ -14,6 +18,7 @@ import (
"time" "time"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/txn"
) )
@@ -30,13 +35,18 @@ const (
) )
type Loaders struct { type Loaders struct {
SceneByID *SceneLoader SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
GalleryByID *GalleryLoader GalleryByID *GalleryLoader
ImageByID *ImageLoader ImageByID *ImageLoader
PerformerByID *PerformerLoader PerformerByID *PerformerLoader
StudioByID *StudioLoader StudioByID *StudioLoader
TagByID *TagLoader TagByID *TagLoader
MovieByID *MovieLoader MovieByID *MovieLoader
FileByID *FileLoader
} }
type Middleware struct { type Middleware struct {
@@ -83,6 +93,26 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch, maxBatch: maxBatch,
fetch: m.fetchMovies(ctx), fetch: m.fetchMovies(ctx),
}, },
FileByID: &FileLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFiles(ctx),
},
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesFileIDs(ctx),
},
ImageFiles: &ImageFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchImagesFileIDs(ctx),
},
GalleryFiles: &GalleryFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchGalleriesFileIDs(ctx),
},
} }
newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs) newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs)
@@ -185,3 +215,47 @@ func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models
return ret, toErrorSlice(err) return ret, toErrorSlice(err)
} }
} }
// fetchFiles returns the batch function for the FileByID dataloader: it
// loads all requested files by ID inside a single transaction.
func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file.File, []error) {
	return func(keys []file.ID) ([]file.File, []error) {
		var result []file.File
		txnErr := m.withTxn(ctx, func(ctx context.Context) error {
			found, err := m.Repository.File.Find(ctx, keys...)
			result = found
			return err
		})
		// a transaction-level error applies to every key in the batch
		return result, toErrorSlice(txnErr)
	}
}
// fetchScenesFileIDs returns the batch function for the SceneFiles
// dataloader: it loads the file ID list for each scene ID inside a single
// transaction.
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
	return func(keys []int) ([][]file.ID, []error) {
		var result [][]file.ID
		txnErr := m.withTxn(ctx, func(ctx context.Context) error {
			ids, err := m.Repository.Scene.GetManyFileIDs(ctx, keys)
			result = ids
			return err
		})
		// a transaction-level error applies to every key in the batch
		return result, toErrorSlice(txnErr)
	}
}
// fetchImagesFileIDs returns the batch function for the ImageFiles
// dataloader: it loads the file ID list for each image ID inside a single
// transaction.
func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
	return func(keys []int) ([][]file.ID, []error) {
		var result [][]file.ID
		txnErr := m.withTxn(ctx, func(ctx context.Context) error {
			ids, err := m.Repository.Image.GetManyFileIDs(ctx, keys)
			result = ids
			return err
		})
		// a transaction-level error applies to every key in the batch
		return result, toErrorSlice(txnErr)
	}
}
// fetchGalleriesFileIDs returns the batch function for the GalleryFiles
// dataloader: it loads the file ID list for each gallery ID inside a single
// transaction.
func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
	return func(keys []int) ([][]file.ID, []error) {
		var result [][]file.ID
		txnErr := m.withTxn(ctx, func(ctx context.Context) error {
			ids, err := m.Repository.Gallery.GetManyFileIDs(ctx, keys)
			result = ids
			return err
		})
		// a transaction-level error applies to every key in the batch
		return result, toErrorSlice(txnErr)
	}
}

View File

@@ -0,0 +1,221 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// FileLoaderConfig captures the config to create a new FileLoader
type FileLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []file.ID) ([]file.File, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}
// NewFileLoader creates a new FileLoader given a fetch, wait, and maxBatch
func NewFileLoader(config FileLoaderConfig) *FileLoader {
	var l FileLoader
	l.fetch = config.Fetch
	l.wait = config.Wait
	l.maxBatch = config.MaxBatch
	return &l
}
// FileLoader batches and caches requests
type FileLoader struct {
	// this method provides the data for the loader
	fetch func(keys []file.ID) ([]file.File, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[file.ID]file.File

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *fileLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}
// fileLoaderBatch collects keys until dispatched, then holds the results.
type fileLoaderBatch struct {
	keys    []file.ID     // keys collected for this batch, in request order
	data    []file.File   // fetch results, parallel to keys
	error   []error       // fetch errors; a single element applies to all keys
	closing bool          // set once the batch has been handed off for dispatch
	done    chan struct{} // closed when data/error have been populated
}
// Load a File by key, batching and caching will be applied automatically.
// It blocks until the batch containing the key has been dispatched.
func (l *FileLoader) Load(key file.ID) (file.File, error) {
	return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a File.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) {
	l.mu.Lock()
	// cache hit: return the value immediately without joining a batch
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (file.File, error) {
			return it, nil
		}
	}
	// lazily start a new batch; keyIndex is called under l.mu
	if l.batch == nil {
		l.batch = &fileLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()
	return func() (file.File, error) {
		// block until the batch has been fetched
		<-batch.done

		var data file.File
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		// only successful results are cached
		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) {
	// queue every key first so they can share batches...
	thunks := make([]func() (file.File, error), len(keys))
	for i, key := range keys {
		thunks[i] = l.LoadThunk(key)
	}

	// ...then resolve them all
	out := make([]file.File, len(keys))
	outErrs := make([]error, len(keys))
	for i, thunk := range thunks {
		out[i], outErrs[i] = thunk()
	}
	return out, outErrs
}
// LoadAllThunk returns a function that when called will block waiting for Files.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) {
	// queue every key immediately; resolution is deferred to the returned thunk
	thunks := make([]func() (file.File, error), len(keys))
	for i, key := range keys {
		thunks[i] = l.LoadThunk(key)
	}
	return func() ([]file.File, []error) {
		out := make([]file.File, len(keys))
		outErrs := make([]error, len(keys))
		for i, thunk := range thunks {
			out[i], outErrs[i] = thunk()
		}
		return out, outErrs
	}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FileLoader) Prime(key file.ID, value file.File) bool {
	l.mu.Lock()
	defer l.mu.Unlock()

	if _, exists := l.cache[key]; exists {
		return false
	}
	l.unsafeSet(key, value)
	return true
}
// Clear the value at key from the cache, if it exists
func (l *FileLoader) Clear(key file.ID) {
	l.mu.Lock()
	defer l.mu.Unlock()
	delete(l.cache, key)
}
// unsafeSet stores value under key, creating the cache on first use.
// The caller must hold l.mu.
func (l *FileLoader) unsafeSet(key file.ID, value file.File) {
	if l.cache == nil {
		l.cache = make(map[file.ID]file.File)
	}
	l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch. The caller must hold l.mu.
func (b *fileLoaderBatch) keyIndex(l *FileLoader, key file.ID) int {
	// dedupe: if the key is already queued in this batch, share its slot
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	// the first key in a batch starts the dispatch timer
	if pos == 0 {
		go b.startTimer(l)
	}

	// batch is full: detach it from the loader and dispatch immediately
	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}
// startTimer dispatches the batch after the configured wait, unless the
// batch was already dispatched by hitting the size limit.
func (b *fileLoaderBatch) startTimer(l *FileLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	// detach the batch so new keys start a fresh one, then dispatch
	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}
// end fetches the batch's data and wakes all waiting thunks by closing done.
func (b *fileLoaderBatch) end(l *FileLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View File

@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// GalleryFileIDsLoaderConfig captures the config to create a new GalleryFileIDsLoader
type GalleryFileIDsLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([][]file.ID, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}
// NewGalleryFileIDsLoader creates a new GalleryFileIDsLoader given a fetch, wait, and maxBatch
func NewGalleryFileIDsLoader(config GalleryFileIDsLoaderConfig) *GalleryFileIDsLoader {
return &GalleryFileIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// GalleryFileIDsLoader batches and caches requests
type GalleryFileIDsLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([][]file.ID, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int][]file.ID

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *galleryFileIDsLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}
type galleryFileIDsLoaderBatch struct {
keys []int
data [][]file.ID
error []error
closing bool
done chan struct{}
}
// Load the file IDs for a gallery ID, batching and caching will be applied automatically
func (l *GalleryFileIDsLoader) Load(key int) ([]file.ID, error) {
	return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]file.ID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &galleryFileIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]file.ID, error) {
<-batch.done
var data []file.ID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]file.ID, []error) {
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *GalleryFileIDsLoader) Prime(key int, value []file.ID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]file.ID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *GalleryFileIDsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *GalleryFileIDsLoader) unsafeSet(key int, value []file.ID) {
if l.cache == nil {
l.cache = map[int][]file.ID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *galleryFileIDsLoaderBatch) keyIndex(l *GalleryFileIDsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *galleryFileIDsLoaderBatch) startTimer(l *GalleryFileIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *galleryFileIDsLoaderBatch) end(l *GalleryFileIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// ImageFileIDsLoaderConfig captures the config to create a new ImageFileIDsLoader
type ImageFileIDsLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([][]file.ID, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}
// NewImageFileIDsLoader creates a new ImageFileIDsLoader given a fetch, wait, and maxBatch
func NewImageFileIDsLoader(config ImageFileIDsLoaderConfig) *ImageFileIDsLoader {
return &ImageFileIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// ImageFileIDsLoader batches and caches requests
type ImageFileIDsLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([][]file.ID, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int][]file.ID

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *imageFileIDsLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}
type imageFileIDsLoaderBatch struct {
keys []int
data [][]file.ID
error []error
closing bool
done chan struct{}
}
// Load the file IDs for an image ID, batching and caching will be applied automatically
func (l *ImageFileIDsLoader) Load(key int) ([]file.ID, error) {
	return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]file.ID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &imageFileIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]file.ID, error) {
<-batch.done
var data []file.ID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]file.ID, []error) {
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ImageFileIDsLoader) Prime(key int, value []file.ID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]file.ID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *ImageFileIDsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *ImageFileIDsLoader) unsafeSet(key int, value []file.ID) {
if l.cache == nil {
l.cache = map[int][]file.ID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *imageFileIDsLoaderBatch) keyIndex(l *ImageFileIDsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *imageFileIDsLoaderBatch) startTimer(l *ImageFileIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *imageFileIDsLoaderBatch) end(l *ImageFileIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// SceneFileIDsLoaderConfig captures the config to create a new SceneFileIDsLoader
type SceneFileIDsLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([][]file.ID, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}
// NewSceneFileIDsLoader creates a new SceneFileIDsLoader given a fetch, wait, and maxBatch
func NewSceneFileIDsLoader(config SceneFileIDsLoaderConfig) *SceneFileIDsLoader {
return &SceneFileIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// SceneFileIDsLoader batches and caches requests
type SceneFileIDsLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([][]file.ID, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int][]file.ID

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneFileIDsLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}
type sceneFileIDsLoaderBatch struct {
keys []int
data [][]file.ID
error []error
closing bool
done chan struct{}
}
// Load the file IDs for a scene ID, batching and caching will be applied automatically
func (l *SceneFileIDsLoader) Load(key int) ([]file.ID, error) {
	return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]file.ID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &sceneFileIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]file.ID, error) {
<-batch.done
var data []file.ID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]file.ID, []error) {
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneFileIDsLoader) Prime(key int, value []file.ID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]file.ID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *SceneFileIDsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *SceneFileIDsLoader) unsafeSet(key int, value []file.ID) {
if l.cache == nil {
l.cache = map[int][]file.ID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *sceneFileIDsLoaderBatch) keyIndex(l *SceneFileIDsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *sceneFileIDsLoaderBatch) startTimer(l *SceneFileIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *sceneFileIDsLoaderBatch) end(l *SceneFileIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -12,10 +12,38 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) { func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (file.File, error) {
ret := make([]*GalleryFile, len(obj.Files)) if obj.PrimaryFileID != nil {
f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
if err != nil {
return nil, err
}
for i, f := range obj.Files { return f, nil
}
return nil, nil
}
// getFiles loads all of the gallery's files: the gallery's file IDs come
// from the GalleryFiles dataloader, which are then resolved to files via
// the FileByID dataloader.
func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]file.File, error) {
	fileIDs, err := loaders.From(ctx).GalleryFiles.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	// NOTE(review): firstError presumably returns the first non-nil entry of
	// errs (defined elsewhere in this package) — if it does, files may contain
	// nil entries for the keys that failed; callers should check err first.
	files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
	return files, firstError(errs)
}
func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) {
files, err := r.getFiles(ctx, obj)
if err != nil {
return nil, err
}
ret := make([]*GalleryFile, len(files))
for i, f := range files {
base := f.Base() base := f.Base()
ret[i] = &GalleryFile{ ret[i] = &GalleryFile{
ID: strconv.Itoa(int(base.ID)), ID: strconv.Itoa(int(base.ID)),
@@ -84,7 +112,10 @@ func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Fol
} }
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) { func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
f := obj.PrimaryFile() f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f != nil { if f != nil {
return &f.Base().ModTime, nil return &f.Base().ModTime, nil
} }

View File

@@ -2,21 +2,69 @@ package api
import ( import (
"context" "context"
"fmt"
"strconv" "strconv"
"time" "time"
"github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
// getPrimaryFile loads the image's primary file via the FileByID dataloader.
// It returns nil (with no error) when the image has no primary file set.
func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (*file.ImageFile, error) {
	if obj.PrimaryFileID == nil {
		return nil, nil
	}

	f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
	if err != nil {
		return nil, err
	}

	imageFile, ok := f.(*file.ImageFile)
	if !ok {
		return nil, fmt.Errorf("file %T is not an image file", f)
	}
	return imageFile, nil
}
// getFiles loads all of the image's files: the image's file IDs come from
// the ImageFiles dataloader and are resolved via the FileByID dataloader.
// Every loaded file must be an *file.ImageFile.
func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]*file.ImageFile, error) {
	fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID)
	if err != nil {
		return nil, err
	}

	files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
	// surface load errors before type-asserting: a failed load leaves a nil
	// entry in files, which would otherwise trip the assertion below and
	// mask the real error
	if err := firstError(errs); err != nil {
		return nil, err
	}

	ret := make([]*file.ImageFile, len(files))
	for i, bf := range files {
		f, ok := bf.(*file.ImageFile)
		if !ok {
			// report the loaded value's type (bf); the previous code formatted
			// f, the nil result of the failed assertion, so %T always printed
			// *file.ImageFile regardless of the actual type
			return nil, fmt.Errorf("file %T is not an image file", bf)
		}

		ret[i] = f
	}

	return ret, nil
}
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) { func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
ret := obj.GetTitle() ret := obj.GetTitle()
return &ret, nil return &ret, nil
} }
func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFileType, error) { func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFileType, error) {
f := obj.PrimaryFile() f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f == nil {
return nil, nil
}
width := f.Width width := f.Width
height := f.Height height := f.Height
size := f.Size size := f.Size
@@ -28,9 +76,14 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile
} }
func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) { func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) {
ret := make([]*ImageFile, len(obj.Files)) files, err := r.getFiles(ctx, obj)
if err != nil {
return nil, err
}
for i, f := range obj.Files { ret := make([]*ImageFile, len(files))
for i, f := range files {
ret[i] = &ImageFile{ ret[i] = &ImageFile{
ID: strconv.Itoa(int(f.ID)), ID: strconv.Itoa(int(f.ID)),
Path: f.Path, Path: f.Path,
@@ -55,7 +108,10 @@ func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageF
} }
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) { func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
f := obj.PrimaryFile() f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f != nil { if f != nil {
return &f.ModTime, nil return &f.ModTime, nil
} }

View File

@@ -14,9 +14,56 @@ import (
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
// getPrimaryFile loads the scene's primary file via the FileByID dataloader
// and caches it on the scene for later accessors. It returns nil (with no
// error) when the scene has no primary file set.
func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) {
	if obj.PrimaryFileID != nil {
		f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
		if err != nil {
			return nil, err
		}

		ret, ok := f.(*file.VideoFile)
		if !ok {
			// fixed: message previously said "image file", copied from the
			// image resolver; a scene's primary file is a video file
			return nil, fmt.Errorf("file %T is not a video file", f)
		}

		// cache the loaded primary file on the scene model
		obj.Files.SetPrimary(ret)

		return ret, nil
	}

	return nil, nil
}
func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*file.VideoFile, error) {
fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID)
if err != nil {
return nil, err
}
files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
ret := make([]*file.VideoFile, len(files))
for i, bf := range files {
f, ok := bf.(*file.VideoFile)
if !ok {
return nil, fmt.Errorf("file %T is not a video file", f)
}
ret[i] = f
}
obj.Files.Set(ret)
return ret, firstError(errs)
}
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) { func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
if obj.PrimaryFile() != nil { f, err := r.getPrimaryFile(ctx, obj)
return &obj.PrimaryFile().ModTime, nil if err != nil {
return nil, err
}
if f != nil {
return &f.ModTime, nil
} }
return nil, nil return nil, nil
} }
@@ -31,7 +78,10 @@ func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, e
// File is deprecated // File is deprecated
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) { func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
f := obj.PrimaryFile() f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f == nil { if f == nil {
return nil, nil return nil, nil
} }
@@ -52,9 +102,14 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc
} }
func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) { func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) {
ret := make([]*VideoFile, len(obj.Files)) files, err := r.getFiles(ctx, obj)
if err != nil {
return nil, err
}
for i, f := range obj.Files { ret := make([]*VideoFile, len(files))
for i, f := range files {
ret[i] = &VideoFile{ ret[i] = &VideoFile{
ID: strconv.Itoa(int(f.ID)), ID: strconv.Itoa(int(f.ID)),
Path: f.Path, Path: f.Path,
@@ -148,7 +203,10 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re
} }
func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.VideoCaption, err error) { func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.VideoCaption, err error) {
primaryFile := obj.PrimaryFile() primaryFile, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if primaryFile == nil { if primaryFile == nil {
return nil, nil return nil, nil
} }
@@ -265,7 +323,22 @@ func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []
} }
func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
phash := obj.Phash() f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f == nil {
return nil, nil
}
val := f.Fingerprints.Get(file.FingerprintTypePhash)
if val == nil {
return nil, nil
}
phash, _ := val.(int64)
if phash != 0 { if phash != 0 {
hexval := utils.PhashToString(phash) hexval := utils.PhashToString(phash)
return &hexval, nil return &hexval, nil
@@ -274,6 +347,12 @@ func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string,
} }
func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) { func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) {
// load the primary file into the scene
_, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
config := manager.GetInstance().Config config := manager.GetInstance().Config
baseURL, _ := ctx.Value(BaseURLCtxKey).(string) baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
@@ -283,7 +362,10 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]
} }
func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (bool, error) { func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (bool, error) {
primaryFile := obj.PrimaryFile() primaryFile, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return false, err
}
if primaryFile == nil { if primaryFile == nil {
return false, nil return false, nil
} }
@@ -292,7 +374,10 @@ func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (boo
} }
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) { func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
primaryFile := obj.PrimaryFile() primaryFile, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if primaryFile == nil { if primaryFile == nil {
return nil, nil return nil, nil
} }

View File

@@ -29,7 +29,7 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
c := config.GetInstance() c := config.GetInstance()
existingPaths := c.GetStashPaths() existingPaths := c.GetStashPaths()
if len(input.Stashes) > 0 { if input.Stashes != nil {
for _, s := range input.Stashes { for _, s := range input.Stashes {
// Only validate existence of new paths // Only validate existence of new paths
isNew := true isNew := true

View File

@@ -338,6 +338,10 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
return fmt.Errorf("gallery with id %d not found", id) return fmt.Errorf("gallery with id %d not found", id)
} }
if err := gallery.LoadFiles(ctx, qb); err != nil {
return err
}
galleries = append(galleries, gallery) galleries = append(galleries, gallery)
imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile) imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
@@ -357,7 +361,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries { for _, gallery := range galleries {
// don't delete stash library paths // don't delete stash library paths
path := gallery.Path() path := gallery.Path
if deleteFile && path != "" && !isStashPath(path) { if deleteFile && path != "" && !isStashPath(path) {
// try to remove the folder - it is possible that it is not empty // try to remove the folder - it is possible that it is not empty
// so swallow the error if present // so swallow the error if present
@@ -370,15 +374,15 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
GalleryDestroyInput: input, GalleryDestroyInput: input,
Checksum: gallery.Checksum(), Checksum: gallery.Checksum(),
Path: gallery.Path(), Path: gallery.Path,
}, nil) }, nil)
} }
// call image destroy post hook as well // call image destroy post hook as well
for _, img := range imgsDestroyed { for _, img := range imgsDestroyed {
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: img.Checksum(), Checksum: img.Checksum,
Path: img.Path(), Path: img.Path,
}, nil) }, nil)
} }

View File

@@ -240,8 +240,8 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
ImageDestroyInput: input, ImageDestroyInput: input,
Checksum: i.Checksum(), Checksum: i.Checksum,
Path: i.Path(), Path: i.Path,
}, nil) }, nil)
return true, nil return true, nil
@@ -291,8 +291,8 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{
ImagesDestroyInput: input, ImagesDestroyInput: input,
Checksum: image.Checksum(), Checksum: image.Checksum,
Path: image.Path(), Path: image.Path,
}, nil) }, nil)
} }

View File

@@ -366,9 +366,9 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
SceneDestroyInput: input, SceneDestroyInput: input,
Checksum: s.Checksum(), Checksum: s.Checksum,
OSHash: s.OSHash(), OSHash: s.OSHash,
Path: s.Path(), Path: s.Path,
}, nil) }, nil)
return true, nil return true, nil
@@ -422,9 +422,9 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
ScenesDestroyInput: input, ScenesDestroyInput: input,
Checksum: scene.Checksum(), Checksum: scene.Checksum,
OSHash: scene.OSHash(), OSHash: scene.OSHash,
Path: scene.Path(), Path: scene.Path,
}, nil) }, nil)
} }

View File

@@ -86,7 +86,11 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
if err == nil { if err == nil {
result.Count = len(scenes) result.Count = len(scenes)
for _, s := range scenes { for _, s := range scenes {
f := s.PrimaryFile() if err = s.LoadPrimaryFile(ctx, r.repository.File); err != nil {
break
}
f := s.Files.Primary()
if f == nil { if f == nil {
continue continue
} }

View File

@@ -18,6 +18,11 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*manage
idInt, _ := strconv.Atoi(*id) idInt, _ := strconv.Atoi(*id)
var err error var err error
scene, err = r.repository.Scene.Find(ctx, idInt) scene, err = r.repository.Scene.Find(ctx, idInt)
if scene != nil {
err = scene.LoadPrimaryFile(ctx, r.repository.File)
}
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -25,6 +25,7 @@ type ImageFinder interface {
type imageRoutes struct { type imageRoutes struct {
txnManager txn.Manager txnManager txn.Manager
imageFinder ImageFinder imageFinder ImageFinder
fileFinder file.Finder
} }
func (rs imageRoutes) Routes() chi.Router { func (rs imageRoutes) Routes() chi.Router {
@@ -44,7 +45,7 @@ func (rs imageRoutes) Routes() chi.Router {
func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
img := r.Context().Value(imageKey).(*models.Image) img := r.Context().Value(imageKey).(*models.Image)
filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum(), models.DefaultGthumbWidth) filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth)
w.Header().Add("Cache-Control", "max-age=604800000") w.Header().Add("Cache-Control", "max-age=604800000")
@@ -54,7 +55,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} else { } else {
// don't return anything if there is no file // don't return anything if there is no file
f := img.PrimaryFile() f := img.Files.Primary()
if f == nil { if f == nil {
// TODO - probably want to return a placeholder // TODO - probably want to return a placeholder
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)
@@ -81,7 +82,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
// write the generated thumbnail to disk if enabled // write the generated thumbnail to disk if enabled
if manager.GetInstance().Config.IsWriteImageThumbnails() { if manager.GetInstance().Config.IsWriteImageThumbnails() {
logger.Debugf("writing thumbnail to disk: %s", img.Path()) logger.Debugf("writing thumbnail to disk: %s", img.Path)
if err := fsutil.WriteFile(filepath, data); err != nil { if err := fsutil.WriteFile(filepath, data); err != nil {
logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err) logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err)
} }
@@ -97,12 +98,12 @@ func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {
// if image is in a zip file, we need to serve it specifically // if image is in a zip file, we need to serve it specifically
if len(i.Files) == 0 { if i.Files.Primary() == nil {
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound) http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
return return
} }
i.Files[0].Serve(&file.OsFS{}, w, r) i.Files.Primary().Serve(&file.OsFS{}, w, r)
} }
// endregion // endregion
@@ -124,6 +125,10 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler {
image, _ = qb.Find(ctx, imageID) image, _ = qb.Find(ctx, imageID)
} }
if image != nil {
_ = image.LoadPrimaryFile(ctx, rs.fileFinder)
}
return nil return nil
}) })
if readTxnErr != nil { if readTxnErr != nil {

View File

@@ -41,6 +41,7 @@ type CaptionFinder interface {
type sceneRoutes struct { type sceneRoutes struct {
txnManager txn.Manager txnManager txn.Manager
sceneFinder SceneFinder sceneFinder SceneFinder
fileFinder file.Finder
captionFinder CaptionFinder captionFinder CaptionFinder
sceneMarkerFinder SceneMarkerFinder sceneMarkerFinder SceneMarkerFinder
tagFinder scene.MarkerTagFinder tagFinder scene.MarkerTagFinder
@@ -94,7 +95,12 @@ func (rs sceneRoutes) StreamMKV(w http.ResponseWriter, r *http.Request) {
// only allow mkv streaming if the scene container is an mkv already // only allow mkv streaming if the scene container is an mkv already
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
container, err := manager.GetSceneFileContainer(scene) pf := scene.Files.Primary()
if pf == nil {
return
}
container, err := manager.GetVideoFileContainer(pf)
if err != nil { if err != nil {
logger.Errorf("[transcode] error getting container: %v", err) logger.Errorf("[transcode] error getting container: %v", err)
} }
@@ -121,10 +127,8 @@ func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
ffprobe := manager.GetInstance().FFProbe pf := scene.Files.Primary()
videoFile, err := ffprobe.NewVideoFile(scene.Path()) if pf == nil {
if err != nil {
logger.Errorf("[stream] error reading video file: %v", err)
return return
} }
@@ -134,7 +138,7 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", ffmpeg.MimeHLS) w.Header().Set("Content-Type", ffmpeg.MimeHLS)
var str strings.Builder var str strings.Builder
ffmpeg.WriteHLSPlaylist(videoFile.Duration, r.URL.String(), &str) ffmpeg.WriteHLSPlaylist(pf.Duration, r.URL.String(), &str)
requestByteRange := createByteRange(r.Header.Get("Range")) requestByteRange := createByteRange(r.Header.Get("Range"))
if requestByteRange.RawString != "" { if requestByteRange.RawString != "" {
@@ -157,7 +161,10 @@ func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) { func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
f := scene.PrimaryFile() f := scene.Files.Primary()
if f == nil {
return
}
logger.Debugf("Streaming as %s", streamFormat.MimeType) logger.Debugf("Streaming as %s", streamFormat.MimeType)
// start stream based on query param, if provided // start stream based on query param, if provided
@@ -306,7 +313,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
s := r.Context().Value(sceneKey).(*models.Scene) s := r.Context().Value(sceneKey).(*models.Scene)
funscript := video.GetFunscriptPath(s.Path()) funscript := video.GetFunscriptPath(s.Path)
serveFileNoCache(w, r, funscript) serveFileNoCache(w, r, funscript)
} }
@@ -322,7 +329,7 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin
if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
primaryFile := s.PrimaryFile() primaryFile := s.Files.Primary()
if primaryFile == nil { if primaryFile == nil {
return nil return nil
} }
@@ -330,7 +337,7 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin
captions, err := rs.captionFinder.GetCaptions(ctx, primaryFile.Base().ID) captions, err := rs.captionFinder.GetCaptions(ctx, primaryFile.Base().ID)
for _, caption := range captions { for _, caption := range captions {
if lang == caption.LanguageCode && ext == caption.CaptionType { if lang == caption.LanguageCode && ext == caption.CaptionType {
sub, err := video.ReadSubs(caption.Path(s.Path())) sub, err := video.ReadSubs(caption.Path(s.Path))
if err == nil { if err == nil {
var b bytes.Buffer var b bytes.Buffer
err = sub.WriteToWebVTT(&b) err = sub.WriteToWebVTT(&b)
@@ -492,6 +499,10 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler {
scene, _ = qb.Find(ctx, sceneID) scene, _ = qb.Find(ctx, sceneID)
} }
if scene != nil {
_ = scene.LoadPrimaryFile(ctx, rs.fileFinder)
}
return nil return nil
}) })
if readTxnErr != nil { if readTxnErr != nil {

View File

@@ -140,6 +140,7 @@ func Start() error {
r.Mount("/scene", sceneRoutes{ r.Mount("/scene", sceneRoutes{
txnManager: txnManager, txnManager: txnManager,
sceneFinder: txnManager.Scene, sceneFinder: txnManager.Scene,
fileFinder: txnManager.File,
captionFinder: txnManager.File, captionFinder: txnManager.File,
sceneMarkerFinder: txnManager.SceneMarker, sceneMarkerFinder: txnManager.SceneMarker,
tagFinder: txnManager.Tag, tagFinder: txnManager.Tag,
@@ -147,6 +148,7 @@ func Start() error {
r.Mount("/image", imageRoutes{ r.Mount("/image", imageRoutes{
txnManager: txnManager, txnManager: txnManager,
imageFinder: txnManager.Image, imageFinder: txnManager.Image,
fileFinder: txnManager.File,
}.Routes()) }.Routes())
r.Mount("/studio", studioRoutes{ r.Mount("/studio", studioRoutes{
txnManager: txnManager, txnManager: txnManager,

View File

@@ -21,12 +21,12 @@ type GalleryTagUpdater interface {
func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
var path string var path string
if s.Path() != "" { if s.Path != "" {
path = s.Path() path = s.Path
} }
// only trim the extension if gallery is file-based // only trim the extension if gallery is file-based
trimExt := s.PrimaryFile() != nil trimExt := s.PrimaryFileID != nil
return tagger{ return tagger{
ID: s.ID, ID: s.ID,

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -54,12 +53,8 @@ func TestGalleryPerformers(t *testing.T) {
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Files: []file.File{ Path: test.Path,
&file.BaseFile{
Path: test.Path,
},
},
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
} }
err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil) err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil)
@@ -101,12 +96,8 @@ func TestGalleryStudios(t *testing.T) {
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Files: []file.File{ Path: test.Path,
&file.BaseFile{
Path: test.Path,
},
},
} }
err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil) err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil)
@@ -178,12 +169,8 @@ func TestGalleryTags(t *testing.T) {
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Files: []file.File{ Path: test.Path,
&file.BaseFile{
Path: test.Path,
},
},
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
} }
err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil) err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil)

View File

@@ -24,7 +24,7 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
ID: s.ID, ID: s.ID,
Type: "image", Type: "image",
Name: s.GetTitle(), Name: s.GetTitle(),
Path: s.Path(), Path: s.Path,
cache: cache, cache: cache,
} }
} }

View File

@@ -3,7 +3,6 @@ package autotag
import ( import (
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -12,14 +11,6 @@ import (
const imageExt = "jpg" const imageExt = "jpg"
func makeImageFile(p string) *file.ImageFile {
return &file.ImageFile{
BaseFile: &file.BaseFile{
Path: p,
},
}
}
func TestImagePerformers(t *testing.T) { func TestImagePerformers(t *testing.T) {
t.Parallel() t.Parallel()
@@ -60,7 +51,7 @@ func TestImagePerformers(t *testing.T) {
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Files: []*file.ImageFile{makeImageFile(test.Path)}, Path: test.Path,
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
} }
err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil) err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil)
@@ -102,8 +93,8 @@ func TestImageStudios(t *testing.T) {
} }
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Files: []*file.ImageFile{makeImageFile(test.Path)}, Path: test.Path,
} }
err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil) err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil)
@@ -176,7 +167,7 @@ func TestImageTags(t *testing.T) {
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Files: []*file.ImageFile{makeImageFile(test.Path)}, Path: test.Path,
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
} }
err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil) err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil)

View File

@@ -565,9 +565,9 @@ func TestParsePerformerScenes(t *testing.T) {
// title is only set on scenes where we expect performer to be set // title is only set on scenes where we expect performer to be set
if scene.Title == expectedMatchTitle && len(performers) == 0 { if scene.Title == expectedMatchTitle && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path()) t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path)
} else if scene.Title != expectedMatchTitle && len(performers) > 0 { } else if scene.Title != expectedMatchTitle && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path()) t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path)
} }
} }
@@ -616,13 +616,13 @@ func TestParseStudioScenes(t *testing.T) {
// title is only set on scenes where we expect studio to be set // title is only set on scenes where we expect studio to be set
if scene.Title == expectedMatchTitle { if scene.Title == expectedMatchTitle {
if scene.StudioID == nil { if scene.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path()) t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path)
} else if scene.StudioID != nil && *scene.StudioID != studios[1].ID { } else if scene.StudioID != nil && *scene.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID, scene.Path()) t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID, scene.Path)
} }
} else if scene.Title != expectedMatchTitle && scene.StudioID != nil && *scene.StudioID == studios[1].ID { } else if scene.Title != expectedMatchTitle && scene.StudioID != nil && *scene.StudioID == studios[1].ID {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path()) t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path)
} }
} }
} }
@@ -673,9 +673,9 @@ func TestParseTagScenes(t *testing.T) {
// title is only set on scenes where we expect tag to be set // title is only set on scenes where we expect tag to be set
if scene.Title == expectedMatchTitle && len(tags) == 0 { if scene.Title == expectedMatchTitle && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path()) t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path)
} else if (scene.Title != expectedMatchTitle) && len(tags) > 0 { } else if (scene.Title != expectedMatchTitle) && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path()) t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path)
} }
} }
@@ -721,9 +721,9 @@ func TestParsePerformerImages(t *testing.T) {
// title is only set on images where we expect performer to be set // title is only set on images where we expect performer to be set
expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
if expectedMatch && len(performers) == 0 { if expectedMatch && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path()) t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path)
} else if !expectedMatch && len(performers) > 0 { } else if !expectedMatch && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path()) t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path)
} }
} }
@@ -772,13 +772,13 @@ func TestParseStudioImages(t *testing.T) {
// title is only set on images where we expect studio to be set // title is only set on images where we expect studio to be set
if image.Title == expectedMatchTitle { if image.Title == expectedMatchTitle {
if image.StudioID == nil { if image.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path()) t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path)
} else if *image.StudioID != studios[1].ID { } else if *image.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", *image.StudioID, image.Path()) t.Errorf("Incorrect studio id %d set for path '%s'", *image.StudioID, image.Path)
} }
} else if image.Title != expectedMatchTitle && image.StudioID != nil && *image.StudioID == studios[1].ID { } else if image.Title != expectedMatchTitle && image.StudioID != nil && *image.StudioID == studios[1].ID {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path()) t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path)
} }
} }
} }
@@ -830,9 +830,9 @@ func TestParseTagImages(t *testing.T) {
// title is only set on images where we expect performer to be set // title is only set on images where we expect performer to be set
expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
if expectedMatch && len(tags) == 0 { if expectedMatch && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path()) t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path)
} else if !expectedMatch && len(tags) > 0 { } else if !expectedMatch && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path()) t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path)
} }
} }
@@ -878,9 +878,9 @@ func TestParsePerformerGalleries(t *testing.T) {
// title is only set on galleries where we expect performer to be set // title is only set on galleries where we expect performer to be set
expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
if expectedMatch && len(performers) == 0 { if expectedMatch && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path()) t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path)
} else if !expectedMatch && len(performers) > 0 { } else if !expectedMatch && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path()) t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path)
} }
} }
@@ -929,13 +929,13 @@ func TestParseStudioGalleries(t *testing.T) {
// title is only set on galleries where we expect studio to be set // title is only set on galleries where we expect studio to be set
if gallery.Title == expectedMatchTitle { if gallery.Title == expectedMatchTitle {
if gallery.StudioID == nil { if gallery.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path()) t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path)
} else if *gallery.StudioID != studios[1].ID { } else if *gallery.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", *gallery.StudioID, gallery.Path()) t.Errorf("Incorrect studio id %d set for path '%s'", *gallery.StudioID, gallery.Path)
} }
} else if gallery.Title != expectedMatchTitle && (gallery.StudioID != nil && *gallery.StudioID == studios[1].ID) { } else if gallery.Title != expectedMatchTitle && (gallery.StudioID != nil && *gallery.StudioID == studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path()) t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path)
} }
} }
} }
@@ -987,9 +987,9 @@ func TestParseTagGalleries(t *testing.T) {
// title is only set on galleries where we expect performer to be set // title is only set on galleries where we expect performer to be set
expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
if expectedMatch && len(tags) == 0 { if expectedMatch && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path()) t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path)
} else if !expectedMatch && len(tags) > 0 { } else if !expectedMatch && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path()) t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path)
} }
} }

View File

@@ -4,7 +4,6 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
@@ -53,14 +52,8 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
matchingPaths, falsePaths := generateTestPaths(performerName, "mp4") matchingPaths, falsePaths := generateTestPaths(performerName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Files: []*file.VideoFile{ Path: p,
{
BaseFile: &file.BaseFile{
Path: p,
},
},
},
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -140,7 +133,7 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Files: []*file.ImageFile{makeImageFile(p)}, Path: p,
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -220,12 +213,8 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Files: []file.File{ Path: v,
&file.BaseFile{
Path: v,
},
},
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
}) })
} }

View File

@@ -24,7 +24,7 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
ID: s.ID, ID: s.ID,
Type: "scene", Type: "scene",
Name: s.GetTitle(), Name: s.GetTitle(),
Path: s.Path(), Path: s.Path,
cache: cache, cache: cache,
} }
} }

View File

@@ -6,7 +6,6 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -175,14 +174,8 @@ func TestScenePerformers(t *testing.T) {
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Files: []*file.VideoFile{ Path: test.Path,
{
BaseFile: &file.BaseFile{
Path: test.Path,
},
},
},
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
} }
@@ -236,14 +229,8 @@ func TestSceneStudios(t *testing.T) {
} }
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Files: []*file.VideoFile{ Path: test.Path,
{
BaseFile: &file.BaseFile{
Path: test.Path,
},
},
},
} }
err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil) err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil)
@@ -315,14 +302,8 @@ func TestSceneTags(t *testing.T) {
} }
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Files: []*file.VideoFile{ Path: test.Path,
{
BaseFile: &file.BaseFile{
Path: test.Path,
},
},
},
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
} }
err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil) err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil)

View File

@@ -4,7 +4,6 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
@@ -100,14 +99,8 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
var scenes []*models.Scene var scenes []*models.Scene
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Files: []*file.VideoFile{ Path: p,
{
BaseFile: &file.BaseFile{
Path: p,
},
},
},
}) })
} }
@@ -197,8 +190,8 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
matchingPaths, falsePaths := generateTestPaths(testPathName, imageExt) matchingPaths, falsePaths := generateTestPaths(testPathName, imageExt)
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Files: []*file.ImageFile{makeImageFile(p)}, Path: p,
}) })
} }
@@ -287,12 +280,8 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Files: []file.File{ Path: v,
&file.BaseFile{
Path: v,
},
},
}) })
} }

View File

@@ -4,7 +4,6 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
@@ -100,14 +99,8 @@ func testTagScenes(t *testing.T, tc testTagCase) {
var scenes []*models.Scene var scenes []*models.Scene
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Files: []*file.VideoFile{ Path: p,
{
BaseFile: &file.BaseFile{
Path: p,
},
},
},
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -200,7 +193,7 @@ func testTagImages(t *testing.T, tc testTagCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Files: []*file.ImageFile{makeImageFile(p)}, Path: p,
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -294,12 +287,8 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Files: []file.File{ Path: v,
&file.BaseFile{
Path: v,
},
},
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
}) })
} }

View File

@@ -114,7 +114,7 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
duration int64 duration int64
) )
f := scene.PrimaryFile() f := scene.Files.Primary()
if f != nil { if f != nil {
size = int(f.Size) size = int(f.Size)
bitrate = uint(f.BitRate) bitrate = uint(f.BitRate)
@@ -362,6 +362,10 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string)
if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
scene, err = me.repository.SceneFinder.Find(ctx, sceneID) scene, err = me.repository.SceneFinder.Find(ctx, sceneID)
if scene != nil {
err = scene.LoadPrimaryFile(ctx, me.repository.FileFinder)
}
if err != nil { if err != nil {
return err return err
} }

View File

@@ -8,6 +8,7 @@ import (
"sync" "sync"
"time" "time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/txn"
@@ -15,6 +16,7 @@ import (
type Repository struct { type Repository struct {
SceneFinder SceneFinder SceneFinder SceneFinder
FileFinder file.Finder
StudioFinder StudioFinder StudioFinder StudioFinder
TagFinder TagFinder TagFinder TagFinder
PerformerFinder PerformerFinder PerformerFinder PerformerFinder

View File

@@ -212,7 +212,7 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
// don't update anything if nothing was set // don't update anything if nothing was set
if updater.IsEmpty() { if updater.IsEmpty() {
logger.Debugf("Nothing to set for %s", s.Path()) logger.Debugf("Nothing to set for %s", s.Path)
return nil return nil
} }
@@ -225,7 +225,7 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
if title.Ptr() != nil { if title.Ptr() != nil {
as = fmt.Sprintf(" as %s", title.Value) as = fmt.Sprintf(" as %s", title.Value)
} }
logger.Infof("Successfully identified %s%s using %s", s.Path(), as, result.source.Name) logger.Infof("Successfully identified %s%s using %s", s.Path, as, result.source.Name)
return nil return nil
}); err != nil { }); err != nil {

View File

@@ -385,9 +385,9 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
// scene path in the match. Otherwise, use the default behaviour of just // scene path in the match. Otherwise, use the default behaviour of just
// the file's basename // the file's basename
// must be double \ because of the regex escaping // must be double \ because of the regex escaping
filename := filepath.Base(scene.Path()) filename := filepath.Base(scene.Path)
if strings.Contains(m.regexString, `\\`) || strings.Contains(m.regexString, "/") { if strings.Contains(m.regexString, `\\`) || strings.Contains(m.regexString, "/") {
filename = scene.Path() filename = scene.Path
} }
result := m.regex.FindStringSubmatch(filename) result := m.regex.FindStringSubmatch(filename)

View File

@@ -1,18 +0,0 @@
package manager
import (
"os"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
func DeleteGalleryFile(gallery *models.Gallery) {
path := gallery.Path()
if path != "" {
err := os.Remove(path)
if err != nil {
logger.Warnf("Could not delete file %s: %s", path, err.Error())
}
}
}

View File

@@ -38,13 +38,11 @@ func (i *fileFolderImporter) PreImport(ctx context.Context) error {
} }
func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) { func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) {
path := filepath.FromSlash(baseJSON.Path)
ret := file.Folder{ ret := file.Folder{
DirEntry: file.DirEntry{ DirEntry: file.DirEntry{
ModTime: baseJSON.ModTime.GetTime(), ModTime: baseJSON.ModTime.GetTime(),
}, },
Path: path, Path: baseJSON.Path,
CreatedAt: baseJSON.CreatedAt.GetTime(), CreatedAt: baseJSON.CreatedAt.GetTime(),
UpdatedAt: baseJSON.CreatedAt.GetTime(), UpdatedAt: baseJSON.CreatedAt.GetTime(),
} }
@@ -97,13 +95,11 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc
} }
func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) { func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) {
path := filepath.FromSlash(baseJSON.Path)
baseFile := file.BaseFile{ baseFile := file.BaseFile{
DirEntry: file.DirEntry{ DirEntry: file.DirEntry{
ModTime: baseJSON.ModTime.GetTime(), ModTime: baseJSON.ModTime.GetTime(),
}, },
Basename: filepath.Base(path), Basename: filepath.Base(baseJSON.Path),
Size: baseJSON.Size, Size: baseJSON.Size,
CreatedAt: baseJSON.CreatedAt.GetTime(), CreatedAt: baseJSON.CreatedAt.GetTime(),
UpdatedAt: baseJSON.CreatedAt.GetTime(), UpdatedAt: baseJSON.CreatedAt.GetTime(),
@@ -124,7 +120,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO
} }
func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirEntry) error { func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirEntry) error {
zipFilePath := filepath.FromSlash(i.Input.DirEntry().ZipFile) zipFilePath := i.Input.DirEntry().ZipFile
if zipFilePath != "" { if zipFilePath != "" {
zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath) zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath)
if err != nil { if err != nil {
@@ -147,11 +143,11 @@ func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error {
} }
func (i *fileFolderImporter) Name() string { func (i *fileFolderImporter) Name() string {
return filepath.FromSlash(i.Input.DirEntry().Path) return i.Input.DirEntry().Path
} }
func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) {
path := filepath.FromSlash(i.Input.DirEntry().Path) path := i.Input.DirEntry().Path
existing, err := i.ReaderWriter.FindByPath(ctx, path) existing, err := i.ReaderWriter.FindByPath(ctx, path)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -213,7 +209,7 @@ func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string,
func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) {
// create folder hierarchy and set parent folder id // create folder hierarchy and set parent folder id
path := filepath.FromSlash(i.Input.DirEntry().Path) path := i.Input.DirEntry().Path
path = filepath.Dir(path) path = filepath.Dir(path)
folder, err := i.createFolderHierarchy(ctx, path) folder, err := i.createFolderHierarchy(ctx, path)
if err != nil { if err != nil {

View File

@@ -210,6 +210,7 @@ func initialize() error {
instance.DLNAService = dlna.NewService(instance.Repository, dlna.Repository{ instance.DLNAService = dlna.NewService(instance.Repository, dlna.Repository{
SceneFinder: instance.Repository.Scene, SceneFinder: instance.Repository.Scene,
FileFinder: instance.Repository.File,
StudioFinder: instance.Repository.Studio, StudioFinder: instance.Repository.Studio,
TagFinder: instance.Repository.Tag, TagFinder: instance.Repository.Tag,
PerformerFinder: instance.Repository.Performer, PerformerFinder: instance.Repository.Performer,

View File

@@ -185,6 +185,9 @@ func (s *Manager) generateScreenshot(ctx context.Context, sceneId string, at *fl
if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
var err error var err error
scene, err = s.Repository.Scene.Find(ctx, sceneIdInt) scene, err = s.Repository.Scene.Find(ctx, sceneIdInt)
if scene != nil {
err = scene.LoadPrimaryFile(ctx, s.Repository.File)
}
return err return err
}); err != nil || scene == nil { }); err != nil || scene == nil {
logger.Errorf("failed to get scene for generate: %s", err.Error()) logger.Errorf("failed to get scene for generate: %s", err.Error())

View File

@@ -15,17 +15,22 @@ import (
type ImageReaderWriter interface { type ImageReaderWriter interface {
models.ImageReaderWriter models.ImageReaderWriter
image.FinderCreatorUpdater image.FinderCreatorUpdater
models.ImageFileLoader
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
} }
type GalleryReaderWriter interface { type GalleryReaderWriter interface {
models.GalleryReaderWriter models.GalleryReaderWriter
gallery.FinderCreatorUpdater gallery.FinderCreatorUpdater
gallery.Finder gallery.Finder
models.FileLoader
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
} }
type SceneReaderWriter interface { type SceneReaderWriter interface {
models.SceneReaderWriter models.SceneReaderWriter
scene.CreatorUpdater scene.CreatorUpdater
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
} }
type FileReaderWriter interface { type FileReaderWriter interface {

View File

@@ -38,7 +38,7 @@ func (c *StreamRequestContext) Cancel() {
} }
func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) { func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
instance.ReadLockManager.Cancel(scene.Path()) instance.ReadLockManager.Cancel(scene.Path)
sceneHash := scene.GetHash(fileNamingAlgo) sceneHash := scene.GetHash(fileNamingAlgo)
@@ -62,7 +62,7 @@ type SceneServer struct {
func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) { func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) {
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path(), scene.GetHash(fileNamingAlgo)) filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
streamRequestCtx := NewStreamRequestContext(w, r) streamRequestCtx := NewStreamRequestContext(w, r)
// #2579 - hijacking and closing the connection here causes video playback to fail in Safari // #2579 - hijacking and closing the connection here causes video playback to fail in Safari

View File

@@ -5,36 +5,37 @@ import (
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) { func GetVideoFileContainer(file *file.VideoFile) (ffmpeg.Container, error) {
var container ffmpeg.Container var container ffmpeg.Container
format := scene.Format() format := file.Format
if format != "" { if format != "" {
container = ffmpeg.Container(format) container = ffmpeg.Container(format)
} else { // container isn't in the DB } else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe // shouldn't happen, fallback to ffprobe
ffprobe := GetInstance().FFProbe ffprobe := GetInstance().FFProbe
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path()) tmpVideoFile, err := ffprobe.NewVideoFile(file.Path)
if err != nil { if err != nil {
return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err) return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err)
} }
return ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path()) return ffmpeg.MatchContainer(tmpVideoFile.Container, file.Path)
} }
return container, nil return container, nil
} }
func includeSceneStreamPath(scene *models.Scene, streamingResolution models.StreamingResolutionEnum, maxStreamingTranscodeSize models.StreamingResolutionEnum) bool { func includeSceneStreamPath(f *file.VideoFile, streamingResolution models.StreamingResolutionEnum, maxStreamingTranscodeSize models.StreamingResolutionEnum) bool {
// convert StreamingResolutionEnum to ResolutionEnum so we can get the min // convert StreamingResolutionEnum to ResolutionEnum so we can get the min
// resolution // resolution
convertedRes := models.ResolutionEnum(streamingResolution) convertedRes := models.ResolutionEnum(streamingResolution)
minResolution := convertedRes.GetMinResolution() minResolution := convertedRes.GetMinResolution()
sceneResolution := scene.GetMinResolution() sceneResolution := f.GetMinResolution()
// don't include if scene resolution is smaller than the streamingResolution // don't include if scene resolution is smaller than the streamingResolution
if sceneResolution != 0 && sceneResolution < minResolution { if sceneResolution != 0 && sceneResolution < minResolution {
@@ -70,6 +71,11 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
return nil, fmt.Errorf("nil scene") return nil, fmt.Errorf("nil scene")
} }
pf := scene.Files.Primary()
if pf == nil {
return nil, fmt.Errorf("nil file")
}
var ret []*SceneStreamEndpoint var ret []*SceneStreamEndpoint
mimeWebm := ffmpeg.MimeWebm mimeWebm := ffmpeg.MimeWebm
mimeHLS := ffmpeg.MimeHLS mimeHLS := ffmpeg.MimeHLS
@@ -80,12 +86,12 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
// direct stream should only apply when the audio codec is supported // direct stream should only apply when the audio codec is supported
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec() != "" { if pf.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec()) audioCodec = ffmpeg.ProbeAudioCodec(pf.AudioCodec)
} }
// don't care if we can't get the container // don't care if we can't get the container
container, _ := GetSceneFileContainer(scene) container, _ := GetVideoFileContainer(pf)
if HasTranscode(scene, config.GetInstance().GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) { if HasTranscode(scene, config.GetInstance().GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) {
label := "Direct stream" label := "Direct stream"
@@ -128,27 +134,27 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
webmURL := directStreamURL + ".webm" webmURL := directStreamURL + ".webm"
mp4URL := directStreamURL + ".mp4" mp4URL := directStreamURL + ".mp4"
if includeSceneStreamPath(scene, models.StreamingResolutionEnumFourK, maxStreamingTranscodeSize) { if includeSceneStreamPath(pf, models.StreamingResolutionEnumFourK, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFourK, mimeMp4, webmLabelFourK)) webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFourK, mimeMp4, webmLabelFourK))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFourK, mimeMp4, mp4LabelFourK)) mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFourK, mimeMp4, mp4LabelFourK))
} }
if includeSceneStreamPath(scene, models.StreamingResolutionEnumFullHd, maxStreamingTranscodeSize) { if includeSceneStreamPath(pf, models.StreamingResolutionEnumFullHd, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFullHd, mimeMp4, webmLabelFullHD)) webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFullHd, mimeMp4, webmLabelFullHD))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFullHd, mimeMp4, mp4LabelFullHD)) mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFullHd, mimeMp4, mp4LabelFullHD))
} }
if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandardHd, maxStreamingTranscodeSize) { if includeSceneStreamPath(pf, models.StreamingResolutionEnumStandardHd, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandardHd, mimeMp4, webmLabelStandardHD)) webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandardHd, mimeMp4, webmLabelStandardHD))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandardHd, mimeMp4, mp4LabelStandardHD)) mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandardHd, mimeMp4, mp4LabelStandardHD))
} }
if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandard, maxStreamingTranscodeSize) { if includeSceneStreamPath(pf, models.StreamingResolutionEnumStandard, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandard, mimeMp4, webmLabelStandard)) webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandard, mimeMp4, webmLabelStandard))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandard, mimeMp4, mp4LabelStandard)) mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandard, mimeMp4, mp4LabelStandard))
} }
if includeSceneStreamPath(scene, models.StreamingResolutionEnumLow, maxStreamingTranscodeSize) { if includeSceneStreamPath(pf, models.StreamingResolutionEnumLow, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumLow, mimeMp4, webmLabelLow)) webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumLow, mimeMp4, webmLabelLow))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumLow, mimeMp4, mp4LabelLow)) mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumLow, mimeMp4, mp4LabelLow))
} }

View File

@@ -699,17 +699,17 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers { if t.performers {
if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil { if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path(), err) return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path, err)
} }
} }
if t.studios { if t.studios {
if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil { if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path(), err) return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path, err)
} }
} }
if t.tags { if t.tags {
if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil { if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path(), err) return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path, err)
} }
} }
@@ -736,17 +736,17 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers { if t.performers {
if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil { if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path(), err) return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path, err)
} }
} }
if t.studios { if t.studios {
if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil { if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path(), err) return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path, err)
} }
} }
if t.tags { if t.tags {
if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil { if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path(), err) return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path, err)
} }
} }
@@ -773,17 +773,17 @@ func (t *autoTagGalleryTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers { if t.performers {
if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil { if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path(), err) return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path, err)
} }
} }
if t.studios { if t.studios {
if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil { if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path(), err) return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path, err)
} }
} }
if t.tags { if t.tags {
if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil { if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path(), err) return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path, err)
} }
} }

View File

@@ -206,20 +206,24 @@ func (h *cleanHandler) deleteRelatedScenes(ctx context.Context, fileDeleter *fil
} }
for _, scene := range scenes { for _, scene := range scenes {
if err := scene.LoadFiles(ctx, sceneQB); err != nil {
return err
}
// only delete if the scene has no other files // only delete if the scene has no other files
if len(scene.Files) <= 1 { if len(scene.Files.List()) <= 1 {
logger.Infof("Deleting scene %q since it has no other related files", scene.GetTitle()) logger.Infof("Deleting scene %q since it has no other related files", scene.GetTitle())
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil { if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil {
return err return err
} }
checksum := scene.Checksum() checksum := scene.Checksum
oshash := scene.OSHash() oshash := scene.OSHash
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, scene.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{ mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, scene.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: checksum, Checksum: checksum,
OSHash: oshash, OSHash: oshash,
Path: scene.Path(), Path: scene.Path,
}, nil) }, nil)
} }
} }
@@ -236,8 +240,12 @@ func (h *cleanHandler) deleteRelatedGalleries(ctx context.Context, fileID file.I
} }
for _, g := range galleries { for _, g := range galleries {
if err := g.LoadFiles(ctx, qb); err != nil {
return err
}
// only delete if the gallery has no other files // only delete if the gallery has no other files
if len(g.Files) <= 1 { if len(g.Files.List()) <= 1 {
logger.Infof("Deleting gallery %q since it has no other related files", g.GetTitle()) logger.Infof("Deleting gallery %q since it has no other related files", g.GetTitle())
if err := qb.Destroy(ctx, g.ID); err != nil { if err := qb.Destroy(ctx, g.ID); err != nil {
return err return err
@@ -245,7 +253,7 @@ func (h *cleanHandler) deleteRelatedGalleries(ctx context.Context, fileID file.I
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{ mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum(), Checksum: g.Checksum(),
Path: g.Path(), Path: g.Path,
}, nil) }, nil)
} }
} }
@@ -269,7 +277,7 @@ func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderI
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{ mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum(), Checksum: g.Checksum(),
Path: g.Path(), Path: g.Path,
}, nil) }, nil)
} }
@@ -290,15 +298,19 @@ func (h *cleanHandler) deleteRelatedImages(ctx context.Context, fileDeleter *fil
} }
for _, i := range images { for _, i := range images {
if len(i.Files) <= 1 { if err := i.LoadFiles(ctx, imageQB); err != nil {
return err
}
if len(i.Files.List()) <= 1 {
logger.Infof("Deleting image %q since it has no other related files", i.GetTitle()) logger.Infof("Deleting image %q since it has no other related files", i.GetTitle())
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil { if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil {
return err return err
} }
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{ mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: i.Checksum(), Checksum: i.Checksum,
Path: i.Path(), Path: i.Path,
}, nil) }, nil)
} }
} }

View File

@@ -328,6 +328,11 @@ func (t *ExportTask) populateGalleryImages(ctx context.Context, repo Repository)
} }
for _, g := range galleries { for _, g := range galleries {
if err := g.LoadFiles(ctx, reader); err != nil {
logger.Errorf("[galleries] <%s> failed to fetch files for gallery: %s", g.GetTitle(), err.Error())
continue
}
images, err := imageReader.FindByGalleryID(ctx, g.ID) images, err := imageReader.FindByGalleryID(ctx, g.ID)
if err != nil { if err != nil {
logger.Errorf("[galleries] <%s> failed to fetch images for gallery: %s", g.Checksum, err.Error()) logger.Errorf("[galleries] <%s> failed to fetch images for gallery: %s", g.Checksum, err.Error())
@@ -400,7 +405,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry {
BaseDirEntry: jsonschema.BaseDirEntry{ BaseDirEntry: jsonschema.BaseDirEntry{
Type: jsonschema.DirEntryTypeFile, Type: jsonschema.DirEntryTypeFile,
ModTime: json.JSONTime{Time: bf.ModTime}, ModTime: json.JSONTime{Time: bf.ModTime},
Path: filepath.ToSlash(bf.Path), Path: bf.Path,
CreatedAt: json.JSONTime{Time: bf.CreatedAt}, CreatedAt: json.JSONTime{Time: bf.CreatedAt},
UpdatedAt: json.JSONTime{Time: bf.UpdatedAt}, UpdatedAt: json.JSONTime{Time: bf.UpdatedAt},
}, },
@@ -461,7 +466,7 @@ func folderToJSON(f file.Folder) jsonschema.DirEntry {
base := jsonschema.BaseDirEntry{ base := jsonschema.BaseDirEntry{
Type: jsonschema.DirEntryTypeFolder, Type: jsonschema.DirEntryTypeFolder,
ModTime: json.JSONTime{Time: f.ModTime}, ModTime: json.JSONTime{Time: f.ModTime},
Path: filepath.ToSlash(f.Path), Path: f.Path,
CreatedAt: json.JSONTime{Time: f.CreatedAt}, CreatedAt: json.JSONTime{Time: f.CreatedAt},
UpdatedAt: json.JSONTime{Time: f.UpdatedAt}, UpdatedAt: json.JSONTime{Time: f.UpdatedAt},
} }
@@ -497,7 +502,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
} }
// export files // export files
for _, f := range s.Files { for _, f := range s.Files.List() {
exportFile(f, t) exportFile(f, t)
} }
@@ -513,6 +518,13 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
continue continue
} }
for _, g := range galleries {
if err := g.LoadFiles(ctx, galleryReader); err != nil {
logger.Errorf("[scenes] <%s> error getting scene gallery files: %s", sceneHash, err.Error())
continue
}
}
newSceneJSON.Galleries = gallery.GetRefs(galleries) newSceneJSON.Galleries = gallery.GetRefs(galleries)
performers, err := performerReader.FindBySceneID(ctx, s.ID) performers, err := performerReader.FindBySceneID(ctx, s.ID)
@@ -565,13 +577,8 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers)) t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
} }
pf := s.PrimaryFile() basename := filepath.Base(s.Path)
basename := "" hash := s.OSHash
hash := ""
if pf != nil {
basename = pf.Basename
hash = s.OSHash()
}
fn := newSceneJSON.Filename(basename, hash) fn := newSceneJSON.Filename(basename, hash)
@@ -632,12 +639,17 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
tagReader := repo.Tag tagReader := repo.Tag
for s := range jobChan { for s := range jobChan {
imageHash := s.Checksum() imageHash := s.Checksum
if err := s.LoadFiles(ctx, repo.Image); err != nil {
logger.Errorf("[images] <%s> error getting image files: %s", imageHash, err.Error())
continue
}
newImageJSON := image.ToBasicJSON(s) newImageJSON := image.ToBasicJSON(s)
// export files // export files
for _, f := range s.Files { for _, f := range s.Files.List() {
exportFile(f, t) exportFile(f, t)
} }
@@ -654,6 +666,13 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
continue continue
} }
for _, g := range imageGalleries {
if err := g.LoadFiles(ctx, galleryReader); err != nil {
logger.Errorf("[images] <%s> error getting image gallery files: %s", imageHash, err.Error())
continue
}
}
newImageJSON.Galleries = gallery.GetRefs(imageGalleries) newImageJSON.Galleries = gallery.GetRefs(imageGalleries)
performers, err := performerReader.FindByImageID(ctx, s.ID) performers, err := performerReader.FindByImageID(ctx, s.ID)
@@ -682,15 +701,7 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers)) t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
} }
pf := s.PrimaryFile() fn := newImageJSON.Filename(filepath.Base(s.Path), s.Checksum)
basename := ""
hash := ""
if pf != nil {
basename = pf.Basename
hash = s.Checksum()
}
fn := newImageJSON.Filename(basename, hash)
if err := t.json.saveImage(fn, newImageJSON); err != nil { if err := t.json.saveImage(fn, newImageJSON); err != nil {
logger.Errorf("[images] <%s> failed to save json: %s", imageHash, err.Error()) logger.Errorf("[images] <%s> failed to save json: %s", imageHash, err.Error())
@@ -749,6 +760,11 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
tagReader := repo.Tag tagReader := repo.Tag
for g := range jobChan { for g := range jobChan {
if err := g.LoadFiles(ctx, repo.Gallery); err != nil {
logger.Errorf("[galleries] <%s> failed to fetch files for gallery: %s", g.GetTitle(), err.Error())
continue
}
galleryHash := g.Checksum() galleryHash := g.Checksum()
newGalleryJSON, err := gallery.ToBasicJSON(g) newGalleryJSON, err := gallery.ToBasicJSON(g)
@@ -758,7 +774,7 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
} }
// export files // export files
for _, f := range g.Files { for _, f := range g.Files.List() {
exportFile(f, t) exportFile(f, t)
} }
@@ -809,16 +825,13 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers)) t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
} }
pf := g.PrimaryFile()
basename := "" basename := ""
// use id in case multiple galleries with the same basename // use id in case multiple galleries with the same basename
hash := strconv.Itoa(g.ID) hash := strconv.Itoa(g.ID)
switch { switch {
case pf != nil: case g.Path != "":
basename = pf.Base().Basename basename = filepath.Base(g.Path)
case g.FolderPath != "":
basename = filepath.Base(g.FolderPath)
default: default:
basename = g.Title basename = g.Title
} }

View File

@@ -117,6 +117,10 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) {
if len(j.input.SceneIDs) > 0 { if len(j.input.SceneIDs) > 0 {
scenes, err = qb.FindMany(ctx, sceneIDs) scenes, err = qb.FindMany(ctx, sceneIDs)
for _, s := range scenes { for _, s := range scenes {
if err := s.LoadFiles(ctx, qb); err != nil {
return err
}
j.queueSceneJobs(ctx, g, s, queue, &totals) j.queueSceneJobs(ctx, g, s, queue, &totals)
} }
} }
@@ -207,6 +211,11 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
return totals return totals
} }
if err := ss.LoadFiles(ctx, j.txnManager.Scene); err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return totals
}
j.queueSceneJobs(ctx, g, ss, queue, &totals) j.queueSceneJobs(ctx, g, ss, queue, &totals)
} }
@@ -277,7 +286,6 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
options := getGeneratePreviewOptions(*generatePreviewOptions) options := getGeneratePreviewOptions(*generatePreviewOptions)
if utils.IsTrue(j.input.Previews) { if utils.IsTrue(j.input.Previews) {
task := &GeneratePreviewTask{ task := &GeneratePreviewTask{
Scene: *scene, Scene: *scene,
ImagePreview: utils.IsTrue(j.input.ImagePreviews), ImagePreview: utils.IsTrue(j.input.ImagePreviews),
@@ -344,7 +352,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
if utils.IsTrue(j.input.Phashes) { if utils.IsTrue(j.input.Phashes) {
// generate for all files in scene // generate for all files in scene
for _, f := range scene.Files { for _, f := range scene.Files.List() {
task := &GeneratePhashTask{ task := &GeneratePhashTask{
File: f, File: f,
fileNamingAlgorithm: j.fileNamingAlgo, fileNamingAlgorithm: j.fileNamingAlgo,

View File

@@ -18,7 +18,7 @@ type GenerateInteractiveHeatmapSpeedTask struct {
} }
func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string { func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path()) return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path)
} }
func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) { func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
@@ -27,7 +27,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
} }
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm) videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
funscriptPath := video.GetFunscriptPath(t.Scene.Path()) funscriptPath := video.GetFunscriptPath(t.Scene.Path)
heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum) heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum)
generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath) generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath)
@@ -42,7 +42,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
median := generator.InteractiveSpeed median := generator.InteractiveSpeed
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
primaryFile := t.Scene.PrimaryFile() primaryFile := t.Scene.Files.Primary()
primaryFile.InteractiveSpeed = &median primaryFile.InteractiveSpeed = &median
qb := t.TxnManager.File qb := t.TxnManager.File
return qb.Update(ctx, primaryFile) return qb.Update(ctx, primaryFile)
@@ -53,7 +53,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
} }
func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool { func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
primaryFile := t.Scene.PrimaryFile() primaryFile := t.Scene.Files.Primary()
if primaryFile == nil || !primaryFile.Interactive { if primaryFile == nil || !primaryFile.Interactive {
return false return false
} }

View File

@@ -27,7 +27,7 @@ type GenerateMarkersTask struct {
func (t *GenerateMarkersTask) GetDescription() string { func (t *GenerateMarkersTask) GetDescription() string {
if t.Scene != nil { if t.Scene != nil {
return fmt.Sprintf("Generating markers for %s", t.Scene.Path()) return fmt.Sprintf("Generating markers for %s", t.Scene.Path)
} else if t.Marker != nil { } else if t.Marker != nil {
return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID) return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID)
} }
@@ -57,7 +57,7 @@ func (t *GenerateMarkersTask) Start(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path()) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil { if err != nil {
logger.Errorf("error reading video file: %s", err.Error()) logger.Errorf("error reading video file: %s", err.Error())
return return
@@ -83,7 +83,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path()) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil { if err != nil {
logger.Errorf("error reading video file: %s", err.Error()) logger.Errorf("error reading video file: %s", err.Error())
return return

View File

@@ -23,7 +23,7 @@ type GeneratePreviewTask struct {
} }
func (t *GeneratePreviewTask) GetDescription() string { func (t *GeneratePreviewTask) GetDescription() string {
return fmt.Sprintf("Generating preview for %s", t.Scene.Path()) return fmt.Sprintf("Generating preview for %s", t.Scene.Path)
} }
func (t *GeneratePreviewTask) Start(ctx context.Context) { func (t *GeneratePreviewTask) Start(ctx context.Context) {
@@ -32,7 +32,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path()) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil { if err != nil {
logger.Errorf("error reading video file: %v", err) logger.Errorf("error reading video file: %v", err)
return return
@@ -55,7 +55,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
} }
func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64) error { func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64) error {
videoFilename := t.Scene.Path() videoFilename := t.Scene.Path
if err := t.generator.PreviewVideo(context.TODO(), videoFilename, videoDuration, videoChecksum, t.Options, true); err != nil { if err := t.generator.PreviewVideo(context.TODO(), videoFilename, videoDuration, videoChecksum, t.Options, true); err != nil {
logger.Warnf("[generator] failed generating scene preview, trying fallback") logger.Warnf("[generator] failed generating scene preview, trying fallback")
@@ -68,7 +68,7 @@ func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration f
} }
func (t GeneratePreviewTask) generateWebp(videoChecksum string) error { func (t GeneratePreviewTask) generateWebp(videoChecksum string) error {
videoFilename := t.Scene.Path() videoFilename := t.Scene.Path
return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum) return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum)
} }

View File

@@ -20,18 +20,13 @@ type GenerateScreenshotTask struct {
} }
func (t *GenerateScreenshotTask) Start(ctx context.Context) { func (t *GenerateScreenshotTask) Start(ctx context.Context) {
scenePath := t.Scene.Path() scenePath := t.Scene.Path
ffprobe := instance.FFProbe
probeResult, err := ffprobe.NewVideoFile(scenePath)
if err != nil { videoFile := t.Scene.Files.Primary()
logger.Error(err.Error())
return
}
var at float64 var at float64
if t.ScreenshotAt == nil { if t.ScreenshotAt == nil {
at = float64(probeResult.Duration) * 0.2 at = float64(videoFile.Duration) * 0.2
} else { } else {
at = *t.ScreenshotAt at = *t.ScreenshotAt
} }
@@ -52,7 +47,7 @@ func (t *GenerateScreenshotTask) Start(ctx context.Context) {
Overwrite: true, Overwrite: true,
} }
if err := g.Screenshot(context.TODO(), probeResult.Path, checksum, probeResult.Width, probeResult.Duration, generate.ScreenshotOptions{ if err := g.Screenshot(context.TODO(), videoFile.Path, checksum, videoFile.Width, videoFile.Duration, generate.ScreenshotOptions{
At: &at, At: &at,
}); err != nil { }); err != nil {
logger.Errorf("Error generating screenshot: %v", err) logger.Errorf("Error generating screenshot: %v", err)

View File

@@ -16,7 +16,7 @@ type GenerateSpriteTask struct {
} }
func (t *GenerateSpriteTask) GetDescription() string { func (t *GenerateSpriteTask) GetDescription() string {
return fmt.Sprintf("Generating sprites for %s", t.Scene.Path()) return fmt.Sprintf("Generating sprites for %s", t.Scene.Path)
} }
func (t *GenerateSpriteTask) Start(ctx context.Context) { func (t *GenerateSpriteTask) Start(ctx context.Context) {
@@ -25,7 +25,7 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path()) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil { if err != nil {
logger.Errorf("error reading video file: %s", err.Error()) logger.Errorf("error reading video file: %s", err.Error())
return return
@@ -51,6 +51,9 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
// required returns true if the sprite needs to be generated // required returns true if the sprite needs to be generated
func (t GenerateSpriteTask) required() bool { func (t GenerateSpriteTask) required() bool {
if t.Scene.Path == "" {
return false
}
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
return !t.doesSpriteExist(sceneHash) return !t.doesSpriteExist(sceneHash)
} }

View File

@@ -131,7 +131,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
} }
var taskError error var taskError error
j.progress.ExecuteTask("Identifying "+s.Path(), func() { j.progress.ExecuteTask("Identifying "+s.Path, func() {
task := identify.SceneIdentifier{ task := identify.SceneIdentifier{
SceneReaderUpdater: instance.Repository.Scene, SceneReaderUpdater: instance.Repository.Scene,
StudioCreator: instance.Repository.Studio, StudioCreator: instance.Repository.Studio,
@@ -151,7 +151,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
}) })
if taskError != nil { if taskError != nil {
logger.Errorf("Error encountered identifying %s: %v", s.Path(), taskError) logger.Errorf("Error encountered identifying %s: %v", s.Path, taskError)
} }
j.progress.Increment() j.progress.Increment()

View File

@@ -14,13 +14,13 @@ type MigrateHashTask struct {
// Start starts the task. // Start starts the task.
func (t *MigrateHashTask) Start() { func (t *MigrateHashTask) Start() {
if t.Scene.OSHash() == "" || t.Scene.Checksum() == "" { if t.Scene.OSHash == "" || t.Scene.Checksum == "" {
// nothing to do // nothing to do
return return
} }
oshash := t.Scene.OSHash() oshash := t.Scene.OSHash
checksum := t.Scene.Checksum() checksum := t.Scene.Checksum
oldHash := oshash oldHash := oshash
newHash := checksum newHash := checksum

View File

@@ -277,7 +277,7 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
type imageThumbnailGenerator struct{} type imageThumbnailGenerator struct{}
func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error { func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error {
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum(), models.DefaultGthumbWidth) thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
exists, _ := fsutil.FileExists(thumbPath) exists, _ := fsutil.FileExists(thumbPath)
if exists { if exists {
return nil return nil

View File

@@ -23,7 +23,7 @@ type GenerateTranscodeTask struct {
} }
func (t *GenerateTranscodeTask) GetDescription() string { func (t *GenerateTranscodeTask) GetDescription() string {
return fmt.Sprintf("Generating transcode for %s", t.Scene.Path()) return fmt.Sprintf("Generating transcode for %s", t.Scene.Path)
} }
func (t *GenerateTranscodeTask) Start(ctc context.Context) { func (t *GenerateTranscodeTask) Start(ctc context.Context) {
@@ -32,11 +32,13 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
return return
} }
f := t.Scene.Files.Primary()
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
var container ffmpeg.Container var container ffmpeg.Container
var err error var err error
container, err = GetSceneFileContainer(&t.Scene) container, err = GetVideoFileContainer(f)
if err != nil { if err != nil {
logger.Errorf("[transcode] error getting scene container: %s", err.Error()) logger.Errorf("[transcode] error getting scene container: %s", err.Error())
return return
@@ -44,13 +46,13 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
var videoCodec string var videoCodec string
if t.Scene.VideoCodec() != "" { if f.VideoCodec != "" {
videoCodec = t.Scene.VideoCodec() videoCodec = f.VideoCodec
} }
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec() != "" { if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec()) audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
} }
if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) == nil { if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) == nil {
@@ -59,7 +61,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// TODO - move transcode generation logic elsewhere // TODO - move transcode generation logic elsewhere
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path()) videoFile, err := ffprobe.NewVideoFile(f.Path)
if err != nil { if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error()) logger.Errorf("[transcode] error reading video file: %s", err.Error())
return return
@@ -100,6 +102,11 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// used only when counting files to generate, doesn't affect the actual transcode generation // used only when counting files to generate, doesn't affect the actual transcode generation
// if container is missing from DB it is treated as non supported in order not to delay the user // if container is missing from DB it is treated as non supported in order not to delay the user
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool { func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
f := t.Scene.Files.Primary()
if f == nil {
return false
}
hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm) hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
if !t.Overwrite && hasTranscode { if !t.Overwrite && hasTranscode {
return false return false
@@ -110,17 +117,17 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
} }
var videoCodec string var videoCodec string
if t.Scene.VideoCodec() != "" { if f.VideoCodec != "" {
videoCodec = t.Scene.VideoCodec() videoCodec = f.VideoCodec
} }
container := "" container := ""
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec() != "" { if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec()) audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
} }
if t.Scene.Format() != "" { if f.Format != "" {
container = t.Scene.Format() container = f.Format
} }
if ffmpeg.IsStreamable(videoCodec, audioCodec, ffmpeg.Container(container)) == nil { if ffmpeg.IsStreamable(videoCodec, audioCodec, ffmpeg.Container(container)) == nil {

View File

@@ -24,6 +24,26 @@ func (f Fingerprints) Get(type_ string) interface{} {
return nil return nil
} }
func (f Fingerprints) GetString(type_ string) string {
fp := f.Get(type_)
if fp != nil {
s, _ := fp.(string)
return s
}
return ""
}
func (f Fingerprints) GetInt64(type_ string) int64 {
fp := f.Get(type_)
if fp != nil {
v, _ := fp.(int64)
return v
}
return 0
}
// AppendUnique appends a fingerprint to the list if a Fingerprint of the same type does not already exist in the list. If one does, then it is updated with o's Fingerprint value. // AppendUnique appends a fingerprint to the list if a Fingerprint of the same type does not already exist in the list. If one does, then it is updated with o's Fingerprint value.
func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints { func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints {
ret := f ret := f

View File

@@ -18,7 +18,8 @@ import (
const ( const (
scanQueueSize = 200000 scanQueueSize = 200000
// maximum number of times to retry in the event of a locked database // maximum number of times to retry in the event of a locked database
maxRetries = 1000 // use -1 to retry forever
maxRetries = -1
) )
// Repository provides access to storage methods for files and folders. // Repository provides access to storage methods for files and folders.

View File

@@ -15,3 +15,14 @@ type VideoFile struct {
Interactive bool `json:"interactive"` Interactive bool `json:"interactive"`
InteractiveSpeed *int `json:"interactive_speed"` InteractiveSpeed *int `json:"interactive_speed"`
} }
func (f VideoFile) GetMinResolution() int {
w := f.Width
h := f.Height
if w < h {
return w
}
return h
}

View File

@@ -48,7 +48,7 @@ func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, f
} }
// for zip-based galleries, delete the images as well first // for zip-based galleries, delete the images as well first
for _, f := range i.Files { for _, f := range i.Files.List() {
// only do this where there are no other galleries related to the file // only do this where there are no other galleries related to the file
otherGalleries, err := s.Repository.FindByFileID(ctx, f.Base().ID) otherGalleries, err := s.Repository.FindByFileID(ctx, f.Base().ID)
if err != nil { if err != nil {

View File

@@ -13,15 +13,18 @@ import (
// does not convert the relationships to other objects. // does not convert the relationships to other objects.
func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
newGalleryJSON := jsonschema.Gallery{ newGalleryJSON := jsonschema.Gallery{
FolderPath: gallery.FolderPath, Title: gallery.Title,
Title: gallery.Title, URL: gallery.URL,
URL: gallery.URL, Details: gallery.Details,
Details: gallery.Details, CreatedAt: json.JSONTime{Time: gallery.CreatedAt},
CreatedAt: json.JSONTime{Time: gallery.CreatedAt}, UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
} }
for _, f := range gallery.Files { if gallery.FolderID != nil {
newGalleryJSON.FolderPath = gallery.Path
}
for _, f := range gallery.Files.List() {
newGalleryJSON.ZipFiles = append(newGalleryJSON.ZipFiles, f.Base().Path) newGalleryJSON.ZipFiles = append(newGalleryJSON.ZipFiles, f.Base().Path)
} }
@@ -69,10 +72,10 @@ func GetRefs(galleries []*models.Gallery) []jsonschema.GalleryRef {
for _, gallery := range galleries { for _, gallery := range galleries {
toAdd := jsonschema.GalleryRef{} toAdd := jsonschema.GalleryRef{}
switch { switch {
case gallery.FolderPath != "": case gallery.FolderID != nil:
toAdd.FolderPath = gallery.FolderPath toAdd.FolderPath = gallery.Path
case len(gallery.Files) > 0: case len(gallery.Files.List()) > 0:
for _, f := range gallery.Files { for _, f := range gallery.Files.List() {
toAdd.ZipFiles = append(toAdd.ZipFiles, f.Base().Path) toAdd.ZipFiles = append(toAdd.ZipFiles, f.Base().Path)
} }
default: default:

View File

@@ -3,6 +3,7 @@ package gallery
import ( import (
"errors" "errors"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
@@ -35,6 +36,7 @@ var (
const ( const (
studioName = "studioName" studioName = "studioName"
path = "path"
) )
var ( var (
@@ -44,7 +46,12 @@ var (
func createFullGallery(id int) models.Gallery { func createFullGallery(id int) models.Gallery {
return models.Gallery{ return models.Gallery{
ID: id, ID: id,
Files: models.NewRelatedFiles([]file.File{
&file.BaseFile{
Path: path,
},
}),
Title: title, Title: title,
Date: &dateObj, Date: &dateObj,
Details: details, Details: details,
@@ -64,6 +71,7 @@ func createFullJSONGallery() *jsonschema.Gallery {
Rating: rating, Rating: rating,
Organized: organized, Organized: organized,
URL: url, URL: url,
ZipFiles: []string{path},
CreatedAt: json.JSONTime{ CreatedAt: json.JSONTime{
Time: createTime, Time: createTime,
}, },

View File

@@ -3,7 +3,6 @@ package gallery
import ( import (
"context" "context"
"fmt" "fmt"
"path/filepath"
"strings" "strings"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -247,8 +246,10 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta
} }
func (i *Importer) populateFilesFolder(ctx context.Context) error { func (i *Importer) populateFilesFolder(ctx context.Context) error {
files := make([]file.File, 0)
for _, ref := range i.Input.ZipFiles { for _, ref := range i.Input.ZipFiles {
path := filepath.FromSlash(ref) path := ref
f, err := i.FileFinder.FindByPath(ctx, path) f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil { if err != nil {
return fmt.Errorf("error finding file: %w", err) return fmt.Errorf("error finding file: %w", err)
@@ -257,12 +258,14 @@ func (i *Importer) populateFilesFolder(ctx context.Context) error {
if f == nil { if f == nil {
return fmt.Errorf("gallery zip file '%s' not found", path) return fmt.Errorf("gallery zip file '%s' not found", path)
} else { } else {
i.gallery.Files = append(i.gallery.Files, f) files = append(files, f)
} }
} }
i.gallery.Files = models.NewRelatedFiles(files)
if i.Input.FolderPath != "" { if i.Input.FolderPath != "" {
path := filepath.FromSlash(i.Input.FolderPath) path := i.Input.FolderPath
f, err := i.FolderFinder.FindByPath(ctx, path) f, err := i.FolderFinder.FindByPath(ctx, path)
if err != nil { if err != nil {
return fmt.Errorf("error finding folder: %w", err) return fmt.Errorf("error finding folder: %w", err)
@@ -302,8 +305,8 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
var existing []*models.Gallery var existing []*models.Gallery
var err error var err error
switch { switch {
case len(i.gallery.Files) > 0: case len(i.gallery.Files.List()) > 0:
for _, f := range i.gallery.Files { for _, f := range i.gallery.Files.List() {
existing, err := i.ReaderWriter.FindByFileID(ctx, f.Base().ID) existing, err := i.ReaderWriter.FindByFileID(ctx, f.Base().ID)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -333,7 +336,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Create(ctx context.Context) (*int, error) {
var fileIDs []file.ID var fileIDs []file.ID
for _, f := range i.gallery.Files { for _, f := range i.gallery.Files.List() {
fileIDs = append(fileIDs, f.Base().ID) fileIDs = append(fileIDs, f.Base().ID)
} }
err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs) err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs)

View File

@@ -6,6 +6,7 @@ import (
"testing" "testing"
"time" "time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
@@ -67,6 +68,7 @@ func TestImporterPreImport(t *testing.T) {
Rating: &rating, Rating: &rating,
Organized: organized, Organized: organized,
URL: url, URL: url,
Files: models.NewRelatedFiles([]file.File{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
CreatedAt: createdAt, CreatedAt: createdAt,

View File

@@ -19,6 +19,7 @@ type FinderCreatorUpdater interface {
Finder Finder
Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error
AddFileID(ctx context.Context, id int, fileID file.ID) error AddFileID(ctx context.Context, id int, fileID file.ID) error
models.FileLoader
} }
type SceneFinderUpdater interface { type SceneFinderUpdater interface {
@@ -83,8 +84,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f file.File) error { func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f file.File) error {
for _, i := range existing { for _, i := range existing {
if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil {
return err
}
found := false found := false
for _, sf := range i.Files { for _, sf := range i.Files.List() {
if sf.Base().ID == f.Base().ID { if sf.Base().ID == f.Base().ID {
found = true found = true
break break
@@ -93,12 +98,12 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
if !found { if !found {
logger.Infof("Adding %s to gallery %s", f.Base().Path, i.GetTitle()) logger.Infof("Adding %s to gallery %s", f.Base().Path, i.GetTitle())
i.Files = append(i.Files, f)
if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil {
return fmt.Errorf("adding file to gallery: %w", err)
}
} }
if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil {
return fmt.Errorf("adding file to gallery: %w", err)
}
} }
return nil return nil

View File

@@ -22,7 +22,7 @@ type FileDeleter struct {
// MarkGeneratedFiles marks for deletion the generated files for the provided image. // MarkGeneratedFiles marks for deletion the generated files for the provided image.
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error { func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum(), models.DefaultGthumbWidth) thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
exists, _ := fsutil.FileExists(thumbPath) exists, _ := fsutil.FileExists(thumbPath)
if exists { if exists {
return d.Files([]string{thumbPath}) return d.Files([]string{thumbPath})
@@ -47,6 +47,10 @@ func (s *Service) DestroyZipImages(ctx context.Context, zipFile file.File, fileD
} }
for _, img := range imgs { for _, img := range imgs {
if err := img.LoadFiles(ctx, s.Repository); err != nil {
return nil, err
}
const deleteFileInZip = false const deleteFileInZip = false
if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil { if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil {
return nil, err return nil, err
@@ -77,7 +81,7 @@ func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter
// deleteFiles deletes files for the image from the database and file system, if they are not in use by other images // deleteFiles deletes files for the image from the database and file system, if they are not in use by other images
func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter *FileDeleter) error { func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter *FileDeleter) error {
for _, f := range i.Files { for _, f := range i.Files.List() {
// only delete files where there is no other associated image // only delete files where there is no other associated image
otherImages, err := s.Repository.FindByFileID(ctx, f.ID) otherImages, err := s.Repository.FindByFileID(ctx, f.ID)
if err != nil { if err != nil {

View File

@@ -26,7 +26,7 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
newImageJSON.Organized = image.Organized newImageJSON.Organized = image.Organized
newImageJSON.OCounter = image.OCounter newImageJSON.OCounter = image.OCounter
for _, f := range image.Files { for _, f := range image.Files.List() {
newImageJSON.Files = append(newImageJSON.Files, f.Base().Path) newImageJSON.Files = append(newImageJSON.Files, f.Base().Path)
} }

View File

@@ -3,6 +3,7 @@ package image
import ( import (
"errors" "errors"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
@@ -30,6 +31,7 @@ var (
const ( const (
studioName = "studioName" studioName = "studioName"
path = "path"
) )
var ( var (
@@ -39,7 +41,14 @@ var (
func createFullImage(id int) models.Image { func createFullImage(id int) models.Image {
return models.Image{ return models.Image{
ID: id, ID: id,
Files: models.NewRelatedImageFiles([]*file.ImageFile{
{
BaseFile: &file.BaseFile{
Path: path,
},
},
}),
Title: title, Title: title,
OCounter: ocounter, OCounter: ocounter,
Rating: &rating, Rating: &rating,
@@ -55,6 +64,7 @@ func createFullJSONImage() *jsonschema.Image {
OCounter: ocounter, OCounter: ocounter,
Rating: rating, Rating: rating,
Organized: organized, Organized: organized,
Files: []string{path},
CreatedAt: json.JSONTime{ CreatedAt: json.JSONTime{
Time: createTime, Time: createTime,
}, },

View File

@@ -8,5 +8,5 @@ import (
) )
func IsCover(img *models.Image) bool { func IsCover(img *models.Image) bool {
return strings.HasSuffix(img.Path(), "cover.jpg") return strings.HasSuffix(img.Path, "cover.jpg")
} }

View File

@@ -5,7 +5,6 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@@ -28,13 +27,7 @@ func TestIsCover(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
for _, tc := range tests { for _, tc := range tests {
img := &models.Image{ img := &models.Image{
Files: []*file.ImageFile{ Path: tc.fn,
{
BaseFile: &file.BaseFile{
Path: tc.fn,
},
},
},
} }
assert.Equal(tc.isCover, IsCover(img), "expected: %t for %s", tc.isCover, tc.fn) assert.Equal(tc.isCover, IsCover(img), "expected: %t for %s", tc.isCover, tc.fn)
} }

View File

@@ -3,7 +3,6 @@ package image
import ( import (
"context" "context"
"fmt" "fmt"
"path/filepath"
"strings" "strings"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -91,8 +90,10 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
} }
func (i *Importer) populateFiles(ctx context.Context) error { func (i *Importer) populateFiles(ctx context.Context) error {
files := make([]*file.ImageFile, 0)
for _, ref := range i.Input.Files { for _, ref := range i.Input.Files {
path := filepath.FromSlash(ref) path := ref
f, err := i.FileFinder.FindByPath(ctx, path) f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil { if err != nil {
return fmt.Errorf("error finding file: %w", err) return fmt.Errorf("error finding file: %w", err)
@@ -101,10 +102,12 @@ func (i *Importer) populateFiles(ctx context.Context) error {
if f == nil { if f == nil {
return fmt.Errorf("image file '%s' not found", path) return fmt.Errorf("image file '%s' not found", path)
} else { } else {
i.image.Files = append(i.image.Files, f.(*file.ImageFile)) files = append(files, f.(*file.ImageFile))
} }
} }
i.image.Files = models.NewRelatedImageFiles(files)
return nil return nil
} }
@@ -300,7 +303,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
var existing []*models.Image var existing []*models.Image
var err error var err error
for _, f := range i.image.Files { for _, f := range i.image.Files.List() {
existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID) existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -317,7 +320,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Create(ctx context.Context) (*int, error) {
var fileIDs []file.ID var fileIDs []file.ID
for _, f := range i.image.Files { for _, f := range i.image.Files.List() {
fileIDs = append(fileIDs, f.Base().ID) fileIDs = append(fileIDs, f.Base().ID)
} }

View File

@@ -26,6 +26,7 @@ type FinderCreatorUpdater interface {
Create(ctx context.Context, newImage *models.ImageCreateInput) error Create(ctx context.Context, newImage *models.ImageCreateInput) error
AddFileID(ctx context.Context, id int, fileID file.ID) error AddFileID(ctx context.Context, id int, fileID file.ID) error
models.GalleryIDLoader models.GalleryIDLoader
models.ImageFileLoader
} }
type GalleryFinderCreator interface { type GalleryFinderCreator interface {
@@ -145,8 +146,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.ImageFile) error { func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.ImageFile) error {
for _, i := range existing { for _, i := range existing {
if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil {
return err
}
found := false found := false
for _, sf := range i.Files { for _, sf := range i.Files.List() {
if sf.ID == f.Base().ID { if sf.ID == f.Base().ID {
found = true found = true
break break
@@ -155,7 +160,6 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
if !found { if !found {
logger.Infof("Adding %s to image %s", f.Path, i.GetTitle()) logger.Infof("Adding %s to image %s", f.Path, i.GetTitle())
i.Files = append(i.Files, f)
// associate with folder-based gallery if applicable // associate with folder-based gallery if applicable
if h.ScanConfig.GetCreateGalleriesFromFolders() { if h.ScanConfig.GetCreateGalleriesFromFolders() {
@@ -218,7 +222,7 @@ func (h *ScanHandler) associateFolderBasedGallery(ctx context.Context, newImage
if g != nil && !intslice.IntInclude(newImage.GalleryIDs.List(), g.ID) { if g != nil && !intslice.IntInclude(newImage.GalleryIDs.List(), g.ID) {
newImage.GalleryIDs.Add(g.ID) newImage.GalleryIDs.Add(g.ID)
logger.Infof("Adding %s to folder-based gallery %s", f.Base().Path, g.Path()) logger.Infof("Adding %s to folder-based gallery %s", f.Base().Path, g.Path)
} }
return nil return nil

View File

@@ -15,6 +15,7 @@ type FinderByFile interface {
type Repository interface { type Repository interface {
FinderByFile FinderByFile
Destroyer Destroyer
models.ImageFileLoader
} }
type Service struct { type Service struct {

View File

@@ -307,7 +307,7 @@ func PathToScenes(ctx context.Context, name string, paths []string, sceneReader
r := nameToRegexp(name, useUnicode) r := nameToRegexp(name, useUnicode)
for _, p := range scenes { for _, p := range scenes {
if regexpMatchesPath(r, p.Path()) != -1 { if regexpMatchesPath(r, p.Path) != -1 {
ret = append(ret, p) ret = append(ret, p)
} }
} }
@@ -344,7 +344,7 @@ func PathToImages(ctx context.Context, name string, paths []string, imageReader
r := nameToRegexp(name, useUnicode) r := nameToRegexp(name, useUnicode)
for _, p := range images { for _, p := range images {
if regexpMatchesPath(r, p.Path()) != -1 { if regexpMatchesPath(r, p.Path) != -1 {
ret = append(ret, p) ret = append(ret, p)
} }
} }
@@ -381,7 +381,7 @@ func PathToGalleries(ctx context.Context, name string, paths []string, galleryRe
r := nameToRegexp(name, useUnicode) r := nameToRegexp(name, useUnicode)
for _, p := range gallerys { for _, p := range gallerys {
path := p.Path() path := p.Path
if path != "" && regexpMatchesPath(r, path) != -1 { if path != "" && regexpMatchesPath(r, path) != -1 {
ret = append(ret, p) ret = append(ret, p)
} }

View File

@@ -500,6 +500,29 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte,
return r0, r1 return r0, r1
} }
// GetFiles provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) {
	ret := _m.Called(ctx, relatedID)

	// First return value: either produced by a registered function, or the
	// stored value itself (nil is left as a nil slice).
	var files []*file.VideoFile
	if fn, ok := ret.Get(0).(func(context.Context, int) []*file.VideoFile); ok {
		files = fn(ctx, relatedID)
	} else if v := ret.Get(0); v != nil {
		files = v.([]*file.VideoFile)
	}

	// Second return value: either produced by a registered function, or the
	// stored error.
	var err error
	if fn, ok := ret.Get(1).(func(context.Context, int) error); ok {
		err = fn(ctx, relatedID)
	} else {
		err = ret.Error(1)
	}

	return files, err
}
// GetGalleryIDs provides a mock function with given fields: ctx, relatedID // GetGalleryIDs provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) { func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) {
ret := _m.Called(ctx, relatedID) ret := _m.Called(ctx, relatedID)

View File

@@ -2,7 +2,6 @@ package models
import ( import (
"context" "context"
"path/filepath"
"time" "time"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -11,10 +10,6 @@ import (
type Gallery struct { type Gallery struct {
ID int `json:"id"` ID int `json:"id"`
// Path *string `json:"path"`
// Checksum string `json:"checksum"`
// Zip bool `json:"zip"`
Title string `json:"title"` Title string `json:"title"`
URL string `json:"url"` URL string `json:"url"`
Date *Date `json:"date"` Date *Date `json:"date"`
@@ -23,16 +18,15 @@ type Gallery struct {
Organized bool `json:"organized"` Organized bool `json:"organized"`
StudioID *int `json:"studio_id"` StudioID *int `json:"studio_id"`
// FileModTime *time.Time `json:"file_mod_time"`
// transient - not persisted // transient - not persisted
Files []file.File Files RelatedFiles
// transient - not persisted
PrimaryFileID *file.ID
// transient - path of primary file or folder
Path string
FolderID *file.FolderID `json:"folder_id"` FolderID *file.FolderID `json:"folder_id"`
// transient - not persisted
FolderPath string `json:"folder_path"`
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"` UpdatedAt time.Time `json:"updated_at"`
@@ -41,6 +35,30 @@ type Gallery struct {
PerformerIDs RelatedIDs `json:"performer_ids"` PerformerIDs RelatedIDs `json:"performer_ids"`
} }
// LoadFiles populates the gallery's related files from l, unless they are
// already loaded.
func (g *Gallery) LoadFiles(ctx context.Context, l FileLoader) error {
	fetch := func() ([]file.File, error) {
		return l.GetFiles(ctx, g.ID)
	}
	return g.Files.load(fetch)
}
// LoadPrimaryFile populates the gallery's primary file from l, unless it is
// already loaded. The primary file is left nil when PrimaryFileID is nil or
// when the finder returns no results.
func (g *Gallery) LoadPrimaryFile(ctx context.Context, l file.Finder) error {
	return g.Files.loadPrimary(func() (file.File, error) {
		if g.PrimaryFileID == nil {
			return nil, nil
		}

		files, err := l.Find(ctx, *g.PrimaryFileID)
		switch {
		case err != nil:
			return nil, err
		case len(files) == 0:
			return nil, nil
		default:
			return files[0], nil
		}
	})
}
func (g *Gallery) LoadSceneIDs(ctx context.Context, l SceneIDLoader) error { func (g *Gallery) LoadSceneIDs(ctx context.Context, l SceneIDLoader) error {
return g.SceneIDs.load(func() ([]int, error) { return g.SceneIDs.load(func() ([]int, error) {
return l.GetSceneIDs(ctx, g.ID) return l.GetSceneIDs(ctx, g.ID)
@@ -59,24 +77,8 @@ func (g *Gallery) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
}) })
} }
func (g Gallery) PrimaryFile() file.File {
if len(g.Files) == 0 {
return nil
}
return g.Files[0]
}
func (g Gallery) Path() string {
if p := g.PrimaryFile(); p != nil {
return p.Base().Path
}
return g.FolderPath
}
func (g Gallery) Checksum() string { func (g Gallery) Checksum() string {
if p := g.PrimaryFile(); p != nil { if p := g.Files.Primary(); p != nil {
v := p.Base().Fingerprints.Get(file.FingerprintTypeMD5) v := p.Base().Fingerprints.Get(file.FingerprintTypeMD5)
if v == nil { if v == nil {
return "" return ""
@@ -123,15 +125,7 @@ func (g Gallery) GetTitle() string {
return g.Title return g.Title
} }
if len(g.Files) > 0 { return g.Path
return filepath.Base(g.Path())
}
if g.FolderPath != "" {
return g.FolderPath
}
return ""
} }
const DefaultGthumbWidth int = 640 const DefaultGthumbWidth int = 640

View File

@@ -2,6 +2,8 @@ package models
import ( import (
"context" "context"
"errors"
"path/filepath"
"time" "time"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -18,7 +20,12 @@ type Image struct {
StudioID *int `json:"studio_id"` StudioID *int `json:"studio_id"`
// transient - not persisted // transient - not persisted
Files []*file.ImageFile Files RelatedImageFiles
PrimaryFileID *file.ID
// transient - path of primary file - empty if no files
Path string
// transient - checksum of primary file - empty if no files
Checksum string
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"` UpdatedAt time.Time `json:"updated_at"`
@@ -28,6 +35,35 @@ type Image struct {
PerformerIDs RelatedIDs `json:"performer_ids"` PerformerIDs RelatedIDs `json:"performer_ids"`
} }
// LoadFiles populates the image's related files from l, unless they are
// already loaded.
func (i *Image) LoadFiles(ctx context.Context, l ImageFileLoader) error {
	fetch := func() ([]*file.ImageFile, error) {
		return l.GetFiles(ctx, i.ID)
	}
	return i.Files.load(fetch)
}
// LoadPrimaryFile populates the image's primary file from l, unless it is
// already loaded. The primary file is left nil when PrimaryFileID is nil or
// when the finder returns no results; a non-image result is an error.
func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error {
	return i.Files.loadPrimary(func() (*file.ImageFile, error) {
		if i.PrimaryFileID == nil {
			return nil, nil
		}

		found, err := l.Find(ctx, *i.PrimaryFileID)
		if err != nil {
			return nil, err
		}
		if len(found) == 0 {
			return nil, nil
		}

		imf, ok := found[0].(*file.ImageFile)
		if !ok {
			return nil, errors.New("not an image file")
		}
		return imf, nil
	})
}
func (i *Image) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error { func (i *Image) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error {
return i.GalleryIDs.load(func() ([]int, error) { return i.GalleryIDs.load(func() ([]int, error) {
return l.GetGalleryIDs(ctx, i.ID) return l.GetGalleryIDs(ctx, i.ID)
@@ -46,34 +82,6 @@ func (i *Image) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
}) })
} }
func (i Image) PrimaryFile() *file.ImageFile {
if len(i.Files) == 0 {
return nil
}
return i.Files[0]
}
func (i Image) Path() string {
if p := i.PrimaryFile(); p != nil {
return p.Path
}
return ""
}
func (i Image) Checksum() string {
if p := i.PrimaryFile(); p != nil {
v := p.Fingerprints.Get(file.FingerprintTypeMD5)
if v == nil {
return ""
}
return v.(string)
}
return ""
}
// GetTitle returns the title of the image. If the Title field is empty, // GetTitle returns the title of the image. If the Title field is empty,
// then the base filename is returned. // then the base filename is returned.
func (i Image) GetTitle() string { func (i Image) GetTitle() string {
@@ -81,8 +89,8 @@ func (i Image) GetTitle() string {
return i.Title return i.Title
} }
if p := i.PrimaryFile(); p != nil { if i.Path != "" {
return p.Basename return filepath.Base(i.Path)
} }
return "" return ""

View File

@@ -2,6 +2,7 @@ package models
import ( import (
"context" "context"
"errors"
"path/filepath" "path/filepath"
"strconv" "strconv"
"time" "time"
@@ -22,7 +23,14 @@ type Scene struct {
StudioID *int `json:"studio_id"` StudioID *int `json:"studio_id"`
// transient - not persisted // transient - not persisted
Files []*file.VideoFile Files RelatedVideoFiles
PrimaryFileID *file.ID
// transient - path of primary file - empty if no files
Path string
// transient - oshash of primary file - empty if no files
OSHash string
// transient - checksum of primary file - empty if no files
Checksum string
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"` UpdatedAt time.Time `json:"updated_at"`
@@ -34,6 +42,35 @@ type Scene struct {
StashIDs RelatedStashIDs `json:"stash_ids"` StashIDs RelatedStashIDs `json:"stash_ids"`
} }
// LoadFiles populates the scene's related video files from l, unless they are
// already loaded.
func (s *Scene) LoadFiles(ctx context.Context, l VideoFileLoader) error {
	fetch := func() ([]*file.VideoFile, error) {
		return l.GetFiles(ctx, s.ID)
	}
	return s.Files.load(fetch)
}
// LoadPrimaryFile populates the scene's primary file from l, unless it is
// already loaded. The primary file is left nil when PrimaryFileID is nil or
// when the finder returns no results; a non-video result is an error.
func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error {
	return s.Files.loadPrimary(func() (*file.VideoFile, error) {
		if s.PrimaryFileID == nil {
			return nil, nil
		}

		found, err := l.Find(ctx, *s.PrimaryFileID)
		if err != nil {
			return nil, err
		}
		if len(found) == 0 {
			return nil, nil
		}

		vf, ok := found[0].(*file.VideoFile)
		if !ok {
			return nil, errors.New("not a video file")
		}
		return vf, nil
	})
}
func (s *Scene) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error { func (s *Scene) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error {
return s.GalleryIDs.load(func() ([]int, error) { return s.GalleryIDs.load(func() ([]int, error) {
return l.GetGalleryIDs(ctx, s.ID) return l.GetGalleryIDs(ctx, s.ID)
@@ -85,89 +122,13 @@ func (s *Scene) LoadRelationships(ctx context.Context, l SceneReader) error {
return err return err
} }
if err := s.LoadFiles(ctx, l); err != nil {
return err
}
return nil return nil
} }
func (s Scene) PrimaryFile() *file.VideoFile {
if len(s.Files) == 0 {
return nil
}
return s.Files[0]
}
func (s Scene) Path() string {
if p := s.PrimaryFile(); p != nil {
return p.Base().Path
}
return ""
}
func (s Scene) getHash(type_ string) string {
if p := s.PrimaryFile(); p != nil {
v := p.Base().Fingerprints.Get(type_)
if v == nil {
return ""
}
return v.(string)
}
return ""
}
func (s Scene) Checksum() string {
return s.getHash(file.FingerprintTypeMD5)
}
func (s Scene) OSHash() string {
return s.getHash(file.FingerprintTypeOshash)
}
func (s Scene) Phash() int64 {
if p := s.PrimaryFile(); p != nil {
v := p.Base().Fingerprints.Get(file.FingerprintTypePhash)
if v == nil {
return 0
}
return v.(int64)
}
return 0
}
func (s Scene) Duration() float64 {
if p := s.PrimaryFile(); p != nil {
return p.Duration
}
return 0
}
func (s Scene) Format() string {
if p := s.PrimaryFile(); p != nil {
return p.Format
}
return ""
}
func (s Scene) VideoCodec() string {
if p := s.PrimaryFile(); p != nil {
return p.VideoCodec
}
return ""
}
func (s Scene) AudioCodec() string {
if p := s.PrimaryFile(); p != nil {
return p.AudioCodec
}
return ""
}
// ScenePartial represents part of a Scene object. It is used to update // ScenePartial represents part of a Scene object. It is used to update
// the database entry. // the database entry.
type ScenePartial struct { type ScenePartial struct {
@@ -258,43 +219,22 @@ func (s Scene) GetTitle() string {
return s.Title return s.Title
} }
return filepath.Base(s.Path()) return filepath.Base(s.Path)
} }
// GetHash returns the hash of the scene, based on the hash algorithm provided. If // GetHash returns the hash of the scene, based on the hash algorithm provided. If
// hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned. // hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned.
func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string { func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string {
f := s.PrimaryFile()
if f == nil {
return ""
}
switch hashAlgorithm { switch hashAlgorithm {
case HashAlgorithmMd5: case HashAlgorithmMd5:
return f.Base().Fingerprints.Get(file.FingerprintTypeMD5).(string) return s.Checksum
case HashAlgorithmOshash: case HashAlgorithmOshash:
return f.Base().Fingerprints.Get(file.FingerprintTypeOshash).(string) return s.OSHash
} }
return "" return ""
} }
func (s Scene) GetMinResolution() int {
f := s.PrimaryFile()
if f == nil {
return 0
}
w := f.Width
h := f.Height
if w < h {
return w
}
return h
}
// SceneFileType represents the file metadata for a scene. // SceneFileType represents the file metadata for a scene.
type SceneFileType struct { type SceneFileType struct {
Size *string `graphql:"size" json:"size"` Size *string `graphql:"size" json:"size"`

View File

@@ -1,6 +1,10 @@
package models package models
import "context" import (
"context"
"github.com/stashapp/stash/pkg/file"
)
type SceneIDLoader interface { type SceneIDLoader interface {
GetSceneIDs(ctx context.Context, relatedID int) ([]int, error) GetSceneIDs(ctx context.Context, relatedID int) ([]int, error)
@@ -26,6 +30,18 @@ type StashIDLoader interface {
GetStashIDs(ctx context.Context, relatedID int) ([]StashID, error) GetStashIDs(ctx context.Context, relatedID int) ([]StashID, error)
} }
type VideoFileLoader interface {
GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error)
}
type ImageFileLoader interface {
GetFiles(ctx context.Context, relatedID int) ([]*file.ImageFile, error)
}
type FileLoader interface {
GetFiles(ctx context.Context, relatedID int) ([]file.File, error)
}
// RelatedIDs represents a list of related IDs. // RelatedIDs represents a list of related IDs.
// TODO - this can be made generic // TODO - this can be made generic
type RelatedIDs struct { type RelatedIDs struct {
@@ -189,3 +205,266 @@ func (r *RelatedStashIDs) load(fn func() ([]StashID, error)) error {
return nil return nil
} }
// RelatedVideoFiles lazily holds the video files related to an object,
// tracking the primary (first) file separately so it can be loaded on its own.
type RelatedVideoFiles struct {
	primaryFile   *file.VideoFile
	files         []*file.VideoFile
	primaryLoaded bool
}

// NewRelatedVideoFiles returns a RelatedVideoFiles initialised with files.
// The first entry, if any, becomes the primary file, and both relationships
// are marked as loaded.
func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles {
	r := RelatedVideoFiles{
		files:         files,
		primaryLoaded: true,
	}
	if len(files) != 0 {
		r.primaryFile = files[0]
	}
	return r
}

// SetPrimary sets the primary file and marks the primary relationship as loaded.
func (r *RelatedVideoFiles) SetPrimary(f *file.VideoFile) {
	r.primaryFile = f
	r.primaryLoaded = true
}

// Set replaces the file list. The first entry, if any, becomes the primary
// file, and the primary relationship is marked as loaded.
func (r *RelatedVideoFiles) Set(f []*file.VideoFile) {
	r.files = f
	if len(f) != 0 {
		r.primaryFile = f[0]
	}
	r.primaryLoaded = true
}

// Loaded returns true if the file list has been loaded.
func (r RelatedVideoFiles) Loaded() bool {
	return r.files != nil
}

// PrimaryLoaded returns true if the primary file relationship has been loaded.
func (r RelatedVideoFiles) PrimaryLoaded() bool {
	return r.primaryLoaded
}

// List returns the related files. Panics if the relationship has not been loaded.
func (r RelatedVideoFiles) List() []*file.VideoFile {
	if !r.Loaded() {
		panic("relationship has not been loaded")
	}
	return r.files
}

// Primary returns the primary file, or nil if there is none. Panics if the
// primary relationship has not been loaded.
func (r RelatedVideoFiles) Primary() *file.VideoFile {
	if !r.PrimaryLoaded() {
		panic("relationship has not been loaded")
	}
	return r.primaryFile
}

// load populates the file list via fn, unless it is already loaded.
func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error {
	if r.Loaded() {
		return nil
	}

	files, err := fn()
	r.files = files
	if err != nil {
		return err
	}

	if len(files) != 0 {
		r.primaryFile = files[0]
	}
	r.primaryLoaded = true
	return nil
}

// loadPrimary populates the primary file via fn, unless it is already loaded.
func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) error {
	if r.PrimaryLoaded() {
		return nil
	}

	f, err := fn()
	r.primaryFile = f
	if err != nil {
		return err
	}

	r.primaryLoaded = true
	return nil
}
// RelatedImageFiles lazily holds the image files related to an object,
// tracking the primary (first) file separately so it can be loaded on its own.
type RelatedImageFiles struct {
	primaryFile   *file.ImageFile
	files         []*file.ImageFile
	primaryLoaded bool
}

// NewRelatedImageFiles returns a RelatedImageFiles initialised with files.
// The first entry, if any, becomes the primary file, and both relationships
// are marked as loaded.
func NewRelatedImageFiles(files []*file.ImageFile) RelatedImageFiles {
	r := RelatedImageFiles{
		files:         files,
		primaryLoaded: true,
	}
	if len(files) != 0 {
		r.primaryFile = files[0]
	}
	return r
}

// Loaded returns true if the file list has been loaded.
func (r RelatedImageFiles) Loaded() bool {
	return r.files != nil
}

// PrimaryLoaded returns true if the primary file relationship has been loaded.
func (r RelatedImageFiles) PrimaryLoaded() bool {
	return r.primaryLoaded
}

// List returns the related files. Panics if the relationship has not been loaded.
func (r RelatedImageFiles) List() []*file.ImageFile {
	if !r.Loaded() {
		panic("relationship has not been loaded")
	}
	return r.files
}

// Primary returns the primary file, or nil if there is none. Panics if the
// primary relationship has not been loaded.
func (r RelatedImageFiles) Primary() *file.ImageFile {
	if !r.PrimaryLoaded() {
		panic("relationship has not been loaded")
	}
	return r.primaryFile
}

// load populates the file list via fn, unless it is already loaded.
func (r *RelatedImageFiles) load(fn func() ([]*file.ImageFile, error)) error {
	if r.Loaded() {
		return nil
	}

	files, err := fn()
	r.files = files
	if err != nil {
		return err
	}

	if len(files) != 0 {
		r.primaryFile = files[0]
	}
	r.primaryLoaded = true
	return nil
}

// loadPrimary populates the primary file via fn, unless it is already loaded.
func (r *RelatedImageFiles) loadPrimary(fn func() (*file.ImageFile, error)) error {
	if r.PrimaryLoaded() {
		return nil
	}

	f, err := fn()
	r.primaryFile = f
	if err != nil {
		return err
	}

	r.primaryLoaded = true
	return nil
}
// RelatedFiles lazily holds the files related to an object, tracking the
// primary (first) file separately so it can be loaded on its own.
type RelatedFiles struct {
	primaryFile   file.File
	files         []file.File
	primaryLoaded bool
}

// NewRelatedFiles returns a RelatedFiles initialised with files. The first
// entry, if any, becomes the primary file, and both relationships are marked
// as loaded.
func NewRelatedFiles(files []file.File) RelatedFiles {
	r := RelatedFiles{
		files:         files,
		primaryLoaded: true,
	}
	if len(files) != 0 {
		r.primaryFile = files[0]
	}
	return r
}

// Loaded returns true if the file list has been loaded.
func (r RelatedFiles) Loaded() bool {
	return r.files != nil
}

// PrimaryLoaded returns true if the primary file relationship has been loaded.
func (r RelatedFiles) PrimaryLoaded() bool {
	return r.primaryLoaded
}

// List returns the related files. Panics if the relationship has not been loaded.
func (r RelatedFiles) List() []file.File {
	if !r.Loaded() {
		panic("relationship has not been loaded")
	}
	return r.files
}

// Primary returns the primary file, or nil if there is none. Panics if the
// primary relationship has not been loaded.
func (r RelatedFiles) Primary() file.File {
	if !r.PrimaryLoaded() {
		panic("relationship has not been loaded")
	}
	return r.primaryFile
}

// load populates the file list via fn, unless it is already loaded.
func (r *RelatedFiles) load(fn func() ([]file.File, error)) error {
	if r.Loaded() {
		return nil
	}

	files, err := fn()
	r.files = files
	if err != nil {
		return err
	}

	if len(files) != 0 {
		r.primaryFile = files[0]
	}
	r.primaryLoaded = true
	return nil
}

// loadPrimary populates the primary file via fn, unless it is already loaded.
func (r *RelatedFiles) loadPrimary(fn func() (file.File, error)) error {
	if r.PrimaryLoaded() {
		return nil
	}

	f, err := fn()
	r.primaryFile = f
	if err != nil {
		return err
	}

	r.primaryLoaded = true
	return nil
}

View File

@@ -139,6 +139,7 @@ type SceneReader interface {
TagIDLoader TagIDLoader
SceneMovieLoader SceneMovieLoader
StashIDLoader StashIDLoader
VideoFileLoader
CountByPerformerID(ctx context.Context, performerID int) (int, error) CountByPerformerID(ctx context.Context, performerID int) (int, error)
// FindByStudioID(studioID int) ([]*Scene, error) // FindByStudioID(studioID int) ([]*Scene, error)

View File

@@ -161,7 +161,7 @@ func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter
// deleteFiles deletes files from the database and file system // deleteFiles deletes files from the database and file system
func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter) error { func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter) error {
for _, f := range scene.Files { for _, f := range scene.Files.List() {
// only delete files where there is no other associated scene // only delete files where there is no other associated scene
otherScenes, err := s.Repository.FindByFileID(ctx, f.ID) otherScenes, err := s.Repository.FindByFileID(ctx, f.ID)
if err != nil { if err != nil {

View File

@@ -56,7 +56,7 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
newSceneJSON.Organized = scene.Organized newSceneJSON.Organized = scene.Organized
newSceneJSON.OCounter = scene.OCounter newSceneJSON.OCounter = scene.OCounter
for _, f := range scene.Files { for _, f := range scene.Files.List() {
newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path) newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
} }

View File

@@ -3,6 +3,7 @@ package scene
import ( import (
"errors" "errors"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
@@ -72,7 +73,10 @@ var stashID = models.StashID{
Endpoint: "Endpoint", Endpoint: "Endpoint",
} }
const imageBase64 = "aW1hZ2VCeXRlcw==" const (
path = "path"
imageBase64 = "aW1hZ2VCeXRlcw=="
)
var ( var (
createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
@@ -89,6 +93,13 @@ func createFullScene(id int) models.Scene {
Rating: &rating, Rating: &rating,
Organized: organized, Organized: organized,
URL: url, URL: url,
Files: models.NewRelatedVideoFiles([]*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: path,
},
},
}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{ StashIDs: models.NewRelatedStashIDs([]models.StashID{
stashID, stashID,
}), }),
@@ -99,7 +110,14 @@ func createFullScene(id int) models.Scene {
func createEmptyScene(id int) models.Scene { func createEmptyScene(id int) models.Scene {
return models.Scene{ return models.Scene{
ID: id, ID: id,
Files: models.NewRelatedVideoFiles([]*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: path,
},
},
}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
CreatedAt: createTime, CreatedAt: createTime,
UpdatedAt: updateTime, UpdatedAt: updateTime,
@@ -109,6 +127,7 @@ func createEmptyScene(id int) models.Scene {
func createFullJSONScene(image string) *jsonschema.Scene { func createFullJSONScene(image string) *jsonschema.Scene {
return &jsonschema.Scene{ return &jsonschema.Scene{
Title: title, Title: title,
Files: []string{path},
Date: date, Date: date,
Details: details, Details: details,
OCounter: ocounter, OCounter: ocounter,
@@ -130,6 +149,7 @@ func createFullJSONScene(image string) *jsonschema.Scene {
func createEmptyJSONScene() *jsonschema.Scene { func createEmptyJSONScene() *jsonschema.Scene {
return &jsonschema.Scene{ return &jsonschema.Scene{
Files: []string{path},
CreatedAt: json.JSONTime{ CreatedAt: json.JSONTime{
Time: createTime, Time: createTime,
}, },

View File

@@ -3,7 +3,6 @@ package scene
import ( import (
"context" "context"
"fmt" "fmt"
"path/filepath"
"strings" "strings"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -109,8 +108,10 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
} }
func (i *Importer) populateFiles(ctx context.Context) error { func (i *Importer) populateFiles(ctx context.Context) error {
files := make([]*file.VideoFile, 0)
for _, ref := range i.Input.Files { for _, ref := range i.Input.Files {
path := filepath.FromSlash(ref) path := ref
f, err := i.FileFinder.FindByPath(ctx, path) f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil { if err != nil {
return fmt.Errorf("error finding file: %w", err) return fmt.Errorf("error finding file: %w", err)
@@ -119,10 +120,12 @@ func (i *Importer) populateFiles(ctx context.Context) error {
if f == nil { if f == nil {
return fmt.Errorf("scene file '%s' not found", path) return fmt.Errorf("scene file '%s' not found", path)
} else { } else {
i.scene.Files = append(i.scene.Files, f.(*file.VideoFile)) files = append(files, f.(*file.VideoFile))
} }
} }
i.scene.Files = models.NewRelatedVideoFiles(files)
return nil return nil
} }
@@ -374,7 +377,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
var existing []*models.Scene var existing []*models.Scene
var err error var err error
for _, f := range i.scene.Files { for _, f := range i.scene.Files.List() {
existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID) existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -391,7 +394,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Create(ctx context.Context) (*int, error) {
var fileIDs []file.ID var fileIDs []file.ID
for _, f := range i.scene.Files { for _, f := range i.scene.Files.List() {
fileIDs = append(fileIDs, f.Base().ID) fileIDs = append(fileIDs, f.Base().ID)
} }
if err := i.ReaderWriter.Create(ctx, &i.scene, fileIDs); err != nil { if err := i.ReaderWriter.Create(ctx, &i.scene, fileIDs); err != nil {

View File

@@ -24,6 +24,7 @@ type CreatorUpdater interface {
Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error
UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error)
AddFileID(ctx context.Context, id int, fileID file.ID) error AddFileID(ctx context.Context, id int, fileID file.ID) error
models.VideoFileLoader
} }
type ScanGenerator interface { type ScanGenerator interface {
@@ -116,8 +117,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *file.VideoFile) error { func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *file.VideoFile) error {
for _, s := range existing { for _, s := range existing {
if err := s.LoadFiles(ctx, h.CreatorUpdater); err != nil {
return err
}
found := false found := false
for _, sf := range s.Files { for _, sf := range s.Files.List() {
if sf.ID == f.ID { if sf.ID == f.ID {
found = true found = true
break break
@@ -126,7 +131,6 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
if !found { if !found {
logger.Infof("Adding %s to scene %s", f.Path, s.GetTitle()) logger.Infof("Adding %s to scene %s", f.Path, s.GetTitle())
s.Files = append(s.Files, f)
if err := h.CreatorUpdater.AddFileID(ctx, s.ID, f.ID); err != nil { if err := h.CreatorUpdater.AddFileID(ctx, s.ID, f.ID); err != nil {
return fmt.Errorf("adding file to scene: %w", err) return fmt.Errorf("adding file to scene: %w", err)

View File

@@ -95,7 +95,7 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
// populate performers, studio and tags based on scene path // populate performers, studio and tags based on scene path
if err := txn.WithTxn(ctx, s.txnManager, func(ctx context.Context) error { if err := txn.WithTxn(ctx, s.txnManager, func(ctx context.Context) error {
path := scene.Path() path := scene.Path
performers, err := autotagMatchPerformers(ctx, path, s.performerReader, trimExt) performers, err := autotagMatchPerformers(ctx, path, s.performerReader, trimExt)
if err != nil { if err != nil {
return fmt.Errorf("autotag scraper viaScene: %w", err) return fmt.Errorf("autotag scraper viaScene: %w", err)
@@ -127,20 +127,20 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
} }
func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) { func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) {
path := gallery.Path() path := gallery.Path
if path == "" { if path == "" {
// not valid for non-path-based galleries // not valid for non-path-based galleries
return nil, nil return nil, nil
} }
// only trim extension if gallery is file-based // only trim extension if gallery is file-based
trimExt := gallery.PrimaryFile() != nil trimExt := gallery.PrimaryFileID != nil
var ret *ScrapedGallery var ret *ScrapedGallery
// populate performers, studio and tags based on scene path // populate performers, studio and tags based on scene path
if err := txn.WithTxn(ctx, s.txnManager, func(ctx context.Context) error { if err := txn.WithTxn(ctx, s.txnManager, func(ctx context.Context) error {
path := gallery.Path() path := gallery.Path
performers, err := autotagMatchPerformers(ctx, path, s.performerReader, trimExt) performers, err := autotagMatchPerformers(ctx, path, s.performerReader, trimExt)
if err != nil { if err != nil {
return fmt.Errorf("autotag scraper viaGallery: %w", err) return fmt.Errorf("autotag scraper viaGallery: %w", err)

View File

@@ -68,6 +68,7 @@ type TagFinder interface {
type GalleryFinder interface { type GalleryFinder interface {
Find(ctx context.Context, id int) (*models.Gallery, error) Find(ctx context.Context, id int) (*models.Gallery, error)
models.FileLoader
} }
type Repository struct { type Repository struct {
@@ -364,6 +365,11 @@ func (c Cache) getGallery(ctx context.Context, galleryID int) (*models.Gallery,
if err := txn.WithTxn(ctx, c.txnManager, func(ctx context.Context) error { if err := txn.WithTxn(ctx, c.txnManager, func(ctx context.Context) error {
var err error var err error
ret, err = c.repository.GalleryFinder.Find(ctx, galleryID) ret, err = c.repository.GalleryFinder.Find(ctx, galleryID)
if ret != nil {
err = ret.LoadFiles(ctx, c.repository.GalleryFinder)
}
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -13,9 +13,9 @@ type queryURLParameters map[string]string
func queryURLParametersFromScene(scene *models.Scene) queryURLParameters { func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
ret := make(queryURLParameters) ret := make(queryURLParameters)
ret["checksum"] = scene.Checksum() ret["checksum"] = scene.Checksum
ret["oshash"] = scene.OSHash() ret["oshash"] = scene.OSHash
ret["filename"] = filepath.Base(scene.Path()) ret["filename"] = filepath.Base(scene.Path)
if scene.Title != "" { if scene.Title != "" {
ret["title"] = scene.Title ret["title"] = scene.Title
@@ -53,8 +53,8 @@ func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters {
ret := make(queryURLParameters) ret := make(queryURLParameters)
ret["checksum"] = gallery.Checksum() ret["checksum"] = gallery.Checksum()
if gallery.Path() != "" { if gallery.Path != "" {
ret["filename"] = filepath.Base(gallery.Path()) ret["filename"] = filepath.Base(gallery.Path)
} }
if gallery.Title != "" { if gallery.Title != "" {
ret["title"] = gallery.Title ret["title"] = gallery.Title

View File

@@ -229,8 +229,8 @@ func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce
Oshash *string `graphql:"oshash" json:"oshash"` Oshash *string `graphql:"oshash" json:"oshash"`
} }
checksum := scene.Checksum() checksum := scene.Checksum
oshash := scene.OSHash() oshash := scene.OSHash
input := SceneHashInput{ input := SceneHashInput{
Checksum: &checksum, Checksum: &checksum,

View File

@@ -17,6 +17,7 @@ import (
"golang.org/x/text/language" "golang.org/x/text/language"
"github.com/Yamashou/gqlgenc/graphqljson" "github.com/Yamashou/gqlgenc/graphqljson"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
@@ -33,6 +34,7 @@ import (
type SceneReader interface { type SceneReader interface {
Find(ctx context.Context, id int) (*models.Scene, error) Find(ctx context.Context, id int) (*models.Scene, error)
models.StashIDLoader models.StashIDLoader
models.VideoFileLoader
} }
type PerformerReader interface { type PerformerReader interface {
@@ -140,31 +142,37 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int)
return fmt.Errorf("scene with id %d not found", sceneID) return fmt.Errorf("scene with id %d not found", sceneID)
} }
if err := scene.LoadFiles(ctx, c.repository.Scene); err != nil {
return err
}
var sceneFPs []*graphql.FingerprintQueryInput var sceneFPs []*graphql.FingerprintQueryInput
checksum := scene.Checksum() for _, f := range scene.Files.List() {
if checksum != "" { checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5)
sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ if checksum != "" {
Hash: checksum, sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
Algorithm: graphql.FingerprintAlgorithmMd5, Hash: checksum,
}) Algorithm: graphql.FingerprintAlgorithmMd5,
} })
}
oshash := scene.OSHash() oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash)
if oshash != "" { if oshash != "" {
sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
Hash: oshash, Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmOshash, Algorithm: graphql.FingerprintAlgorithmOshash,
}) })
} }
phash := scene.Phash() phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash)
if phash != 0 { if phash != 0 {
phashStr := utils.PhashToString(phash) phashStr := utils.PhashToString(phash)
sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
Hash: phashStr, Hash: phashStr,
Algorithm: graphql.FingerprintAlgorithmPhash, Algorithm: graphql.FingerprintAlgorithmPhash,
}) })
}
} }
fingerprints = append(fingerprints, sceneFPs) fingerprints = append(fingerprints, sceneFPs)
@@ -232,6 +240,10 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
return err return err
} }
if err := scene.LoadFiles(ctx, qb); err != nil {
return err
}
stashIDs := scene.StashIDs.List() stashIDs := scene.StashIDs.List()
sceneStashID := "" sceneStashID := ""
for _, stashID := range stashIDs { for _, stashID := range stashIDs {
@@ -241,41 +253,46 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
} }
if sceneStashID != "" { if sceneStashID != "" {
duration := scene.Duration() for _, f := range scene.Files.List() {
if checksum := scene.Checksum(); checksum != "" && duration != 0 { duration := f.Duration
fingerprint := graphql.FingerprintInput{
Hash: checksum,
Algorithm: graphql.FingerprintAlgorithmMd5,
Duration: int(duration),
}
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
SceneID: sceneStashID,
Fingerprint: &fingerprint,
})
}
if oshash := scene.OSHash(); oshash != "" && duration != 0 { if duration != 0 {
fingerprint := graphql.FingerprintInput{ if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" {
Hash: oshash, fingerprint := graphql.FingerprintInput{
Algorithm: graphql.FingerprintAlgorithmOshash, Hash: checksum,
Duration: int(duration), Algorithm: graphql.FingerprintAlgorithmMd5,
} Duration: int(duration),
fingerprints = append(fingerprints, graphql.FingerprintSubmission{ }
SceneID: sceneStashID, fingerprints = append(fingerprints, graphql.FingerprintSubmission{
Fingerprint: &fingerprint, SceneID: sceneStashID,
}) Fingerprint: &fingerprint,
} })
}
if phash := scene.Phash(); phash != 0 && duration != 0 { if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" {
fingerprint := graphql.FingerprintInput{ fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash), Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmPhash, Algorithm: graphql.FingerprintAlgorithmOshash,
Duration: int(duration), Duration: int(duration),
}
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
SceneID: sceneStashID,
Fingerprint: &fingerprint,
})
}
if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash),
Algorithm: graphql.FingerprintAlgorithmPhash,
Duration: int(duration),
}
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
SceneID: sceneStashID,
Fingerprint: &fingerprint,
})
}
} }
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
SceneID: sceneStashID,
Fingerprint: &fingerprint,
})
} }
} }
} }
@@ -778,7 +795,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
} }
for _, stashID := range stashIDs { for _, stashID := range stashIDs {
c := stashID c := stashID
if c.Endpoint == endpoint { if stashID.Endpoint == endpoint {
studioDraft.ID = &c.StashID studioDraft.ID = &c.StashID
break break
} }
@@ -787,32 +804,39 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
} }
fingerprints := []*graphql.FingerprintInput{} fingerprints := []*graphql.FingerprintInput{}
duration := scene.Duration()
if oshash := scene.OSHash(); oshash != "" && duration != 0 {
fingerprint := graphql.FingerprintInput{
Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmOshash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if checksum := scene.Checksum(); checksum != "" && duration != 0 { // submit all file fingerprints
fingerprint := graphql.FingerprintInput{ for _, f := range scene.Files.List() {
Hash: checksum, duration := f.Duration
Algorithm: graphql.FingerprintAlgorithmMd5,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if phash := scene.Phash(); phash != 0 && duration != 0 { if duration != 0 {
fingerprint := graphql.FingerprintInput{ if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" {
Hash: utils.PhashToString(phash), fingerprint := graphql.FingerprintInput{
Algorithm: graphql.FingerprintAlgorithmPhash, Hash: oshash,
Duration: int(duration), Algorithm: graphql.FingerprintAlgorithmOshash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" {
fingerprint := graphql.FingerprintInput{
Hash: checksum,
Algorithm: graphql.FingerprintAlgorithmMd5,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash),
Algorithm: graphql.FingerprintAlgorithmPhash,
Duration: int(duration),
}
fingerprints = append(fingerprints, &fingerprint)
}
} }
fingerprints = append(fingerprints, &fingerprint)
} }
draft.Fingerprints = fingerprints draft.Fingerprints = fingerprints
@@ -854,11 +878,13 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
} }
draft.Tags = tags draft.Tags = tags
exists, _ := fsutil.FileExists(imagePath) if imagePath != "" {
if exists { exists, _ := fsutil.FileExists(imagePath)
file, err := os.Open(imagePath) if exists {
if err == nil { file, err := os.Open(imagePath)
image = file if err == nil {
image = file
}
} }
} }

View File

@@ -60,21 +60,38 @@ func (r *galleryRow) fromGallery(o models.Gallery) {
r.UpdatedAt = o.UpdatedAt r.UpdatedAt = o.UpdatedAt
} }
func (r *galleryRow) resolve() *models.Gallery { type galleryQueryRow struct {
return &models.Gallery{ galleryRow
ID: r.ID, FolderPath zero.String `db:"folder_path"`
Title: r.Title.String, PrimaryFileID null.Int `db:"primary_file_id"`
URL: r.URL.String, PrimaryFileFolderPath zero.String `db:"primary_file_folder_path"`
Date: r.Date.DatePtr(), PrimaryFileBasename zero.String `db:"primary_file_basename"`
Details: r.Details.String, PrimaryFileChecksum zero.String `db:"primary_file_checksum"`
Rating: nullIntPtr(r.Rating), }
Organized: r.Organized,
StudioID: nullIntPtr(r.StudioID), func (r *galleryQueryRow) resolve() *models.Gallery {
FolderID: nullIntFolderIDPtr(r.FolderID), ret := &models.Gallery{
// FolderPath: r.FolderPath.String, ID: r.ID,
CreatedAt: r.CreatedAt, Title: r.Title.String,
UpdatedAt: r.UpdatedAt, URL: r.URL.String,
Date: r.Date.DatePtr(),
Details: r.Details.String,
Rating: nullIntPtr(r.Rating),
Organized: r.Organized,
StudioID: nullIntPtr(r.StudioID),
FolderID: nullIntFolderIDPtr(r.FolderID),
PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
CreatedAt: r.CreatedAt,
UpdatedAt: r.UpdatedAt,
} }
if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
ret.Path = filepath.Join(r.PrimaryFileFolderPath.String, r.PrimaryFileBasename.String)
} else if r.FolderPath.Valid {
ret.Path = r.FolderPath.String
}
return ret
} }
type galleryRowRecord struct { type galleryRowRecord struct {
@@ -184,13 +201,15 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler
} }
} }
fileIDs := make([]file.ID, len(updatedObject.Files)) if updatedObject.Files.Loaded() {
for i, f := range updatedObject.Files { fileIDs := make([]file.ID, len(updatedObject.Files.List()))
fileIDs[i] = f.Base().ID for i, f := range updatedObject.Files.List() {
} fileIDs[i] = f.Base().ID
}
if err := galleriesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil { if err := galleriesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil {
return err return err
}
} }
return nil return nil
@@ -235,7 +254,33 @@ func (qb *GalleryStore) Destroy(ctx context.Context, id int) error {
} }
func (qb *GalleryStore) selectDataset() *goqu.SelectDataset { func (qb *GalleryStore) selectDataset() *goqu.SelectDataset {
return dialect.From(qb.table()).Select(qb.table().All()) table := qb.table()
files := fileTableMgr.table
folders := folderTableMgr.table
galleryFolder := folderTableMgr.table.As("gallery_folder")
return dialect.From(table).LeftJoin(
galleriesFilesJoinTable,
goqu.On(
galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn)),
galleriesFilesJoinTable.Col("primary").Eq(1),
),
).LeftJoin(
files,
goqu.On(files.Col(idColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
folders,
goqu.On(folders.Col(idColumn).Eq(files.Col("parent_folder_id"))),
).LeftJoin(
galleryFolder,
goqu.On(galleryFolder.Col(idColumn).Eq(table.Col("folder_id"))),
).Select(
qb.table().All(),
galleriesFilesJoinTable.Col(fileIDColumn).As("primary_file_id"),
folders.Col("path").As("primary_file_folder_path"),
files.Col("basename").As("primary_file_basename"),
galleryFolder.Col("path").As("folder_path"),
)
} }
func (qb *GalleryStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Gallery, error) { func (qb *GalleryStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Gallery, error) {
@@ -255,7 +300,7 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*
const single = false const single = false
var ret []*models.Gallery var ret []*models.Gallery
if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
var f galleryRow var f galleryQueryRow
if err := r.StructScan(&f); err != nil { if err := r.StructScan(&f); err != nil {
return err return err
} }
@@ -268,38 +313,10 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*
return nil, err return nil, err
} }
for _, s := range ret {
if err := qb.resolveRelationships(ctx, s); err != nil {
return nil, err
}
}
return ret, nil return ret, nil
} }
func (qb *GalleryStore) resolveRelationships(ctx context.Context, s *models.Gallery) error { func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, error) {
var err error
// files
s.Files, err = qb.getFiles(ctx, s.ID)
if err != nil {
return fmt.Errorf("resolving gallery files: %w", err)
}
// folder
if s.FolderID != nil {
folder, err := qb.folderStore.Find(ctx, *s.FolderID)
if err != nil {
return fmt.Errorf("resolving gallery folder: %w", err)
}
s.FolderPath = folder.Path
}
return nil
}
func (qb *GalleryStore) getFiles(ctx context.Context, id int) ([]file.File, error) {
fileIDs, err := qb.filesRepository().get(ctx, id) fileIDs, err := qb.filesRepository().get(ctx, id)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -317,6 +334,11 @@ func (qb *GalleryStore) getFiles(ctx context.Context, id int) ([]file.File, erro
return ret, nil return ret, nil
} }
func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) {
const primaryOnly = false
return qb.filesRepository().getMany(ctx, ids, primaryOnly)
}
func (qb *GalleryStore) Find(ctx context.Context, id int) (*models.Gallery, error) { func (qb *GalleryStore) Find(ctx context.Context, id int) (*models.Gallery, error) {
q := qb.selectDataset().Where(qb.tableMgr.byID(id)) q := qb.selectDataset().Where(qb.tableMgr.byID(id))

View File

@@ -33,6 +33,19 @@ func loadGalleryRelationships(ctx context.Context, expected models.Gallery, actu
return err return err
} }
} }
if expected.Files.Loaded() {
if err := actual.LoadFiles(ctx, db.Gallery); err != nil {
return err
}
}
// clear Path, Checksum, PrimaryFileID
if expected.Path == "" {
actual.Path = ""
}
if expected.PrimaryFileID == nil {
actual.PrimaryFileID = nil
}
return nil return nil
} }
@@ -71,7 +84,6 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}), SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}), PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
Files: []file.File{},
}, },
false, false,
}, },
@@ -85,9 +97,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
Files: []file.File{ Files: models.NewRelatedFiles([]file.File{
galleryFile, galleryFile,
}, }),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}), SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
@@ -134,8 +146,8 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
s := tt.newObject s := tt.newObject
var fileIDs []file.ID var fileIDs []file.ID
if len(s.Files) > 0 { if s.Files.Loaded() {
fileIDs = []file.ID{s.Files[0].Base().ID} fileIDs = []file.ID{s.Files.List()[0].Base().ID}
} }
if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr {
@@ -217,9 +229,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
Files: []file.File{ Files: models.NewRelatedFiles([]file.File{
makeGalleryFileWithID(galleryIdxWithScene), makeGalleryFileWithID(galleryIdxWithScene),
}, }),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}), SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
@@ -231,10 +243,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"clear nullables", "clear nullables",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithImage], ID: galleryIDs[galleryIdxWithImage],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithImage),
},
SceneIDs: models.NewRelatedIDs([]int{}), SceneIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -247,10 +256,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"clear scene ids", "clear scene ids",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithScene], ID: galleryIDs[galleryIdxWithScene],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithScene),
},
SceneIDs: models.NewRelatedIDs([]int{}), SceneIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -263,10 +269,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"clear tag ids", "clear tag ids",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithTag], ID: galleryIDs[galleryIdxWithTag],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithTag),
},
SceneIDs: models.NewRelatedIDs([]int{}), SceneIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -279,10 +282,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"clear performer ids", "clear performer ids",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithPerformer], ID: galleryIDs[galleryIdxWithPerformer],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithPerformer),
},
SceneIDs: models.NewRelatedIDs([]int{}), SceneIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -295,10 +295,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"invalid studio id", "invalid studio id",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithImage], ID: galleryIDs[galleryIdxWithImage],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithImage),
},
Organized: true, Organized: true,
StudioID: &invalidID, StudioID: &invalidID,
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -309,10 +306,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"invalid scene id", "invalid scene id",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithImage], ID: galleryIDs[galleryIdxWithImage],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithImage),
},
Organized: true, Organized: true,
SceneIDs: models.NewRelatedIDs([]int{invalidID}), SceneIDs: models.NewRelatedIDs([]int{invalidID}),
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -323,10 +317,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"invalid tag id", "invalid tag id",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithImage], ID: galleryIDs[galleryIdxWithImage],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithImage),
},
Organized: true, Organized: true,
TagIDs: models.NewRelatedIDs([]int{invalidID}), TagIDs: models.NewRelatedIDs([]int{invalidID}),
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -337,10 +328,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
{ {
"invalid performer id", "invalid performer id",
&models.Gallery{ &models.Gallery{
ID: galleryIDs[galleryIdxWithImage], ID: galleryIDs[galleryIdxWithImage],
Files: []file.File{
makeGalleryFileWithID(galleryIdxWithImage),
},
Organized: true, Organized: true,
PerformerIDs: models.NewRelatedIDs([]int{invalidID}), PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -385,8 +373,10 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
} }
func clearGalleryFileIDs(gallery *models.Gallery) { func clearGalleryFileIDs(gallery *models.Gallery) {
for _, f := range gallery.Files { if gallery.Files.Loaded() {
f.Base().ID = 0 for _, f := range gallery.Files.List() {
f.Base().ID = 0
}
} }
} }
@@ -459,9 +449,9 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
StudioID: &studioIDs[studioIdxWithGallery], StudioID: &studioIDs[studioIdxWithGallery],
Files: []file.File{ Files: models.NewRelatedFiles([]file.File{
makeGalleryFile(galleryIdxWithImage), makeGalleryFile(galleryIdxWithImage),
}, }),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdxWithGallery]}), SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdxWithGallery]}),
@@ -476,9 +466,9 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
clearGalleryPartial(), clearGalleryPartial(),
models.Gallery{ models.Gallery{
ID: galleryIDs[galleryIdxWithImage], ID: galleryIDs[galleryIdxWithImage],
Files: []file.File{ Files: models.NewRelatedFiles([]file.File{
makeGalleryFile(galleryIdxWithImage), makeGalleryFile(galleryIdxWithImage),
}, }),
SceneIDs: models.NewRelatedIDs([]int{}), SceneIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -509,12 +499,12 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
return return
} }
clearGalleryFileIDs(got)
// load relationships // load relationships
if err := loadGalleryRelationships(ctx, tt.want, got); err != nil { if err := loadGalleryRelationships(ctx, tt.want, got); err != nil {
t.Errorf("loadGalleryRelationships() error = %v", err) t.Errorf("loadGalleryRelationships() error = %v", err)
return return
} }
clearGalleryFileIDs(got)
assert.Equal(tt.want, *got) assert.Equal(tt.want, *got)
s, err := qb.Find(ctx, tt.id) s, err := qb.Find(ctx, tt.id)
@@ -522,12 +512,12 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
t.Errorf("galleryQueryBuilder.Find() error = %v", err) t.Errorf("galleryQueryBuilder.Find() error = %v", err)
} }
clearGalleryFileIDs(s)
// load relationships // load relationships
if err := loadGalleryRelationships(ctx, tt.want, s); err != nil { if err := loadGalleryRelationships(ctx, tt.want, s); err != nil {
t.Errorf("loadGalleryRelationships() error = %v", err) t.Errorf("loadGalleryRelationships() error = %v", err)
return return
} }
clearGalleryFileIDs(s)
assert.Equal(tt.want, *s) assert.Equal(tt.want, *s)
}) })
} }
@@ -858,7 +848,7 @@ func makeGalleryWithID(index int) *models.Gallery {
ret.Date = nil ret.Date = nil
} }
ret.Files = []file.File{makeGalleryFile(index)} ret.Files = models.NewRelatedFiles([]file.File{makeGalleryFile(index)})
return ret return ret
} }
@@ -908,13 +898,12 @@ func Test_galleryQueryBuilder_Find(t *testing.T) {
} }
if got != nil { if got != nil {
clearGalleryFileIDs(got)
// load relationships // load relationships
if err := loadGalleryRelationships(ctx, *tt.want, got); err != nil { if err := loadGalleryRelationships(ctx, *tt.want, got); err != nil {
t.Errorf("loadGalleryRelationships() error = %v", err) t.Errorf("loadGalleryRelationships() error = %v", err)
return return
} }
clearGalleryFileIDs(got)
} }
assert.Equal(tt.want, got) assert.Equal(tt.want, got)
}) })
@@ -923,14 +912,13 @@ func Test_galleryQueryBuilder_Find(t *testing.T) {
func postFindGalleries(ctx context.Context, want []*models.Gallery, got []*models.Gallery) error { func postFindGalleries(ctx context.Context, want []*models.Gallery, got []*models.Gallery) error {
for i, s := range got { for i, s := range got {
clearGalleryFileIDs(s)
// load relationships // load relationships
if i < len(want) { if i < len(want) {
if err := loadGalleryRelationships(ctx, *want[i], s); err != nil { if err := loadGalleryRelationships(ctx, *want[i], s); err != nil {
return err return err
} }
} }
clearGalleryFileIDs(s)
} }
return nil return nil
@@ -1490,7 +1478,7 @@ func TestGalleryQueryPath(t *testing.T) {
assert.NotEqual(t, 0, count) assert.NotEqual(t, 0, count)
for _, gallery := range got { for _, gallery := range got {
verifyString(t, gallery.Path(), tt.input) verifyString(t, gallery.Path, tt.input)
} }
}) })
} }
@@ -1508,7 +1496,7 @@ func verifyGalleriesPath(ctx context.Context, t *testing.T, pathCriterion models
} }
for _, gallery := range galleries { for _, gallery := range galleries {
verifyString(t, gallery.Path(), pathCriterion) verifyString(t, gallery.Path, pathCriterion)
} }
} }
@@ -1541,8 +1529,8 @@ func TestGalleryQueryPathOr(t *testing.T) {
return nil return nil
} }
assert.Equal(t, gallery1Path, galleries[0].Path()) assert.Equal(t, gallery1Path, galleries[0].Path)
assert.Equal(t, gallery2Path, galleries[1].Path()) assert.Equal(t, gallery2Path, galleries[1].Path)
return nil return nil
}) })
@@ -1575,7 +1563,7 @@ func TestGalleryQueryPathAndRating(t *testing.T) {
return nil return nil
} }
assert.Equal(t, galleryPath, galleries[0].Path()) assert.Equal(t, galleryPath, galleries[0].Path)
assert.Equal(t, *galleryRating, *galleries[0].Rating) assert.Equal(t, *galleryRating, *galleries[0].Rating)
return nil return nil
@@ -1610,7 +1598,7 @@ func TestGalleryQueryPathNotRating(t *testing.T) {
galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil) galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil)
for _, gallery := range galleries { for _, gallery := range galleries {
verifyString(t, gallery.Path(), pathCriterion) verifyString(t, gallery.Path, pathCriterion)
ratingCriterion.Modifier = models.CriterionModifierNotEquals ratingCriterion.Modifier = models.CriterionModifierNotEquals
verifyIntPtr(t, gallery.Rating, ratingCriterion) verifyIntPtr(t, gallery.Rating, ratingCriterion)
} }

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"database/sql" "database/sql"
"fmt" "fmt"
"path/filepath"
"time" "time"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
@@ -48,17 +49,35 @@ func (r *imageRow) fromImage(i models.Image) {
r.UpdatedAt = i.UpdatedAt r.UpdatedAt = i.UpdatedAt
} }
func (r *imageRow) resolve() *models.Image { type imageQueryRow struct {
return &models.Image{ imageRow
PrimaryFileID null.Int `db:"primary_file_id"`
PrimaryFileFolderPath zero.String `db:"primary_file_folder_path"`
PrimaryFileBasename zero.String `db:"primary_file_basename"`
PrimaryFileChecksum zero.String `db:"primary_file_checksum"`
}
func (r *imageQueryRow) resolve() *models.Image {
ret := &models.Image{
ID: r.ID, ID: r.ID,
Title: r.Title.String, Title: r.Title.String,
Rating: nullIntPtr(r.Rating), Rating: nullIntPtr(r.Rating),
Organized: r.Organized, Organized: r.Organized,
OCounter: r.OCounter, OCounter: r.OCounter,
StudioID: nullIntPtr(r.StudioID), StudioID: nullIntPtr(r.StudioID),
PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
Checksum: r.PrimaryFileChecksum.String,
CreatedAt: r.CreatedAt, CreatedAt: r.CreatedAt,
UpdatedAt: r.UpdatedAt, UpdatedAt: r.UpdatedAt,
} }
if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
ret.Path = filepath.Join(r.PrimaryFileFolderPath.String, r.PrimaryFileBasename.String)
}
return ret
} }
type imageRowRecord struct { type imageRowRecord struct {
@@ -203,15 +222,16 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e
} }
} }
fileIDs := make([]file.ID, len(updatedObject.Files)) if updatedObject.Files.Loaded() {
for i, f := range updatedObject.Files { fileIDs := make([]file.ID, len(updatedObject.Files.List()))
fileIDs[i] = f.ID for i, f := range updatedObject.Files.List() {
} fileIDs[i] = f.ID
}
if err := imagesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil { if err := imagesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil {
return err return err
}
} }
return nil return nil
} }
@@ -247,7 +267,36 @@ func (qb *ImageStore) FindMany(ctx context.Context, ids []int) ([]*models.Image,
} }
func (qb *ImageStore) selectDataset() *goqu.SelectDataset { func (qb *ImageStore) selectDataset() *goqu.SelectDataset {
return dialect.From(qb.table()).Select(qb.table().All()) table := qb.table()
files := fileTableMgr.table
folders := folderTableMgr.table
checksum := fingerprintTableMgr.table
return dialect.From(table).LeftJoin(
imagesFilesJoinTable,
goqu.On(
imagesFilesJoinTable.Col(imageIDColumn).Eq(table.Col(idColumn)),
imagesFilesJoinTable.Col("primary").Eq(1),
),
).LeftJoin(
files,
goqu.On(files.Col(idColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
folders,
goqu.On(folders.Col(idColumn).Eq(files.Col("parent_folder_id"))),
).LeftJoin(
checksum,
goqu.On(
checksum.Col(fileIDColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn)),
checksum.Col("type").Eq(file.FingerprintTypeMD5),
),
).Select(
qb.table().All(),
imagesFilesJoinTable.Col(fileIDColumn).As("primary_file_id"),
folders.Col("path").As("primary_file_folder_path"),
files.Col("basename").As("primary_file_basename"),
checksum.Col("fingerprint").As("primary_file_checksum"),
)
} }
func (qb *ImageStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Image, error) { func (qb *ImageStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Image, error) {
@@ -267,7 +316,7 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
const single = false const single = false
var ret []*models.Image var ret []*models.Image
if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
var f imageRow var f imageQueryRow
if err := r.StructScan(&f); err != nil { if err := r.StructScan(&f); err != nil {
return err return err
} }
@@ -280,28 +329,10 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
return nil, err return nil, err
} }
for _, i := range ret {
if err := qb.resolveRelationships(ctx, i); err != nil {
return nil, err
}
}
return ret, nil return ret, nil
} }
func (qb *ImageStore) resolveRelationships(ctx context.Context, i *models.Image) error { func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]*file.ImageFile, error) {
var err error
// files
i.Files, err = qb.getFiles(ctx, i.ID)
if err != nil {
return fmt.Errorf("resolving image files: %w", err)
}
return nil
}
func (qb *ImageStore) getFiles(ctx context.Context, id int) ([]*file.ImageFile, error) {
fileIDs, err := qb.filesRepository().get(ctx, id) fileIDs, err := qb.filesRepository().get(ctx, id)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -325,6 +356,11 @@ func (qb *ImageStore) getFiles(ctx context.Context, id int) ([]*file.ImageFile,
return ret, nil return ret, nil
} }
func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) {
const primaryOnly = false
return qb.filesRepository().getMany(ctx, ids, primaryOnly)
}
func (qb *ImageStore) find(ctx context.Context, id int) (*models.Image, error) { func (qb *ImageStore) find(ctx context.Context, id int) (*models.Image, error) {
q := qb.selectDataset().Where(qb.tableMgr.byID(id)) q := qb.selectDataset().Where(qb.tableMgr.byID(id))
@@ -428,16 +464,7 @@ func (qb *ImageStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*mo
galleriesImagesJoinTable.Col("gallery_id").Eq(galleryID), galleriesImagesJoinTable.Col("gallery_id").Eq(galleryID),
) )
q := qb.selectDataset().Prepared(true).LeftJoin( q := qb.selectDataset().Prepared(true).Where(
imagesFilesJoinTable,
goqu.On(imagesFilesJoinTable.Col(imageIDColumn).Eq(table.Col(idColumn))),
).LeftJoin(
fileTable,
goqu.On(fileTable.Col(idColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
folderTable,
goqu.On(folderTable.Col(idColumn).Eq(fileTable.Col("parent_folder_id"))),
).Where(
table.Col(idColumn).Eq( table.Col(idColumn).Eq(
sq, sq,
), ),

View File

@@ -31,6 +31,22 @@ func loadImageRelationships(ctx context.Context, expected models.Image, actual *
return err return err
} }
} }
if expected.Files.Loaded() {
if err := actual.LoadFiles(ctx, db.Image); err != nil {
return err
}
}
// clear Path, Checksum, PrimaryFileID
if expected.Path == "" {
actual.Path = ""
}
if expected.Checksum == "" {
actual.Checksum = ""
}
if expected.PrimaryFileID == nil {
actual.PrimaryFileID = nil
}
return nil return nil
} }
@@ -64,7 +80,6 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}), PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
Files: []*file.ImageFile{},
}, },
false, false,
}, },
@@ -76,14 +91,16 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
Organized: true, Organized: true,
OCounter: ocounter, OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithImage], StudioID: &studioIDs[studioIdxWithImage],
Files: []*file.ImageFile{ Files: models.NewRelatedImageFiles([]*file.ImageFile{
imageFile.(*file.ImageFile), imageFile.(*file.ImageFile),
}, }),
CreatedAt: createdAt, PrimaryFileID: &imageFile.Base().ID,
UpdatedAt: updatedAt, Path: imageFile.Base().Path,
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), CreatedAt: createdAt,
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}), UpdatedAt: updatedAt,
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}), GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
}, },
false, false,
}, },
@@ -124,10 +141,11 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
var fileIDs []file.ID var fileIDs []file.ID
for _, f := range tt.newObject.Files { if tt.newObject.Files.Loaded() {
fileIDs = append(fileIDs, f.ID) for _, f := range tt.newObject.Files.List() {
fileIDs = append(fileIDs, f.ID)
}
} }
s := tt.newObject s := tt.newObject
if err := qb.Create(ctx, &models.ImageCreateInput{ if err := qb.Create(ctx, &models.ImageCreateInput{
Image: &s, Image: &s,
@@ -174,8 +192,10 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
} }
func clearImageFileIDs(image *models.Image) { func clearImageFileIDs(image *models.Image) {
for _, f := range image.Files { if image.Files.Loaded() {
f.Base().ID = 0 for _, f := range image.Files.List() {
f.Base().ID = 0
}
} }
} }
@@ -202,15 +222,12 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"full", "full",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Title: title, Title: title,
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
OCounter: ocounter, OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithImage], StudioID: &studioIDs[studioIdxWithImage],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
@@ -222,10 +239,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"clear nullables", "clear nullables",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -238,10 +252,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"clear gallery ids", "clear gallery ids",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -254,10 +265,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"clear tag ids", "clear tag ids",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithTag], ID: imageIDs[imageIdxWithTag],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithTag),
},
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -270,10 +278,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"clear performer ids", "clear performer ids",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithPerformer], ID: imageIDs[imageIdxWithPerformer],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithPerformer),
},
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -286,10 +291,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"invalid studio id", "invalid studio id",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
Organized: true, Organized: true,
StudioID: &invalidID, StudioID: &invalidID,
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -300,10 +302,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"invalid gallery id", "invalid gallery id",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
Organized: true, Organized: true,
GalleryIDs: models.NewRelatedIDs([]int{invalidID}), GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -314,10 +313,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"invalid tag id", "invalid tag id",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
Organized: true, Organized: true,
TagIDs: models.NewRelatedIDs([]int{invalidID}), TagIDs: models.NewRelatedIDs([]int{invalidID}),
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -328,10 +324,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
{ {
"invalid performer id", "invalid performer id",
&models.Image{ &models.Image{
ID: imageIDs[imageIdxWithGallery], ID: imageIDs[imageIdxWithGallery],
Files: []*file.ImageFile{
makeImageFileWithID(imageIdxWithGallery),
},
Organized: true, Organized: true,
PerformerIDs: models.NewRelatedIDs([]int{invalidID}), PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
CreatedAt: createdAt, CreatedAt: createdAt,
@@ -433,9 +426,9 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
Organized: true, Organized: true,
OCounter: ocounter, OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithImage], StudioID: &studioIDs[studioIdxWithImage],
Files: []*file.ImageFile{ Files: models.NewRelatedImageFiles([]*file.ImageFile{
makeImageFile(imageIdx1WithGallery), makeImageFile(imageIdx1WithGallery),
}, }),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
@@ -451,9 +444,9 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
models.Image{ models.Image{
ID: imageIDs[imageIdx1WithGallery], ID: imageIDs[imageIdx1WithGallery],
OCounter: getOCounter(imageIdx1WithGallery), OCounter: getOCounter(imageIdx1WithGallery),
Files: []*file.ImageFile{ Files: models.NewRelatedImageFiles([]*file.ImageFile{
makeImageFile(imageIdx1WithGallery), makeImageFile(imageIdx1WithGallery),
}, }),
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -484,12 +477,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
return return
} }
clearImageFileIDs(got)
// load relationships // load relationships
if err := loadImageRelationships(ctx, tt.want, got); err != nil { if err := loadImageRelationships(ctx, tt.want, got); err != nil {
t.Errorf("loadImageRelationships() error = %v", err) t.Errorf("loadImageRelationships() error = %v", err)
return return
} }
clearImageFileIDs(got)
assert.Equal(tt.want, *got) assert.Equal(tt.want, *got)
@@ -498,12 +491,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
t.Errorf("imageQueryBuilder.Find() error = %v", err) t.Errorf("imageQueryBuilder.Find() error = %v", err)
} }
clearImageFileIDs(s)
// load relationships // load relationships
if err := loadImageRelationships(ctx, tt.want, s); err != nil { if err := loadImageRelationships(ctx, tt.want, s); err != nil {
t.Errorf("loadImageRelationships() error = %v", err) t.Errorf("loadImageRelationships() error = %v", err)
return return
} }
clearImageFileIDs(s)
assert.Equal(tt.want, *s) assert.Equal(tt.want, *s)
}) })
} }
@@ -952,7 +945,7 @@ func makeImageWithID(index int) *models.Image {
ret := makeImage(index) ret := makeImage(index)
ret.ID = imageIDs[index] ret.ID = imageIDs[index]
ret.Files = []*file.ImageFile{makeImageFile(index)} ret.Files = models.NewRelatedImageFiles([]*file.ImageFile{makeImageFile(index)})
return ret return ret
} }
@@ -1002,13 +995,12 @@ func Test_imageQueryBuilder_Find(t *testing.T) {
} }
if got != nil { if got != nil {
clearImageFileIDs(got)
// load relationships // load relationships
if err := loadImageRelationships(ctx, *tt.want, got); err != nil { if err := loadImageRelationships(ctx, *tt.want, got); err != nil {
t.Errorf("loadImageRelationships() error = %v", err) t.Errorf("loadImageRelationships() error = %v", err)
return return
} }
clearImageFileIDs(got)
} }
assert.Equal(tt.want, got) assert.Equal(tt.want, got)
}) })
@@ -1017,14 +1009,13 @@ func Test_imageQueryBuilder_Find(t *testing.T) {
func postFindImages(ctx context.Context, want []*models.Image, got []*models.Image) error { func postFindImages(ctx context.Context, want []*models.Image, got []*models.Image) error {
for i, s := range got { for i, s := range got {
clearImageFileIDs(s)
// load relationships // load relationships
if i < len(want) { if i < len(want) {
if err := loadImageRelationships(ctx, *want[i], s); err != nil { if err := loadImageRelationships(ctx, *want[i], s); err != nil {
return err return err
} }
} }
clearImageFileIDs(s)
} }
return nil return nil
@@ -1546,7 +1537,7 @@ func verifyImagePath(t *testing.T, pathCriterion models.StringCriterionInput, ex
assert.Equal(t, expected, len(images), "number of returned images") assert.Equal(t, expected, len(images), "number of returned images")
for _, image := range images { for _, image := range images {
verifyString(t, image.Path(), pathCriterion) verifyString(t, image.Path, pathCriterion)
} }
return nil return nil
@@ -1582,8 +1573,8 @@ func TestImageQueryPathOr(t *testing.T) {
return nil return nil
} }
assert.Equal(t, image1Path, images[0].Path()) assert.Equal(t, image1Path, images[0].Path)
assert.Equal(t, image2Path, images[1].Path()) assert.Equal(t, image2Path, images[1].Path)
return nil return nil
}) })
@@ -1613,7 +1604,7 @@ func TestImageQueryPathAndRating(t *testing.T) {
images := queryImages(ctx, t, sqb, &imageFilter, nil) images := queryImages(ctx, t, sqb, &imageFilter, nil)
assert.Len(t, images, 1) assert.Len(t, images, 1)
assert.Equal(t, imagePath, images[0].Path()) assert.Equal(t, imagePath, images[0].Path)
assert.Equal(t, int(imageRating.Int64), *images[0].Rating) assert.Equal(t, int(imageRating.Int64), *images[0].Rating)
return nil return nil
@@ -1648,7 +1639,7 @@ func TestImageQueryPathNotRating(t *testing.T) {
images := queryImages(ctx, t, sqb, &imageFilter, nil) images := queryImages(ctx, t, sqb, &imageFilter, nil)
for _, image := range images { for _, image := range images {
verifyString(t, image.Path(), pathCriterion) verifyString(t, image.Path, pathCriterion)
ratingCriterion.Modifier = models.CriterionModifierNotEquals ratingCriterion.Modifier = models.CriterionModifierNotEquals
verifyIntPtr(t, image.Rating, ratingCriterion) verifyIntPtr(t, image.Rating, ratingCriterion)
} }
@@ -1802,7 +1793,12 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) {
} }
for _, image := range images { for _, image := range images {
verifyImageResolution(t, image.Files[0].Height, resolution) if err := image.LoadPrimaryFile(ctx, db.File); err != nil {
t.Errorf("Error loading primary file: %s", err.Error())
return nil
}
verifyImageResolution(t, image.Files.Primary().Height, resolution)
} }
return nil return nil

View File

@@ -477,6 +477,61 @@ type filesRepository struct {
repository repository
} }
type relatedFileRow struct {
ID int `db:"id"`
FileID file.ID `db:"file_id"`
Primary bool `db:"primary"`
}
func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]file.ID, error) {
var primaryClause string
if primaryOnly {
primaryClause = " AND `primary` = 1"
}
query := fmt.Sprintf("SELECT %s as id, file_id, `primary` from %s WHERE %[1]s IN %[3]s%s", r.idColumn, r.tableName, getInBinding(len(ids)), primaryClause)
idi := make([]interface{}, len(ids))
for i, id := range ids {
idi[i] = id
}
var fileRows []relatedFileRow
if err := r.queryFunc(ctx, query, idi, false, func(rows *sqlx.Rows) error {
var f relatedFileRow
if err := rows.StructScan(&f); err != nil {
return err
}
fileRows = append(fileRows, f)
return nil
}); err != nil {
return nil, err
}
ret := make([][]file.ID, len(ids))
idToIndex := make(map[int]int)
for i, id := range ids {
idToIndex[id] = i
}
for _, row := range fileRows {
id := row.ID
fileID := row.FileID
if row.Primary {
// prepend to list
ret[idToIndex[id]] = append([]file.ID{fileID}, ret[idToIndex[id]]...)
} else {
ret[idToIndex[id]] = append(ret[idToIndex[id]], row.FileID)
}
}
return ret, nil
}
func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) {
query := fmt.Sprintf("SELECT file_id, `primary` from %s WHERE %s = ?", r.tableName, r.idColumn) query := fmt.Sprintf("SELECT file_id, `primary` from %s WHERE %s = ?", r.tableName, r.idColumn)

View File

@@ -82,8 +82,17 @@ func (r *sceneRow) fromScene(o models.Scene) {
r.UpdatedAt = o.UpdatedAt r.UpdatedAt = o.UpdatedAt
} }
func (r *sceneRow) resolve() *models.Scene { type sceneQueryRow struct {
return &models.Scene{ sceneRow
PrimaryFileID null.Int `db:"primary_file_id"`
PrimaryFileFolderPath zero.String `db:"primary_file_folder_path"`
PrimaryFileBasename zero.String `db:"primary_file_basename"`
PrimaryFileOshash zero.String `db:"primary_file_oshash"`
PrimaryFileChecksum zero.String `db:"primary_file_checksum"`
}
func (r *sceneQueryRow) resolve() *models.Scene {
ret := &models.Scene{
ID: r.ID, ID: r.ID,
Title: r.Title.String, Title: r.Title.String,
Details: r.Details.String, Details: r.Details.String,
@@ -93,9 +102,20 @@ func (r *sceneRow) resolve() *models.Scene {
Organized: r.Organized, Organized: r.Organized,
OCounter: r.OCounter, OCounter: r.OCounter,
StudioID: nullIntPtr(r.StudioID), StudioID: nullIntPtr(r.StudioID),
PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
OSHash: r.PrimaryFileOshash.String,
Checksum: r.PrimaryFileChecksum.String,
CreatedAt: r.CreatedAt, CreatedAt: r.CreatedAt,
UpdatedAt: r.UpdatedAt, UpdatedAt: r.UpdatedAt,
} }
if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
ret.Path = filepath.Join(r.PrimaryFileFolderPath.String, r.PrimaryFileBasename.String)
}
return ret
} }
type sceneRowRecord struct { type sceneRowRecord struct {
@@ -278,13 +298,15 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e
} }
} }
fileIDs := make([]file.ID, len(updatedObject.Files)) if updatedObject.Files.Loaded() {
for i, f := range updatedObject.Files { fileIDs := make([]file.ID, len(updatedObject.Files.List()))
fileIDs[i] = f.ID for i, f := range updatedObject.Files.List() {
} fileIDs[i] = f.ID
}
if err := scenesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil { if err := scenesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil {
return err return err
}
} }
return nil return nil
@@ -333,7 +355,43 @@ func (qb *SceneStore) FindMany(ctx context.Context, ids []int) ([]*models.Scene,
func (qb *SceneStore) selectDataset() *goqu.SelectDataset { func (qb *SceneStore) selectDataset() *goqu.SelectDataset {
table := qb.table() table := qb.table()
return dialect.From(table).Select(table.All()) files := fileTableMgr.table
folders := folderTableMgr.table
checksum := fingerprintTableMgr.table.As("fingerprint_md5")
oshash := fingerprintTableMgr.table.As("fingerprint_oshash")
return dialect.From(table).LeftJoin(
scenesFilesJoinTable,
goqu.On(
scenesFilesJoinTable.Col(sceneIDColumn).Eq(table.Col(idColumn)),
scenesFilesJoinTable.Col("primary").Eq(1),
),
).LeftJoin(
files,
goqu.On(files.Col(idColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
folders,
goqu.On(folders.Col(idColumn).Eq(files.Col("parent_folder_id"))),
).LeftJoin(
checksum,
goqu.On(
checksum.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)),
checksum.Col("type").Eq(file.FingerprintTypeMD5),
),
).LeftJoin(
oshash,
goqu.On(
oshash.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)),
oshash.Col("type").Eq(file.FingerprintTypeOshash),
),
).Select(
qb.table().All(),
scenesFilesJoinTable.Col(fileIDColumn).As("primary_file_id"),
folders.Col("path").As("primary_file_folder_path"),
files.Col("basename").As("primary_file_basename"),
checksum.Col("fingerprint").As("primary_file_checksum"),
oshash.Col("fingerprint").As("primary_file_oshash"),
)
} }
func (qb *SceneStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Scene, error) { func (qb *SceneStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Scene, error) {
@@ -353,7 +411,7 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
const single = false const single = false
var ret []*models.Scene var ret []*models.Scene
if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
var f sceneRow var f sceneQueryRow
if err := r.StructScan(&f); err != nil { if err := r.StructScan(&f); err != nil {
return err return err
} }
@@ -366,28 +424,10 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
return nil, err return nil, err
} }
for _, s := range ret {
if err := qb.resolveRelationships(ctx, s); err != nil {
return nil, err
}
}
return ret, nil return ret, nil
} }
func (qb *SceneStore) resolveRelationships(ctx context.Context, s *models.Scene) error { func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, error) {
var err error
// files
s.Files, err = qb.getFiles(ctx, s.ID)
if err != nil {
return fmt.Errorf("resolving scene files: %w", err)
}
return nil
}
func (qb *SceneStore) getFiles(ctx context.Context, id int) ([]*file.VideoFile, error) {
fileIDs, err := qb.filesRepository().get(ctx, id) fileIDs, err := qb.filesRepository().get(ctx, id)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -411,6 +451,11 @@ func (qb *SceneStore) getFiles(ctx context.Context, id int) ([]*file.VideoFile,
return ret, nil return ret, nil
} }
func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) {
const primaryOnly = false
return qb.filesRepository().getMany(ctx, ids, primaryOnly)
}
func (qb *SceneStore) find(ctx context.Context, id int) (*models.Scene, error) { func (qb *SceneStore) find(ctx context.Context, id int) (*models.Scene, error) {
q := qb.selectDataset().Where(qb.tableMgr.byID(id)) q := qb.selectDataset().Where(qb.tableMgr.byID(id))
@@ -680,16 +725,8 @@ func (qb *SceneStore) All(ctx context.Context) ([]*models.Scene, error) {
table := qb.table() table := qb.table()
fileTable := fileTableMgr.table fileTable := fileTableMgr.table
folderTable := folderTableMgr.table folderTable := folderTableMgr.table
return qb.getMany(ctx, qb.selectDataset().LeftJoin(
scenesFilesJoinTable, return qb.getMany(ctx, qb.selectDataset().Order(
goqu.On(scenesFilesJoinTable.Col(sceneIDColumn).Eq(table.Col(idColumn))),
).LeftJoin(
fileTable,
goqu.On(fileTable.Col(idColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
folderTable,
goqu.On(folderTable.Col(idColumn).Eq(fileTable.Col("parent_folder_id"))),
).Order(
folderTable.Col("path").Asc(), folderTable.Col("path").Asc(),
fileTable.Col("basename").Asc(), fileTable.Col("basename").Asc(),
table.Col("date").Asc(), table.Col("date").Asc(),

View File

@@ -47,6 +47,25 @@ func loadSceneRelationships(ctx context.Context, expected models.Scene, actual *
return err return err
} }
} }
if expected.Files.Loaded() {
if err := actual.LoadFiles(ctx, db.Scene); err != nil {
return err
}
}
// clear Path, Checksum, PrimaryFileID
if expected.Path == "" {
actual.Path = ""
}
if expected.Checksum == "" {
actual.Checksum = ""
}
if expected.OSHash == "" {
actual.OSHash = ""
}
if expected.PrimaryFileID == nil {
actual.PrimaryFileID = nil
}
return nil return nil
} }
@@ -113,7 +132,6 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Endpoint: endpoint2, Endpoint: endpoint2,
}, },
}), }),
Files: []*file.VideoFile{},
}, },
false, false,
}, },
@@ -128,9 +146,9 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Organized: true, Organized: true,
OCounter: ocounter, OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
Files: []*file.VideoFile{ Files: models.NewRelatedVideoFiles([]*file.VideoFile{
videoFile.(*file.VideoFile), videoFile.(*file.VideoFile),
}, }),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}), GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
@@ -208,8 +226,10 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
var fileIDs []file.ID var fileIDs []file.ID
for _, f := range tt.newObject.Files { if tt.newObject.Files.Loaded() {
fileIDs = append(fileIDs, f.ID) for _, f := range tt.newObject.Files.List() {
fileIDs = append(fileIDs, f.ID)
}
} }
s := tt.newObject s := tt.newObject
@@ -258,8 +278,10 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
} }
func clearSceneFileIDs(scene *models.Scene) { func clearSceneFileIDs(scene *models.Scene) {
for _, f := range scene.Files { if scene.Files.Loaded() {
f.Base().ID = 0 for _, f := range scene.Files.List() {
f.Base().ID = 0
}
} }
} }
@@ -296,10 +318,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"full", "full",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithGallery], ID: sceneIDs[sceneIdxWithGallery],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithGallery),
},
Title: title, Title: title,
Details: details, Details: details,
URL: url, URL: url,
@@ -339,10 +358,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"clear nullables", "clear nullables",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithSpacedName], ID: sceneIDs[sceneIdxWithSpacedName],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithSpacedName),
},
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -354,10 +370,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"clear gallery ids", "clear gallery ids",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithGallery], ID: sceneIDs[sceneIdxWithGallery],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithGallery),
},
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
}, },
false, false,
@@ -365,10 +378,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"clear tag ids", "clear tag ids",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithTag], ID: sceneIDs[sceneIdxWithTag],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithTag),
},
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
}, },
false, false,
@@ -376,10 +386,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"clear performer ids", "clear performer ids",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithPerformer], ID: sceneIDs[sceneIdxWithPerformer],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithPerformer),
},
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
}, },
false, false,
@@ -387,10 +394,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"clear movies", "clear movies",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithMovie], ID: sceneIDs[sceneIdxWithMovie],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithMovie),
},
Movies: models.NewRelatedMovies([]models.MoviesScenes{}), Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
}, },
false, false,
@@ -398,10 +402,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"invalid studio id", "invalid studio id",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithGallery], ID: sceneIDs[sceneIdxWithGallery],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithGallery),
},
StudioID: &invalidID, StudioID: &invalidID,
}, },
true, true,
@@ -409,10 +410,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"invalid gallery id", "invalid gallery id",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithGallery], ID: sceneIDs[sceneIdxWithGallery],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithGallery),
},
GalleryIDs: models.NewRelatedIDs([]int{invalidID}), GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
}, },
true, true,
@@ -420,10 +418,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"invalid tag id", "invalid tag id",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithGallery], ID: sceneIDs[sceneIdxWithGallery],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithGallery),
},
TagIDs: models.NewRelatedIDs([]int{invalidID}), TagIDs: models.NewRelatedIDs([]int{invalidID}),
}, },
true, true,
@@ -431,10 +426,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
{ {
"invalid performer id", "invalid performer id",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithGallery], ID: sceneIDs[sceneIdxWithGallery],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithGallery),
},
PerformerIDs: models.NewRelatedIDs([]int{invalidID}), PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
}, },
true, true,
@@ -443,9 +435,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
"invalid movie id", "invalid movie id",
&models.Scene{ &models.Scene{
ID: sceneIDs[sceneIdxWithSpacedName], ID: sceneIDs[sceneIdxWithSpacedName],
Files: []*file.VideoFile{
makeSceneFileWithID(sceneIdxWithSpacedName),
},
Movies: models.NewRelatedMovies([]models.MoviesScenes{ Movies: models.NewRelatedMovies([]models.MoviesScenes{
{ {
MovieID: invalidID, MovieID: invalidID,
@@ -585,9 +574,9 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
}, },
models.Scene{ models.Scene{
ID: sceneIDs[sceneIdxWithSpacedName], ID: sceneIDs[sceneIdxWithSpacedName],
Files: []*file.VideoFile{ Files: models.NewRelatedVideoFiles([]*file.VideoFile{
makeSceneFile(sceneIdxWithSpacedName), makeSceneFile(sceneIdxWithSpacedName),
}, }),
Title: title, Title: title,
Details: details, Details: details,
URL: url, URL: url,
@@ -630,9 +619,9 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
clearScenePartial(), clearScenePartial(),
models.Scene{ models.Scene{
ID: sceneIDs[sceneIdxWithSpacedName], ID: sceneIDs[sceneIdxWithSpacedName],
Files: []*file.VideoFile{ Files: models.NewRelatedVideoFiles([]*file.VideoFile{
makeSceneFile(sceneIdxWithSpacedName), makeSceneFile(sceneIdxWithSpacedName),
}, }),
GalleryIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -665,15 +654,15 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
return return
} }
// ignore file ids
clearSceneFileIDs(got)
// load relationships // load relationships
if err := loadSceneRelationships(ctx, tt.want, got); err != nil { if err := loadSceneRelationships(ctx, tt.want, got); err != nil {
t.Errorf("loadSceneRelationships() error = %v", err) t.Errorf("loadSceneRelationships() error = %v", err)
return return
} }
// ignore file ids
clearSceneFileIDs(got)
assert.Equal(tt.want, *got) assert.Equal(tt.want, *got)
s, err := qb.Find(ctx, tt.id) s, err := qb.Find(ctx, tt.id)
@@ -681,14 +670,13 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
t.Errorf("sceneQueryBuilder.Find() error = %v", err) t.Errorf("sceneQueryBuilder.Find() error = %v", err)
} }
// ignore file ids
clearSceneFileIDs(s)
// load relationships // load relationships
if err := loadSceneRelationships(ctx, tt.want, s); err != nil { if err := loadSceneRelationships(ctx, tt.want, s); err != nil {
t.Errorf("loadSceneRelationships() error = %v", err) t.Errorf("loadSceneRelationships() error = %v", err)
return return
} }
// ignore file ids
clearSceneFileIDs(s)
assert.Equal(tt.want, *s) assert.Equal(tt.want, *s)
}) })
@@ -1338,7 +1326,7 @@ func makeSceneWithID(index int) *models.Scene {
ret.Date = nil ret.Date = nil
} }
ret.Files = []*file.VideoFile{makeSceneFile(index)} ret.Files = models.NewRelatedVideoFiles([]*file.VideoFile{makeSceneFile(index)})
return ret return ret
} }
@@ -1401,13 +1389,13 @@ func Test_sceneQueryBuilder_Find(t *testing.T) {
} }
if got != nil { if got != nil {
clearSceneFileIDs(got)
// load relationships // load relationships
if err := loadSceneRelationships(ctx, *tt.want, got); err != nil { if err := loadSceneRelationships(ctx, *tt.want, got); err != nil {
t.Errorf("loadSceneRelationships() error = %v", err) t.Errorf("loadSceneRelationships() error = %v", err)
return nil return nil
} }
clearSceneFileIDs(got)
} }
assert.Equal(tt.want, got) assert.Equal(tt.want, got)
@@ -1419,14 +1407,13 @@ func Test_sceneQueryBuilder_Find(t *testing.T) {
func postFindScenes(ctx context.Context, want []*models.Scene, got []*models.Scene) error { func postFindScenes(ctx context.Context, want []*models.Scene, got []*models.Scene) error {
for i, s := range got { for i, s := range got {
clearSceneFileIDs(s)
// load relationships // load relationships
if i < len(want) { if i < len(want) {
if err := loadSceneRelationships(ctx, *want[i], s); err != nil { if err := loadSceneRelationships(ctx, *want[i], s); err != nil {
return err return err
} }
} }
clearSceneFileIDs(s)
} }
return nil return nil
@@ -1935,7 +1922,7 @@ func TestSceneWall(t *testing.T) {
scene := scenes[0] scene := scenes[0]
assert.Equal(t, sceneIDs[sceneIdx], scene.ID) assert.Equal(t, sceneIDs[sceneIdx], scene.ID)
scenePath := getFilePath(folderIdxWithSceneFiles, getSceneBasename(sceneIdx)) scenePath := getFilePath(folderIdxWithSceneFiles, getSceneBasename(sceneIdx))
assert.Equal(t, scenePath, scene.Path()) assert.Equal(t, scenePath, scene.Path)
wallQuery = "not exist" wallQuery = "not exist"
scenes, err = sqb.Wall(ctx, &wallQuery) scenes, err = sqb.Wall(ctx, &wallQuery)
@@ -2248,8 +2235,8 @@ func TestSceneQueryPathOr(t *testing.T) {
if !assert.Len(t, scenes, 2) { if !assert.Len(t, scenes, 2) {
return nil return nil
} }
assert.Equal(t, scene1Path, scenes[0].Path()) assert.Equal(t, scene1Path, scenes[0].Path)
assert.Equal(t, scene2Path, scenes[1].Path()) assert.Equal(t, scene2Path, scenes[1].Path)
return nil return nil
}) })
@@ -2281,7 +2268,7 @@ func TestSceneQueryPathAndRating(t *testing.T) {
if !assert.Len(t, scenes, 1) { if !assert.Len(t, scenes, 1) {
return nil return nil
} }
assert.Equal(t, scenePath, scenes[0].Path()) assert.Equal(t, scenePath, scenes[0].Path)
assert.Equal(t, sceneRating, *scenes[0].Rating) assert.Equal(t, sceneRating, *scenes[0].Rating)
return nil return nil
@@ -2316,7 +2303,7 @@ func TestSceneQueryPathNotRating(t *testing.T) {
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
for _, scene := range scenes { for _, scene := range scenes {
verifyString(t, scene.Path(), pathCriterion) verifyString(t, scene.Path, pathCriterion)
ratingCriterion.Modifier = models.CriterionModifierNotEquals ratingCriterion.Modifier = models.CriterionModifierNotEquals
verifyIntPtr(t, scene.Rating, ratingCriterion) verifyIntPtr(t, scene.Rating, ratingCriterion)
} }
@@ -2394,7 +2381,7 @@ func verifyScenesPath(t *testing.T, pathCriterion models.StringCriterionInput) {
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
for _, scene := range scenes { for _, scene := range scenes {
verifyString(t, scene.Path(), pathCriterion) verifyString(t, scene.Path, pathCriterion)
} }
return nil return nil
@@ -2662,7 +2649,12 @@ func verifyScenesDuration(t *testing.T, durationCriterion models.IntCriterionInp
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
for _, scene := range scenes { for _, scene := range scenes {
duration := scene.Duration() if err := scene.LoadPrimaryFile(ctx, db.File); err != nil {
t.Errorf("Error querying scene files: %v", err)
return nil
}
duration := scene.Files.Primary().Duration
if durationCriterion.Modifier == models.CriterionModifierEquals { if durationCriterion.Modifier == models.CriterionModifierEquals {
assert.True(t, duration >= float64(durationCriterion.Value) && duration < float64(durationCriterion.Value+1)) assert.True(t, duration >= float64(durationCriterion.Value) && duration < float64(durationCriterion.Value+1))
} else if durationCriterion.Modifier == models.CriterionModifierNotEquals { } else if durationCriterion.Modifier == models.CriterionModifierNotEquals {
@@ -2732,7 +2724,11 @@ func verifyScenesResolution(t *testing.T, resolution models.ResolutionEnum) {
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
for _, scene := range scenes { for _, scene := range scenes {
f := scene.PrimaryFile() if err := scene.LoadPrimaryFile(ctx, db.File); err != nil {
t.Errorf("Error querying scene files: %v", err)
return nil
}
f := scene.Files.Primary()
height := 0 height := 0
if f != nil { if f != nil {
height = f.Height height = f.Height

View File

@@ -66,6 +66,7 @@ func WithDatabase(ctx context.Context, p DatabaseProvider, fn TxnFunc) error {
type Retryer struct { type Retryer struct {
Manager Manager Manager Manager
// use value < 0 to retry forever
Retries int Retries int
OnFail func(ctx context.Context, err error, attempt int) error OnFail func(ctx context.Context, err error, attempt int) error
} }
@@ -73,7 +74,7 @@ type Retryer struct {
func (r Retryer) WithTxn(ctx context.Context, fn TxnFunc) error { func (r Retryer) WithTxn(ctx context.Context, fn TxnFunc) error {
var attempt int var attempt int
var err error var err error
for attempt = 1; attempt <= r.Retries; attempt++ { for attempt = 1; attempt <= r.Retries || r.Retries < 0; attempt++ {
err = WithTxn(ctx, r.Manager, fn) err = WithTxn(ctx, r.Manager, fn)
if err == nil { if err == nil {

View File

@@ -171,7 +171,7 @@ export const App: React.FC = () => {
} }
function maybeRenderReleaseNotes() { function maybeRenderReleaseNotes() {
if (setupMatch) { if (setupMatch || config.loading || config.error) {
return; return;
} }