Data layer restructuring (#997)

* Move query builders to sqlite package
* Add transaction system
* Wrap model resolvers in transaction
* Add error return value for StringSliceToIntSlice
* Update/refactor mutation resolvers
* Convert query builders
* Remove unused join types
* Add stash id unit tests
* Use WAL journal mode
WithoutPants authored 2021-01-18 12:23:20 +11:00, committed by GitHub
parent 7bae990c67
commit 1e04deb3d4
168 changed files with 12683 additions and 10863 deletions
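The core of the restructuring shows up in every hunk below: tasks no longer construct query builders directly (models.NewSceneQueryBuilder() and friends) but receive the singleton's TxnManager and take repositories from a read transaction. A minimal sketch of that calling pattern follows; the TransactionManager and ReaderRepository shapes are inferred from this diff and may be incomplete, the import paths are assumed, and write-side resolvers presumably use a corresponding write-transaction method that is not shown in this file.

package manager

import (
	"context"

	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
)

// allScenes fetches every scene inside a read transaction.
// Interface shapes are inferred from the hunks below and may be incomplete.
func allScenes(txnManager models.TransactionManager) []*models.Scene {
	var scenes []*models.Scene
	if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
		var err error
		// Repositories come from the transaction, not from global query builders.
		scenes, err = r.Scene().All()
		return err
	}); err != nil {
		logger.Error(err.Error())
		return nil
	}
	return scenes
}
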
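The bullet about StringSliceToIntSlice refers to the helper now returning an error instead of silently skipping unparsable values; the Generate hunk below logs that error for scene and marker IDs. A plausible shape for the updated helper is sketched here; the committed implementation lives in the utils package and is not part of this hunk, so treat the body as an illustration of the new signature only.

package utils

import "strconv"

// StringSliceToIntSlice converts a slice of numeric strings to ints,
// returning an error for the first value that fails to parse.
// Sketch of the new signature; the committed implementation may differ.
func StringSliceToIntSlice(ss []string) ([]int, error) {
	ret := make([]int, 0, len(ss))
	for _, v := range ss {
		n, err := strconv.Atoi(v)
		if err != nil {
			return nil, err
		}
		ret = append(ret, n)
	}
	return ret, nil
}
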
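"Use WAL journal mode" is a change in the sqlite package rather than in this file: opening the database with write-ahead logging lets read transactions proceed alongside a single writer, which pairs naturally with the new read/write transaction split. A hedged sketch of how WAL is typically requested with the mattn/go-sqlite3 driver follows; the driver and DSN parameters actually used by the commit are not visible in this hunk.

package sqlite

import (
	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3" // driver registration
)

// open opens the database with WAL journal mode requested via DSN parameters.
// Assumes the mattn/go-sqlite3 driver; the real connection string may differ.
func open(dbPath string) (*sqlx.DB, error) {
	return sqlx.Open("sqlite3", dbPath+"?_journal=WAL&_busy_timeout=50")
}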


@@ -1,6 +1,7 @@
package manager
import (
"context"
"errors"
"fmt"
"os"
@@ -119,7 +120,7 @@ func (s *singleton) neededScan(paths []*models.StashConfig) (total *int, newFile
for _, sp := range paths {
err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error {
t++
task := ScanTask{FilePath: path}
task := ScanTask{FilePath: path, TxnManager: s.TxnManager}
if !task.doesPathExist() {
n++
}
@@ -211,7 +212,17 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
instance.Paths.Generated.EnsureTmpDir()
wg.Add()
task := ScanTask{FilePath: path, UseFileMetadata: input.UseFileMetadata, StripFileExtension: input.StripFileExtension, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5, GeneratePreview: input.ScanGeneratePreviews, GenerateImagePreview: input.ScanGenerateImagePreviews, GenerateSprite: input.ScanGenerateSprites}
task := ScanTask{
TxnManager: s.TxnManager,
FilePath: path,
UseFileMetadata: input.UseFileMetadata,
StripFileExtension: input.StripFileExtension,
fileNamingAlgorithm: fileNamingAlgo,
calculateMD5: calculateMD5,
GeneratePreview: input.ScanGeneratePreviews,
GenerateImagePreview: input.ScanGenerateImagePreviews,
GenerateSprite: input.ScanGenerateSprites,
}
go task.Start(&wg)
return nil
@@ -240,7 +251,11 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
for _, path := range galleries {
wg.Add()
task := ScanTask{FilePath: path, UseFileMetadata: false}
task := ScanTask{
TxnManager: s.TxnManager,
FilePath: path,
UseFileMetadata: false,
}
go task.associateGallery(&wg)
wg.Wait()
}
@@ -261,6 +276,7 @@ func (s *singleton) Import() {
var wg sync.WaitGroup
wg.Add(1)
task := ImportTask{
txnManager: s.TxnManager,
BaseDir: config.GetMetadataPath(),
Reset: true,
DuplicateBehaviour: models.ImportDuplicateEnumFail,
@@ -284,7 +300,11 @@ func (s *singleton) Export() {
var wg sync.WaitGroup
wg.Add(1)
task := ExportTask{full: true, fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()}
task := ExportTask{
txnManager: s.TxnManager,
full: true,
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
}
go task.Start(&wg)
wg.Wait()
}()
@@ -344,29 +364,47 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
s.Status.SetStatus(Generate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
mqb := models.NewSceneMarkerQueryBuilder()
//this.job.total = await ObjectionUtils.getCount(Scene);
instance.Paths.Generated.EnsureTmpDir()
sceneIDs := utils.StringSliceToIntSlice(input.SceneIDs)
markerIDs := utils.StringSliceToIntSlice(input.MarkerIDs)
sceneIDs, err := utils.StringSliceToIntSlice(input.SceneIDs)
if err != nil {
logger.Error(err.Error())
}
markerIDs, err := utils.StringSliceToIntSlice(input.MarkerIDs)
if err != nil {
logger.Error(err.Error())
}
go func() {
defer s.returnToIdleState()
var scenes []*models.Scene
var err error
var markers []*models.SceneMarker
if len(sceneIDs) > 0 {
scenes, err = qb.FindMany(sceneIDs)
} else {
scenes, err = qb.All()
}
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
if len(sceneIDs) > 0 {
scenes, err = qb.FindMany(sceneIDs)
} else {
scenes, err = qb.All()
}
if err != nil {
logger.Errorf("failed to get scenes for generate")
if err != nil {
return err
}
if len(markerIDs) > 0 {
markers, err = r.SceneMarker().FindMany(markerIDs)
if err != nil {
return err
}
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
@@ -377,14 +415,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
s.Status.Progress = 0
lenScenes := len(scenes)
total := lenScenes
var markers []*models.SceneMarker
if len(markerIDs) > 0 {
markers, err = mqb.FindMany(markerIDs)
total += len(markers)
}
total := lenScenes + len(markers)
if s.Status.stopping {
logger.Info("Stopping due to user request")
@@ -429,7 +460,11 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
}
if input.Sprites {
task := GenerateSpriteTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
task := GenerateSpriteTask{
Scene: *scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
wg.Add()
go task.Start(&wg)
}
@@ -448,13 +483,22 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
if input.Markers {
wg.Add()
task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
task := GenerateMarkersTask{
TxnManager: s.TxnManager,
Scene: scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
}
if input.Transcodes {
wg.Add()
task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
task := GenerateTranscodeTask{
Scene: *scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
}
}
@@ -474,7 +518,12 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
}
wg.Add()
task := GenerateMarkersTask{Marker: marker, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
task := GenerateMarkersTask{
TxnManager: s.TxnManager,
Marker: marker,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
}
@@ -502,7 +551,6 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
s.Status.SetStatus(Generate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
instance.Paths.Generated.EnsureTmpDir()
go func() {
@@ -514,13 +562,18 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
return
}
scene, err := qb.Find(sceneIdInt)
if err != nil || scene == nil {
logger.Errorf("failed to get scene for generate")
var scene *models.Scene
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
scene, err = r.Scene().Find(sceneIdInt)
return err
}); err != nil || scene == nil {
logger.Errorf("failed to get scene for generate: %s", err.Error())
return
}
task := GenerateScreenshotTask{
txnManager: s.TxnManager,
Scene: *scene,
ScreenshotAt: at,
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
@@ -551,29 +604,36 @@ func (s *singleton) AutoTag(performerIds []string, studioIds []string, tagIds []
studioCount := len(studioIds)
tagCount := len(tagIds)
performerQuery := models.NewPerformerQueryBuilder()
studioQuery := models.NewTagQueryBuilder()
tagQuery := models.NewTagQueryBuilder()
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
performerQuery := r.Performer()
studioQuery := r.Studio()
tagQuery := r.Tag()
const wildcard = "*"
var err error
if performerCount == 1 && performerIds[0] == wildcard {
performerCount, err = performerQuery.Count()
if err != nil {
logger.Errorf("Error getting performer count: %s", err.Error())
const wildcard = "*"
var err error
if performerCount == 1 && performerIds[0] == wildcard {
performerCount, err = performerQuery.Count()
if err != nil {
return fmt.Errorf("Error getting performer count: %s", err.Error())
}
}
}
if studioCount == 1 && studioIds[0] == wildcard {
studioCount, err = studioQuery.Count()
if err != nil {
logger.Errorf("Error getting studio count: %s", err.Error())
if studioCount == 1 && studioIds[0] == wildcard {
studioCount, err = studioQuery.Count()
if err != nil {
return fmt.Errorf("Error getting studio count: %s", err.Error())
}
}
}
if tagCount == 1 && tagIds[0] == wildcard {
tagCount, err = tagQuery.Count()
if err != nil {
logger.Errorf("Error getting tag count: %s", err.Error())
if tagCount == 1 && tagIds[0] == wildcard {
tagCount, err = tagQuery.Count()
if err != nil {
return fmt.Errorf("Error getting tag count: %s", err.Error())
}
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
total := performerCount + studioCount + tagCount
@@ -586,36 +646,44 @@ func (s *singleton) AutoTag(performerIds []string, studioIds []string, tagIds []
}
func (s *singleton) autoTagPerformers(performerIds []string) {
performerQuery := models.NewPerformerQueryBuilder()
var wg sync.WaitGroup
for _, performerId := range performerIds {
var performers []*models.Performer
if performerId == "*" {
var err error
performers, err = performerQuery.All()
if err != nil {
logger.Errorf("Error querying performers: %s", err.Error())
continue
}
} else {
performerIdInt, err := strconv.Atoi(performerId)
if err != nil {
logger.Errorf("Error parsing performer id %s: %s", performerId, err.Error())
continue
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
performerQuery := r.Performer()
if performerId == "*" {
var err error
performers, err = performerQuery.All()
if err != nil {
return fmt.Errorf("Error querying performers: %s", err.Error())
}
} else {
performerIdInt, err := strconv.Atoi(performerId)
if err != nil {
return fmt.Errorf("Error parsing performer id %s: %s", performerId, err.Error())
}
performer, err := performerQuery.Find(performerIdInt)
if err != nil {
return fmt.Errorf("Error finding performer id %s: %s", performerId, err.Error())
}
performers = append(performers, performer)
}
performer, err := performerQuery.Find(performerIdInt)
if err != nil {
logger.Errorf("Error finding performer id %s: %s", performerId, err.Error())
continue
}
performers = append(performers, performer)
return nil
}); err != nil {
logger.Error(err.Error())
continue
}
for _, performer := range performers {
wg.Add(1)
task := AutoTagPerformerTask{performer: performer}
task := AutoTagPerformerTask{
txnManager: s.TxnManager,
performer: performer,
}
go task.Start(&wg)
wg.Wait()
@@ -625,36 +693,43 @@ func (s *singleton) autoTagPerformers(performerIds []string) {
}
func (s *singleton) autoTagStudios(studioIds []string) {
studioQuery := models.NewStudioQueryBuilder()
var wg sync.WaitGroup
for _, studioId := range studioIds {
var studios []*models.Studio
if studioId == "*" {
var err error
studios, err = studioQuery.All()
if err != nil {
logger.Errorf("Error querying studios: %s", err.Error())
continue
}
} else {
studioIdInt, err := strconv.Atoi(studioId)
if err != nil {
logger.Errorf("Error parsing studio id %s: %s", studioId, err.Error())
continue
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
studioQuery := r.Studio()
if studioId == "*" {
var err error
studios, err = studioQuery.All()
if err != nil {
return fmt.Errorf("Error querying studios: %s", err.Error())
}
} else {
studioIdInt, err := strconv.Atoi(studioId)
if err != nil {
return fmt.Errorf("Error parsing studio id %s: %s", studioId, err.Error())
}
studio, err := studioQuery.Find(studioIdInt)
if err != nil {
return fmt.Errorf("Error finding studio id %s: %s", studioId, err.Error())
}
studios = append(studios, studio)
}
studio, err := studioQuery.Find(studioIdInt, nil)
if err != nil {
logger.Errorf("Error finding studio id %s: %s", studioId, err.Error())
continue
}
studios = append(studios, studio)
return nil
}); err != nil {
logger.Error(err.Error())
continue
}
for _, studio := range studios {
wg.Add(1)
task := AutoTagStudioTask{studio: studio}
task := AutoTagStudioTask{
studio: studio,
txnManager: s.TxnManager,
}
go task.Start(&wg)
wg.Wait()
@@ -664,36 +739,42 @@ func (s *singleton) autoTagStudios(studioIds []string) {
}
func (s *singleton) autoTagTags(tagIds []string) {
tagQuery := models.NewTagQueryBuilder()
var wg sync.WaitGroup
for _, tagId := range tagIds {
var tags []*models.Tag
if tagId == "*" {
var err error
tags, err = tagQuery.All()
if err != nil {
logger.Errorf("Error querying tags: %s", err.Error())
continue
}
} else {
tagIdInt, err := strconv.Atoi(tagId)
if err != nil {
logger.Errorf("Error parsing tag id %s: %s", tagId, err.Error())
continue
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
tagQuery := r.Tag()
if tagId == "*" {
var err error
tags, err = tagQuery.All()
if err != nil {
return fmt.Errorf("Error querying tags: %s", err.Error())
}
} else {
tagIdInt, err := strconv.Atoi(tagId)
if err != nil {
return fmt.Errorf("Error parsing tag id %s: %s", tagId, err.Error())
}
tag, err := tagQuery.Find(tagIdInt)
if err != nil {
return fmt.Errorf("Error finding tag id %s: %s", tagId, err.Error())
}
tags = append(tags, tag)
}
tag, err := tagQuery.Find(tagIdInt, nil)
if err != nil {
logger.Errorf("Error finding tag id %s: %s", tagId, err.Error())
continue
}
tags = append(tags, tag)
return nil
}); err != nil {
logger.Error(err.Error())
continue
}
for _, tag := range tags {
wg.Add(1)
task := AutoTagTagTask{tag: tag}
task := AutoTagTagTask{
txnManager: s.TxnManager,
tag: tag,
}
go task.Start(&wg)
wg.Wait()
@@ -709,28 +790,38 @@ func (s *singleton) Clean() {
s.Status.SetStatus(Clean)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
iqb := models.NewImageQueryBuilder()
gqb := models.NewGalleryQueryBuilder()
go func() {
defer s.returnToIdleState()
logger.Infof("Starting cleaning of tracked files")
scenes, err := qb.All()
if err != nil {
logger.Errorf("failed to fetch list of scenes for cleaning")
return
}
var scenes []*models.Scene
var images []*models.Image
var galleries []*models.Gallery
images, err := iqb.All()
if err != nil {
logger.Errorf("failed to fetch list of images for cleaning")
return
}
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
iqb := r.Image()
gqb := r.Gallery()
galleries, err := gqb.All()
if err != nil {
logger.Errorf("failed to fetch list of galleries for cleaning")
logger.Infof("Starting cleaning of tracked files")
var err error
scenes, err = qb.All()
if err != nil {
return errors.New("failed to fetch list of scenes for cleaning")
}
images, err = iqb.All()
if err != nil {
return errors.New("failed to fetch list of images for cleaning")
}
galleries, err = gqb.All()
if err != nil {
return errors.New("failed to fetch list of galleries for cleaning")
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
@@ -757,7 +848,11 @@ func (s *singleton) Clean() {
wg.Add(1)
task := CleanTask{Scene: scene, fileNamingAlgorithm: fileNamingAlgo}
task := CleanTask{
TxnManager: s.TxnManager,
Scene: scene,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
wg.Wait()
}
@@ -776,7 +871,10 @@ func (s *singleton) Clean() {
wg.Add(1)
task := CleanTask{Image: img}
task := CleanTask{
TxnManager: s.TxnManager,
Image: img,
}
go task.Start(&wg)
wg.Wait()
}
@@ -795,7 +893,10 @@ func (s *singleton) Clean() {
wg.Add(1)
task := CleanTask{Gallery: gallery}
task := CleanTask{
TxnManager: s.TxnManager,
Gallery: gallery,
}
go task.Start(&wg)
wg.Wait()
}
@@ -811,17 +912,19 @@ func (s *singleton) MigrateHash() {
s.Status.SetStatus(Migrate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
go func() {
defer s.returnToIdleState()
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
logger.Infof("Migrating generated files for %s naming hash", fileNamingAlgo.String())
scenes, err := qb.All()
if err != nil {
logger.Errorf("failed to fetch list of scenes for migration")
var scenes []*models.Scene
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
scenes, err = r.Scene().All()
return err
}); err != nil {
logger.Errorf("failed to fetch list of scenes for migration: %s", err.Error())
return
}
@@ -926,6 +1029,7 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
if input.Markers {
task := GenerateMarkersTask{
TxnManager: s.TxnManager,
Scene: scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,