Mirror of https://github.com/stashapp/stash.git (synced 2025-12-17 12:24:38 +03:00)
Move image blobs into separate tables (#618)
* Scene cover fallback to database
* Fix panic if studio not found
* Fix movie studio not being imported/exported
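The changes below touch the GraphQL resolvers, the HTTP image routes, the import/export tasks, the models and the query builders, and add a schema version 10 migration. The practical effect is that image blobs are no longer columns on the movies, performers, studios and scenes tables; they live in companion tables and are read and written through the query builders. A minimal sketch of the new movie image path, using only names introduced in this diff (a sketch, not code from the commit; imports: github.com/jmoiron/sqlx, github.com/stashapp/stash/pkg/models):

    // fetch-and-update pattern mirroring the MovieUpdate resolver below
    func updateMovieImagesExample(movieID int, front, back []byte, tx *sqlx.Tx) error {
        qb := models.NewMovieQueryBuilder()

        // images are no longer fields on models.Movie; read them on demand.
        // passing a nil *sqlx.Tx reads outside of any transaction.
        existingFront, err := qb.GetFrontImage(movieID, nil)
        if err != nil {
            return err
        }
        if len(front) == 0 {
            front = existingFront // keep the stored front image when none was supplied
        }

        // writes replace the single row in the movies_images companion table
        return qb.UpdateMovieImages(movieID, front, back, tx)
    }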
@@ -39,12 +39,10 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
|
||||
// Populate a new movie from the input
|
||||
currentTime := time.Now()
|
||||
newMovie := models.Movie{
|
||||
BackImage: backimageData,
|
||||
FrontImage: frontimageData,
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{String: input.Name, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{String: input.Name, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
|
||||
if input.Aliases != nil {
|
||||
@@ -90,6 +88,14 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(frontimageData) > 0 {
|
||||
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, err
|
||||
@@ -106,19 +112,20 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
||||
ID: movieID,
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
|
||||
}
|
||||
var frontimageData []byte
|
||||
var err error
|
||||
if input.FrontImage != nil {
|
||||
_, frontimageData, err := utils.ProcessBase64Image(*input.FrontImage)
|
||||
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
updatedMovie.FrontImage = &frontimageData
|
||||
}
|
||||
var backimageData []byte
|
||||
if input.BackImage != nil {
|
||||
_, backimageData, err := utils.ProcessBase64Image(*input.BackImage)
|
||||
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
updatedMovie.BackImage = &backimageData
|
||||
}
|
||||
|
||||
if input.Name != nil {
|
||||
@@ -177,6 +184,29 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(frontimageData) > 0 || len(backimageData) > 0 {
|
||||
if len(frontimageData) == 0 {
|
||||
frontimageData, err = qb.GetFrontImage(updatedMovie.ID, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if len(backimageData) == 0 {
|
||||
backimageData, err = qb.GetBackImage(updatedMovie.ID, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, err
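Several of these resolvers decode the incoming image with utils.ProcessBase64Image before handing the bytes to the query builder. That helper is not part of this diff; assuming it strips the data-URI prefix, base64-decodes the payload and derives a checksum from the decoded bytes, a rough sketch would be (imports: crypto/md5, encoding/base64, fmt, strings):

    // hypothetical sketch of utils.ProcessBase64Image; the real helper is not shown in this change
    func ProcessBase64Image(imageString string) (string, []byte, error) {
        // accept both raw base64 and "data:image/png;base64,..." data URIs
        if idx := strings.Index(imageString, "base64,"); idx != -1 {
            imageString = imageString[idx+len("base64,"):]
        }

        data, err := base64.StdEncoding.DecodeString(imageString)
        if err != nil {
            return "", nil, err
        }

        checksum := fmt.Sprintf("%x", md5.Sum(data))
        return checksum, data, nil
    }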
@@ -35,7 +35,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
|
||||
// Populate a new performer from the input
|
||||
currentTime := time.Now()
|
||||
newPerformer := models.Performer{
|
||||
Image: imageData,
|
||||
Checksum: checksum,
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
@@ -103,6 +102,14 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, err
|
||||
@@ -118,12 +125,13 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
|
||||
ID: performerID,
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
|
||||
}
|
||||
var imageData []byte
|
||||
var err error
|
||||
if input.Image != nil {
|
||||
_, imageData, err := utils.ProcessBase64Image(*input.Image)
|
||||
_, imageData, err = utils.ProcessBase64Image(*input.Image)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
updatedPerformer.Image = imageData
|
||||
}
|
||||
if input.Name != nil {
|
||||
// generate checksum from performer name rather than image
|
||||
@@ -192,6 +200,14 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -80,13 +80,15 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
|
||||
if input.Date != nil {
|
||||
updatedScene.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
|
||||
}
|
||||
|
||||
if input.CoverImage != nil && *input.CoverImage != "" {
|
||||
var err error
|
||||
_, coverImageData, err = utils.ProcessBase64Image(*input.CoverImage)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
updatedScene.Cover = &coverImageData
|
||||
|
||||
// update the cover after updating the scene
|
||||
}
|
||||
|
||||
if input.Rating != nil {
|
||||
@@ -111,6 +113,13 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update cover table
|
||||
if len(coverImageData) > 0 {
|
||||
if err := qb.UpdateSceneCover(sceneID, coverImageData, tx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Clear the existing gallery value
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
err = gqb.ClearGalleryId(sceneID, tx)
|
||||
@@ -188,7 +197,6 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
|
||||
|
||||
// only update the cover image if provided and everything else was successful
|
||||
if coverImageData != nil {
|
||||
|
||||
err = manager.SetSceneScreenshot(scene.Checksum, coverImageData)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -32,7 +32,6 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
||||
// Populate a new studio from the input
|
||||
currentTime := time.Now()
|
||||
newStudio := models.Studio{
|
||||
Image: imageData,
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{String: input.Name, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
@@ -55,6 +54,14 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, err
|
||||
@@ -71,12 +78,14 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
||||
ID: studioID,
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
|
||||
}
|
||||
|
||||
var imageData []byte
|
||||
if input.Image != nil {
|
||||
_, imageData, err := utils.ProcessBase64Image(*input.Image)
|
||||
var err error
|
||||
_, imageData, err = utils.ProcessBase64Image(*input.Image)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
updatedStudio.Image = &imageData
|
||||
}
|
||||
if input.Name != nil {
|
||||
// generate checksum from studio name rather than image
|
||||
@@ -111,6 +120,14 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -7,6 +7,7 @@ import (

	"github.com/go-chi/chi"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

type movieRoutes struct{}
@@ -25,12 +26,16 @@ func (rs movieRoutes) Routes() chi.Router {

func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
	movie := r.Context().Value(movieKey).(*models.Movie)
	_, _ = w.Write(movie.FrontImage)
	qb := models.NewMovieQueryBuilder()
	image, _ := qb.GetFrontImage(movie.ID, nil)
	utils.ServeImage(image, w, r)
}

func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
	movie := r.Context().Value(movieKey).(*models.Movie)
	_, _ = w.Write(movie.BackImage)
	qb := models.NewMovieQueryBuilder()
	image, _ := qb.GetBackImage(movie.ID, nil)
	utils.ServeImage(image, w, r)
}

func MovieCtx(next http.Handler) http.Handler {
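Both handlers now delegate to utils.ServeImage, which is not defined in this change set. Judging from the studio route further down, which keeps the equivalent logic inline, it presumably does ETag handling plus content-type detection along these lines (a sketch under that assumption; imports: crypto/md5, fmt, net/http, strings):

    func ServeImage(image []byte, w http.ResponseWriter, r *http.Request) {
        etag := fmt.Sprintf("%x", md5.Sum(image))

        if match := r.Header.Get("If-None-Match"); strings.Contains(match, etag) {
            w.WriteHeader(http.StatusNotModified)
            return
        }

        contentType := http.DetectContentType(image)
        // DetectContentType cannot identify SVG, so fall back the same way the studio route does
        if contentType == "text/xml; charset=utf-8" || contentType == "text/plain; charset=utf-8" {
            contentType = "image/svg+xml"
        }

        w.Header().Set("Content-Type", contentType)
        w.Header().Add("ETag", etag)
        _, _ = w.Write(image)
    }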
@@ -2,13 +2,12 @@ package api

import (
	"context"
	"crypto/md5"
	"fmt"
	"github.com/go-chi/chi"
	"github.com/stashapp/stash/pkg/models"
	"net/http"
	"strconv"
	"strings"

	"github.com/go-chi/chi"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

type performerRoutes struct{}
@@ -26,17 +25,9 @@ func (rs performerRoutes) Routes() chi.Router {

func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
	performer := r.Context().Value(performerKey).(*models.Performer)
	etag := fmt.Sprintf("%x", md5.Sum(performer.Image))

	if match := r.Header.Get("If-None-Match"); match != "" {
		if strings.Contains(match, etag) {
			w.WriteHeader(http.StatusNotModified)
			return
		}
	}

	w.Header().Add("Etag", etag)
	_, _ = w.Write(performer.Image)
	qb := models.NewPerformerQueryBuilder()
	image, _ := qb.GetPerformerImage(performer.ID, nil)
	utils.ServeImage(image, w, r)
}

func PerformerCtx(next http.Handler) http.Handler {
@@ -151,7 +151,16 @@ func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum)
	http.ServeFile(w, r, filepath)

	// fall back to the scene image blob if the file isn't present
	screenshotExists, _ := utils.FileExists(filepath)
	if screenshotExists {
		http.ServeFile(w, r, filepath)
	} else {
		qb := models.NewSceneQueryBuilder()
		cover, _ := qb.GetSceneCover(scene.ID, nil)
		utils.ServeImage(cover, w, r)
	}
}

func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
@@ -4,11 +4,12 @@ import (
	"context"
	"crypto/md5"
	"fmt"
	"github.com/go-chi/chi"
	"github.com/stashapp/stash/pkg/models"
	"net/http"
	"strconv"
	"strings"

	"github.com/go-chi/chi"
	"github.com/stashapp/stash/pkg/models"
)

type studioRoutes struct{}
@@ -26,7 +27,10 @@ func (rs studioRoutes) Routes() chi.Router {

func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
	studio := r.Context().Value(studioKey).(*models.Studio)
	etag := fmt.Sprintf("%x", md5.Sum(studio.Image))
	qb := models.NewStudioQueryBuilder()
	image, _ := qb.GetStudioImage(studio.ID, nil)

	etag := fmt.Sprintf("%x", md5.Sum(image))
	if match := r.Header.Get("If-None-Match"); match != "" {
		if strings.Contains(match, etag) {
			w.WriteHeader(http.StatusNotModified)
@@ -34,14 +38,14 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
		}
	}

	contentType := http.DetectContentType(studio.Image)
	contentType := http.DetectContentType(image)
	if contentType == "text/xml; charset=utf-8" || contentType == "text/plain; charset=utf-8" {
		contentType = "image/svg+xml"
	}

	w.Header().Set("Content-Type", contentType)
	w.Header().Add("Etag", etag)
	_, _ = w.Write(studio.Image)
	w.Write(image)
}

func StudioCtx(next http.Handler) http.Handler {
@@ -19,7 +19,7 @@ import (
|
||||
|
||||
var DB *sqlx.DB
|
||||
var dbPath string
|
||||
var appSchemaVersion uint = 9
|
||||
var appSchemaVersion uint = 10
|
||||
var databaseSchemaVersion uint
|
||||
|
||||
const sqlite3Driver = "sqlite3ex"
|
||||
@@ -100,6 +100,7 @@ func Backup(backupPath string) error {
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
logger.Infof("Backing up database into: %s", backupPath)
|
||||
_, err = db.Exec(`VACUUM INTO "` + backupPath + `"`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Vacuum failed: %s", err)
|
||||
@@ -109,6 +110,7 @@ func Backup(backupPath string) error {
|
||||
}
|
||||
|
||||
func RestoreFromBackup(backupPath string) error {
|
||||
logger.Infof("Restoring backup database %s into %s", backupPath, dbPath)
|
||||
return os.Rename(backupPath, dbPath)
|
||||
}
|
||||
|
||||
@@ -177,18 +179,28 @@ func RunMigrations() error {
|
||||
databaseSchemaVersion, _, _ = m.Version()
|
||||
stepNumber := appSchemaVersion - databaseSchemaVersion
|
||||
if stepNumber != 0 {
|
||||
logger.Infof("Migrating database from version %d to %d", databaseSchemaVersion, appSchemaVersion)
|
||||
err = m.Steps(int(stepNumber))
|
||||
if err != nil {
|
||||
// migration failed
|
||||
logger.Errorf("Error migrating database: %s", err.Error())
|
||||
m.Close()
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
m.Close()
|
||||
|
||||
// re-initialise the database
|
||||
Initialize(dbPath)
|
||||
|
||||
// run a vacuum on the database
|
||||
logger.Info("Performing vacuum on database")
|
||||
_, err = DB.Exec("VACUUM")
|
||||
if err != nil {
|
||||
logger.Warnf("error while performing post-migration vacuum: %s", err.Error())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
pkg/database/migrations/10_image_tables.up.sql (new file, 514 lines)
@@ -0,0 +1,514 @@
|
||||
-- recreate scenes, studios and performers tables
|
||||
ALTER TABLE `studios` rename to `_studios_old`;
|
||||
ALTER TABLE `scenes` rename to `_scenes_old`;
|
||||
ALTER TABLE `performers` RENAME TO `_performers_old`;
|
||||
ALTER TABLE `movies` rename to `_movies_old`;
|
||||
|
||||
-- remove studio image
|
||||
CREATE TABLE `studios` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`checksum` varchar(255) not null,
|
||||
`name` varchar(255),
|
||||
`url` varchar(255),
|
||||
`parent_id` integer DEFAULT NULL CHECK ( id IS NOT parent_id ) REFERENCES studios(id) on delete set null,
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null
|
||||
);
|
||||
|
||||
DROP INDEX `studios_checksum_unique`;
|
||||
DROP INDEX `index_studios_on_name`;
|
||||
DROP INDEX `index_studios_on_checksum`;
|
||||
|
||||
CREATE UNIQUE INDEX `studios_checksum_unique` on `studios` (`checksum`);
|
||||
CREATE INDEX `index_studios_on_name` on `studios` (`name`);
|
||||
CREATE INDEX `index_studios_on_checksum` on `studios` (`checksum`);
|
||||
|
||||
-- remove scene cover
|
||||
CREATE TABLE `scenes` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`path` varchar(510) not null,
|
||||
`checksum` varchar(255) not null,
|
||||
`title` varchar(255),
|
||||
`details` text,
|
||||
`url` varchar(255),
|
||||
`date` date,
|
||||
`rating` tinyint,
|
||||
`size` varchar(255),
|
||||
`duration` float,
|
||||
`video_codec` varchar(255),
|
||||
`audio_codec` varchar(255),
|
||||
`width` tinyint,
|
||||
`height` tinyint,
|
||||
`framerate` float,
|
||||
`bitrate` integer,
|
||||
`studio_id` integer,
|
||||
`o_counter` tinyint not null default 0,
|
||||
`format` varchar(255),
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null,
|
||||
-- changed from cascade delete
|
||||
foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL
|
||||
);
|
||||
|
||||
DROP INDEX IF EXISTS `scenes_path_unique`;
|
||||
DROP INDEX IF EXISTS `scenes_checksum_unique`;
|
||||
DROP INDEX IF EXISTS `index_scenes_on_studio_id`;
|
||||
|
||||
CREATE UNIQUE INDEX `scenes_path_unique` on `scenes` (`path`);
|
||||
CREATE UNIQUE INDEX `scenes_checksum_unique` on `scenes` (`checksum`);
|
||||
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);
|
||||
|
||||
-- remove performer image
|
||||
CREATE TABLE `performers` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`checksum` varchar(255) not null,
|
||||
`name` varchar(255),
|
||||
`gender` varchar(20),
|
||||
`url` varchar(255),
|
||||
`twitter` varchar(255),
|
||||
`instagram` varchar(255),
|
||||
`birthdate` date,
|
||||
`ethnicity` varchar(255),
|
||||
`country` varchar(255),
|
||||
`eye_color` varchar(255),
|
||||
`height` varchar(255),
|
||||
`measurements` varchar(255),
|
||||
`fake_tits` varchar(255),
|
||||
`career_length` varchar(255),
|
||||
`tattoos` varchar(255),
|
||||
`piercings` varchar(255),
|
||||
`aliases` varchar(255),
|
||||
`favorite` boolean not null default '0',
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null
|
||||
);
|
||||
|
||||
DROP INDEX `performers_checksum_unique`;
|
||||
DROP INDEX `index_performers_on_name`;
|
||||
|
||||
CREATE UNIQUE INDEX `performers_checksum_unique` on `performers` (`checksum`);
|
||||
CREATE INDEX `index_performers_on_name` on `performers` (`name`);
|
||||
|
||||
-- remove front_image and back_image
|
||||
CREATE TABLE `movies` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`name` varchar(255) not null,
|
||||
`aliases` varchar(255),
|
||||
`duration` integer,
|
||||
`date` date,
|
||||
`rating` tinyint,
|
||||
`studio_id` integer,
|
||||
`director` varchar(255),
|
||||
`synopsis` text,
|
||||
`checksum` varchar(255) not null,
|
||||
`url` varchar(255),
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null,
|
||||
foreign key(`studio_id`) references `studios`(`id`) on delete set null
|
||||
);
|
||||
|
||||
DROP INDEX `movies_name_unique`;
|
||||
DROP INDEX `movies_checksum_unique`;
|
||||
DROP INDEX `index_movies_on_studio_id`;
|
||||
|
||||
CREATE UNIQUE INDEX `movies_name_unique` on `movies` (`name`);
|
||||
CREATE UNIQUE INDEX `movies_checksum_unique` on `movies` (`checksum`);
|
||||
CREATE INDEX `index_movies_on_studio_id` on `movies` (`studio_id`);
|
||||
|
||||
-- recreate the tables referencing the above tables to correct their references
|
||||
ALTER TABLE `galleries` rename to `_galleries_old`;
|
||||
ALTER TABLE `performers_scenes` rename to `_performers_scenes_old`;
|
||||
ALTER TABLE `scene_markers` rename to `_scene_markers_old`;
|
||||
ALTER TABLE `scene_markers_tags` rename to `_scene_markers_tags_old`;
|
||||
ALTER TABLE `scenes_tags` rename to `_scenes_tags_old`;
|
||||
ALTER TABLE `movies_scenes` rename to `_movies_scenes_old`;
|
||||
ALTER TABLE `scraped_items` rename to `_scraped_items_old`;
|
||||
|
||||
CREATE TABLE `galleries` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`path` varchar(510) not null,
|
||||
`checksum` varchar(255) not null,
|
||||
`scene_id` integer,
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null,
|
||||
foreign key(`scene_id`) references `scenes`(`id`)
|
||||
);
|
||||
|
||||
DROP INDEX IF EXISTS `index_galleries_on_scene_id`;
|
||||
DROP INDEX IF EXISTS `galleries_path_unique`;
|
||||
DROP INDEX IF EXISTS `galleries_checksum_unique`;
|
||||
|
||||
CREATE INDEX `index_galleries_on_scene_id` on `galleries` (`scene_id`);
|
||||
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
|
||||
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
|
||||
|
||||
CREATE TABLE `performers_scenes` (
|
||||
`performer_id` integer,
|
||||
`scene_id` integer,
|
||||
foreign key(`performer_id`) references `performers`(`id`),
|
||||
foreign key(`scene_id`) references `scenes`(`id`)
|
||||
);
|
||||
|
||||
DROP INDEX `index_performers_scenes_on_scene_id`;
|
||||
DROP INDEX `index_performers_scenes_on_performer_id`;
|
||||
|
||||
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
|
||||
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
|
||||
|
||||
CREATE TABLE `scene_markers` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`title` varchar(255) not null,
|
||||
`seconds` float not null,
|
||||
`primary_tag_id` integer not null,
|
||||
`scene_id` integer,
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null,
|
||||
foreign key(`primary_tag_id`) references `tags`(`id`),
|
||||
foreign key(`scene_id`) references `scenes`(`id`)
|
||||
);
|
||||
|
||||
DROP INDEX `index_scene_markers_on_scene_id`;
|
||||
DROP INDEX `index_scene_markers_on_primary_tag_id`;
|
||||
|
||||
CREATE INDEX `index_scene_markers_on_scene_id` on `scene_markers` (`scene_id`);
|
||||
CREATE INDEX `index_scene_markers_on_primary_tag_id` on `scene_markers` (`primary_tag_id`);
|
||||
|
||||
CREATE TABLE `scene_markers_tags` (
|
||||
`scene_marker_id` integer,
|
||||
`tag_id` integer,
|
||||
foreign key(`scene_marker_id`) references `scene_markers`(`id`) on delete CASCADE,
|
||||
foreign key(`tag_id`) references `tags`(`id`)
|
||||
);
|
||||
|
||||
DROP INDEX `index_scene_markers_tags_on_tag_id`;
|
||||
DROP INDEX `index_scene_markers_tags_on_scene_marker_id`;
|
||||
|
||||
CREATE INDEX `index_scene_markers_tags_on_tag_id` on `scene_markers_tags` (`tag_id`);
|
||||
CREATE INDEX `index_scene_markers_tags_on_scene_marker_id` on `scene_markers_tags` (`scene_marker_id`);
|
||||
|
||||
CREATE TABLE `scenes_tags` (
|
||||
`scene_id` integer,
|
||||
`tag_id` integer,
|
||||
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
|
||||
foreign key(`tag_id`) references `tags`(`id`)
|
||||
);
|
||||
|
||||
DROP INDEX `index_scenes_tags_on_tag_id`;
|
||||
DROP INDEX `index_scenes_tags_on_scene_id`;
|
||||
|
||||
CREATE INDEX `index_scenes_tags_on_tag_id` on `scenes_tags` (`tag_id`);
|
||||
CREATE INDEX `index_scenes_tags_on_scene_id` on `scenes_tags` (`scene_id`);
|
||||
|
||||
CREATE TABLE `movies_scenes` (
|
||||
`movie_id` integer,
|
||||
`scene_id` integer,
|
||||
`scene_index` tinyint,
|
||||
foreign key(`movie_id`) references `movies`(`id`) on delete cascade,
|
||||
foreign key(`scene_id`) references `scenes`(`id`) on delete cascade
|
||||
);
|
||||
|
||||
DROP INDEX `index_movies_scenes_on_movie_id`;
|
||||
DROP INDEX `index_movies_scenes_on_scene_id`;
|
||||
|
||||
CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
|
||||
CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
|
||||
|
||||
-- remove movie_id since doesn't appear to be used
|
||||
CREATE TABLE `scraped_items` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`title` varchar(255),
|
||||
`description` text,
|
||||
`url` varchar(255),
|
||||
`date` date,
|
||||
`rating` varchar(255),
|
||||
`tags` varchar(510),
|
||||
`models` varchar(510),
|
||||
`episode` integer,
|
||||
`gallery_filename` varchar(255),
|
||||
`gallery_url` varchar(510),
|
||||
`video_filename` varchar(255),
|
||||
`video_url` varchar(255),
|
||||
`studio_id` integer,
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null,
|
||||
foreign key(`studio_id`) references `studios`(`id`)
|
||||
);
|
||||
|
||||
DROP INDEX `index_scraped_items_on_studio_id`;
|
||||
|
||||
CREATE INDEX `index_scraped_items_on_studio_id` on `scraped_items` (`studio_id`);
|
||||
|
||||
-- now populate from the old tables
|
||||
-- these tables are changed so require the full column def
|
||||
INSERT INTO `studios`
|
||||
(
|
||||
`id`,
|
||||
`checksum`,
|
||||
`name`,
|
||||
`url`,
|
||||
`parent_id`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
`checksum`,
|
||||
`name`,
|
||||
`url`,
|
||||
`parent_id`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
FROM `_studios_old`;
|
||||
|
||||
INSERT INTO `scenes`
|
||||
(
|
||||
`id`,
|
||||
`path`,
|
||||
`checksum`,
|
||||
`title`,
|
||||
`details`,
|
||||
`url`,
|
||||
`date`,
|
||||
`rating`,
|
||||
`size`,
|
||||
`duration`,
|
||||
`video_codec`,
|
||||
`audio_codec`,
|
||||
`width`,
|
||||
`height`,
|
||||
`framerate`,
|
||||
`bitrate`,
|
||||
`studio_id`,
|
||||
`o_counter`,
|
||||
`format`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
`path`,
|
||||
`checksum`,
|
||||
`title`,
|
||||
`details`,
|
||||
`url`,
|
||||
`date`,
|
||||
`rating`,
|
||||
`size`,
|
||||
`duration`,
|
||||
`video_codec`,
|
||||
`audio_codec`,
|
||||
`width`,
|
||||
`height`,
|
||||
`framerate`,
|
||||
`bitrate`,
|
||||
`studio_id`,
|
||||
`o_counter`,
|
||||
`format`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
FROM `_scenes_old`;
|
||||
|
||||
INSERT INTO `performers`
|
||||
(
|
||||
`id`,
|
||||
`checksum`,
|
||||
`name`,
|
||||
`gender`,
|
||||
`url`,
|
||||
`twitter`,
|
||||
`instagram`,
|
||||
`birthdate`,
|
||||
`ethnicity`,
|
||||
`country`,
|
||||
`eye_color`,
|
||||
`height`,
|
||||
`measurements`,
|
||||
`fake_tits`,
|
||||
`career_length`,
|
||||
`tattoos`,
|
||||
`piercings`,
|
||||
`aliases`,
|
||||
`favorite`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
`checksum`,
|
||||
`name`,
|
||||
`gender`,
|
||||
`url`,
|
||||
`twitter`,
|
||||
`instagram`,
|
||||
`birthdate`,
|
||||
`ethnicity`,
|
||||
`country`,
|
||||
`eye_color`,
|
||||
`height`,
|
||||
`measurements`,
|
||||
`fake_tits`,
|
||||
`career_length`,
|
||||
`tattoos`,
|
||||
`piercings`,
|
||||
`aliases`,
|
||||
`favorite`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
FROM `_performers_old`;
|
||||
|
||||
INSERT INTO `movies`
|
||||
(
|
||||
`id`,
|
||||
`name`,
|
||||
`aliases`,
|
||||
`duration`,
|
||||
`date`,
|
||||
`rating`,
|
||||
`studio_id`,
|
||||
`director`,
|
||||
`synopsis`,
|
||||
`checksum`,
|
||||
`url`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
`name`,
|
||||
`aliases`,
|
||||
`duration`,
|
||||
`date`,
|
||||
`rating`,
|
||||
`studio_id`,
|
||||
`director`,
|
||||
`synopsis`,
|
||||
`checksum`,
|
||||
`url`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
FROM `_movies_old`;
|
||||
|
||||
INSERT INTO `scraped_items`
|
||||
(
|
||||
`id`,
|
||||
`title`,
|
||||
`description`,
|
||||
`url`,
|
||||
`date`,
|
||||
`rating`,
|
||||
`tags`,
|
||||
`models`,
|
||||
`episode`,
|
||||
`gallery_filename`,
|
||||
`gallery_url`,
|
||||
`video_filename`,
|
||||
`video_url`,
|
||||
`studio_id`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
`title`,
|
||||
`description`,
|
||||
`url`,
|
||||
`date`,
|
||||
`rating`,
|
||||
`tags`,
|
||||
`models`,
|
||||
`episode`,
|
||||
`gallery_filename`,
|
||||
`gallery_url`,
|
||||
`video_filename`,
|
||||
`video_url`,
|
||||
`studio_id`,
|
||||
`created_at`,
|
||||
`updated_at`
|
||||
FROM `_scraped_items_old`;
|
||||
|
||||
-- these tables are a direct copy
|
||||
INSERT INTO `galleries` SELECT * from `_galleries_old`;
|
||||
INSERT INTO `performers_scenes` SELECT * from `_performers_scenes_old`;
|
||||
INSERT INTO `scene_markers` SELECT * from `_scene_markers_old`;
|
||||
INSERT INTO `scene_markers_tags` SELECT * from `_scene_markers_tags_old`;
|
||||
INSERT INTO `scenes_tags` SELECT * from `_scenes_tags_old`;
|
||||
INSERT INTO `movies_scenes` SELECT * from `_movies_scenes_old`;
|
||||
|
||||
-- populate covers in separate table
|
||||
CREATE TABLE `scenes_cover` (
|
||||
`scene_id` integer,
|
||||
`cover` blob not null,
|
||||
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX `index_scene_covers_on_scene_id` on `scenes_cover` (`scene_id`);
|
||||
|
||||
INSERT INTO `scenes_cover`
|
||||
(
|
||||
`scene_id`,
|
||||
`cover`
|
||||
)
|
||||
SELECT `id`, `cover` from `_scenes_old` where `cover` is not null;
|
||||
|
||||
-- put performer images in separate table
|
||||
CREATE TABLE `performers_image` (
|
||||
`performer_id` integer,
|
||||
`image` blob not null,
|
||||
foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX `index_performer_image_on_performer_id` on `performers_image` (`performer_id`);
|
||||
|
||||
INSERT INTO `performers_image`
|
||||
(
|
||||
`performer_id`,
|
||||
`image`
|
||||
)
|
||||
SELECT `id`, `image` from `_performers_old` where `image` is not null;
|
||||
|
||||
-- put studio images in separate table
|
||||
CREATE TABLE `studios_image` (
|
||||
`studio_id` integer,
|
||||
`image` blob not null,
|
||||
foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX `index_studio_image_on_studio_id` on `studios_image` (`studio_id`);
|
||||
|
||||
INSERT INTO `studios_image`
|
||||
(
|
||||
`studio_id`,
|
||||
`image`
|
||||
)
|
||||
SELECT `id`, `image` from `_studios_old` where `image` is not null;
|
||||
|
||||
-- put movie images in separate table
|
||||
CREATE TABLE `movies_images` (
|
||||
`movie_id` integer,
|
||||
`front_image` blob not null,
|
||||
`back_image` blob,
|
||||
foreign key(`movie_id`) references `movies`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX `index_movie_images_on_movie_id` on `movies_images` (`movie_id`);
|
||||
|
||||
INSERT INTO `movies_images`
|
||||
(
|
||||
`movie_id`,
|
||||
`front_image`,
|
||||
`back_image`
|
||||
)
|
||||
SELECT `id`, `front_image`, `back_image` from `_movies_old` where `front_image` is not null;
|
||||
|
||||
-- drop old tables
|
||||
DROP TABLE `_scenes_old`;
|
||||
DROP TABLE `_studios_old`;
|
||||
DROP TABLE `_performers_old`;
|
||||
DROP TABLE `_movies_old`;
|
||||
DROP TABLE `_galleries_old`;
|
||||
DROP TABLE `_performers_scenes_old`;
|
||||
DROP TABLE `_scene_markers_old`;
|
||||
DROP TABLE `_scene_markers_tags_old`;
|
||||
DROP TABLE `_scenes_tags_old`;
|
||||
DROP TABLE `_movies_scenes_old`;
|
||||
DROP TABLE `_scraped_items_old`;
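The migration above uses the standard SQLite recipe for removing columns on engines without ALTER TABLE ... DROP COLUMN support: rename the old table, create the new definition without the blob column, copy the surviving rows across, move the blobs into a companion table, then drop the renamed table. A toy, self-contained Go illustration of that sequence (table and column names are made up; uses the mattn/go-sqlite3 driver rather than the project's migration machinery):

    package main

    import (
        "database/sql"
        "log"

        _ "github.com/mattn/go-sqlite3"
    )

    func main() {
        db, err := sql.Open("sqlite3", ":memory:")
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        stmts := []string{
            // starting point: a table with an embedded blob column
            "CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT, image BLOB)",
            "INSERT INTO t (name, image) VALUES ('a', x'00'), ('b', NULL)",
            // rename, recreate without the blob, copy rows, split the blob out, drop the old table
            "ALTER TABLE t RENAME TO _t_old",
            "CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT)",
            "CREATE TABLE t_image (t_id INTEGER, image BLOB NOT NULL, FOREIGN KEY(t_id) REFERENCES t(id) ON DELETE CASCADE)",
            "INSERT INTO t (id, name) SELECT id, name FROM _t_old",
            "INSERT INTO t_image (t_id, image) SELECT id, image FROM _t_old WHERE image IS NOT NULL",
            "DROP TABLE _t_old",
        }
        for _, stmt := range stmts {
            if _, err := db.Exec(stmt); err != nil {
                log.Fatalf("%s: %v", stmt, err)
            }
        }
    }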
@@ -3,12 +3,13 @@ package database
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
"github.com/golang-migrate/migrate/v4"
|
||||
"github.com/golang-migrate/migrate/v4/source"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
"github.com/golang-migrate/migrate/v4"
|
||||
"github.com/golang-migrate/migrate/v4/source"
|
||||
)
|
||||
|
||||
type Packr2Source struct {
|
||||
@@ -72,7 +73,7 @@ func (s *Packr2Source) ReadUp(version uint) (r io.ReadCloser, identifier string,
|
||||
if migration, ok := s.Migrations.Up(version); !ok {
|
||||
return nil, "", os.ErrNotExist
|
||||
} else {
|
||||
b := s.Box.Bytes(migration.Raw)
|
||||
b, _ := s.Box.Find(migration.Raw)
|
||||
return ioutil.NopCloser(bytes.NewBuffer(b)),
|
||||
migration.Identifier,
|
||||
nil
|
||||
@@ -83,7 +84,7 @@ func (s *Packr2Source) ReadDown(version uint) (r io.ReadCloser, identifier strin
|
||||
if migration, ok := s.Migrations.Down(version); !ok {
|
||||
return nil, "", migrate.ErrNilVersion
|
||||
} else {
|
||||
b := s.Box.Bytes(migration.Raw)
|
||||
b, _ := s.Box.Find(migration.Raw)
|
||||
return ioutil.NopCloser(bytes.NewBuffer(b)),
|
||||
migration.Identifier,
|
||||
nil
|
||||
|
||||
@@ -2,9 +2,9 @@ package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/json-iterator/go"
|
||||
"os"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
@@ -19,6 +19,7 @@ type Movie struct {
|
||||
FrontImage string `json:"front_image,omitempty"`
|
||||
BackImage string `json:"back_image,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
Studio string `json:"studio,omitempty"`
|
||||
CreatedAt models.JSONTime `json:"created_at,omitempty"`
|
||||
UpdatedAt models.JSONTime `json:"updated_at,omitempty"`
|
||||
}
|
||||
|
||||
@@ -111,7 +111,6 @@ func createPerformer(tx *sqlx.Tx) error {
|
||||
pqb := models.NewPerformerQueryBuilder()
|
||||
|
||||
performer := models.Performer{
|
||||
Image: []byte{0, 1, 2},
|
||||
Checksum: testName,
|
||||
Name: sql.NullString{Valid: true, String: testName},
|
||||
Favorite: sql.NullBool{Valid: true, Bool: false},
|
||||
@@ -130,7 +129,6 @@ func createStudio(tx *sqlx.Tx, name string) (*models.Studio, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
|
||||
studio := models.Studio{
|
||||
Image: []byte{0, 1, 2},
|
||||
Checksum: name,
|
||||
Name: sql.NullString{Valid: true, String: testName},
|
||||
}
|
||||
|
||||
@@ -87,6 +87,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int) {
|
||||
}
|
||||
func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask, tx *sqlx.Tx) {
|
||||
defer wg.Done()
|
||||
sceneQB := models.NewSceneQueryBuilder()
|
||||
studioQB := models.NewStudioQueryBuilder()
|
||||
movieQB := models.NewMovieQueryBuilder()
|
||||
galleryQB := models.NewGalleryQueryBuilder()
|
||||
@@ -216,8 +217,14 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
|
||||
newSceneJSON.File.Bitrate = int(scene.Bitrate.Int64)
|
||||
}
|
||||
|
||||
if len(scene.Cover) > 0 {
|
||||
newSceneJSON.Cover = utils.GetBase64StringFromData(scene.Cover)
|
||||
cover, err := sceneQB.GetSceneCover(scene.ID, tx)
|
||||
if err != nil {
|
||||
logger.Errorf("[scenes] <%s> error getting scene cover: %s", scene.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if len(cover) > 0 {
|
||||
newSceneJSON.Cover = utils.GetBase64StringFromData(cover)
|
||||
}
|
||||
|
||||
sceneJSON, err := instance.JSON.getScene(scene.Checksum)
|
||||
@@ -287,6 +294,8 @@ func (t *ExportTask) ExportPerformers(ctx context.Context, workers int) {
|
||||
func exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.Performer) {
|
||||
defer wg.Done()
|
||||
|
||||
performerQB := models.NewPerformerQueryBuilder()
|
||||
|
||||
for performer := range jobChan {
|
||||
newPerformerJSON := jsonschema.Performer{
|
||||
CreatedAt: models.JSONTime{Time: performer.CreatedAt.Timestamp},
|
||||
@@ -345,7 +354,15 @@ func exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.Performer) {
|
||||
newPerformerJSON.Favorite = performer.Favorite.Bool
|
||||
}
|
||||
|
||||
newPerformerJSON.Image = utils.GetBase64StringFromData(performer.Image)
|
||||
image, err := performerQB.GetPerformerImage(performer.ID, nil)
|
||||
if err != nil {
|
||||
logger.Errorf("[performers] <%s> error getting performers image: %s", performer.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if len(image) > 0 {
|
||||
newPerformerJSON.Image = utils.GetBase64StringFromData(image)
|
||||
}
|
||||
|
||||
performerJSON, err := instance.JSON.getPerformer(performer.Checksum)
|
||||
if err != nil {
|
||||
@@ -418,7 +435,15 @@ func exportStudio(wg *sync.WaitGroup, jobChan <-chan *models.Studio) {
|
||||
}
|
||||
}
|
||||
|
||||
newStudioJSON.Image = utils.GetBase64StringFromData(studio.Image)
|
||||
image, err := studioQB.GetStudioImage(studio.ID, nil)
|
||||
if err != nil {
|
||||
logger.Errorf("[studios] <%s> error getting studio image: %s", studio.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if len(image) > 0 {
|
||||
newStudioJSON.Image = utils.GetBase64StringFromData(image)
|
||||
}
|
||||
|
||||
studioJSON, err := instance.JSON.getStudio(studio.Checksum)
|
||||
if err != nil {
|
||||
@@ -469,6 +494,9 @@ func (t *ExportTask) ExportMovies(ctx context.Context, workers int) {
|
||||
func exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movie) {
|
||||
defer wg.Done()
|
||||
|
||||
movieQB := models.NewMovieQueryBuilder()
|
||||
studioQB := models.NewStudioQueryBuilder()
|
||||
|
||||
for movie := range jobChan {
|
||||
newMovieJSON := jsonschema.Movie{
|
||||
CreatedAt: models.JSONTime{Time: movie.CreatedAt.Timestamp},
|
||||
@@ -503,8 +531,33 @@ func exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movie) {
|
||||
newMovieJSON.URL = movie.URL.String
|
||||
}
|
||||
|
||||
newMovieJSON.FrontImage = utils.GetBase64StringFromData(movie.FrontImage)
|
||||
newMovieJSON.BackImage = utils.GetBase64StringFromData(movie.BackImage)
|
||||
if movie.StudioID.Valid {
|
||||
studio, _ := studioQB.Find(int(movie.StudioID.Int64), nil)
|
||||
if studio != nil {
|
||||
newMovieJSON.Studio = studio.Name.String
|
||||
}
|
||||
}
|
||||
|
||||
frontImage, err := movieQB.GetFrontImage(movie.ID, nil)
|
||||
if err != nil {
|
||||
logger.Errorf("[movies] <%s> error getting movie front image: %s", movie.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if len(frontImage) > 0 {
|
||||
newMovieJSON.FrontImage = utils.GetBase64StringFromData(frontImage)
|
||||
}
|
||||
|
||||
backImage, err := movieQB.GetBackImage(movie.ID, nil)
|
||||
if err != nil {
|
||||
logger.Errorf("[movies] <%s> error getting movie back image: %s", movie.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if len(backImage) > 0 {
|
||||
newMovieJSON.BackImage = utils.GetBase64StringFromData(backImage)
|
||||
}
|
||||
|
||||
movieJSON, err := instance.JSON.getMovie(movie.Checksum)
|
||||
if err != nil {
|
||||
logger.Debugf("[movies] error reading movie json: %s", err.Error())
|
||||
|
||||
@@ -69,11 +69,24 @@ func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||
}
|
||||
|
||||
updatedScene.Cover = &coverImageData
|
||||
err = SetSceneScreenshot(t.Scene.Checksum, coverImageData)
|
||||
if err := SetSceneScreenshot(t.Scene.Checksum, coverImageData); err != nil {
|
||||
logger.Errorf("Error writing screenshot: %s", err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
}
|
||||
|
||||
// update the scene cover table
|
||||
if err := qb.UpdateSceneCover(t.Scene.ID, coverImageData, tx); err != nil {
|
||||
logger.Errorf("Error setting screenshot: %s", err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
}
|
||||
|
||||
// update the scene with the update date
|
||||
_, err = qb.Update(updatedScene, tx)
|
||||
if err != nil {
|
||||
logger.Errorf("Error setting screenshot: %s", err.Error())
|
||||
logger.Errorf("Error updating scene: %s", err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -85,7 +85,6 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
|
||||
|
||||
// Populate a new performer from the input
|
||||
newPerformer := models.Performer{
|
||||
Image: imageData,
|
||||
Checksum: checksum,
|
||||
Favorite: sql.NullBool{Bool: performerJSON.Favorite, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(performerJSON.CreatedAt)},
|
||||
@@ -141,12 +140,21 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
|
||||
newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true}
|
||||
}
|
||||
|
||||
_, err = qb.Create(newPerformer, tx)
|
||||
createdPerformer, err := qb.Create(newPerformer, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[performers] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Add the performer image if set
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdatePerformerImage(createdPerformer.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[performers] <%s> error setting performer image: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[performers] importing")
|
||||
@@ -217,7 +225,6 @@ func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent m
|
||||
|
||||
// Populate a new studio from the input
|
||||
newStudio := models.Studio{
|
||||
Image: imageData,
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{String: studioJSON.Name, Valid: true},
|
||||
URL: sql.NullString{String: studioJSON.URL, Valid: true},
|
||||
@@ -251,11 +258,17 @@ func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent m
|
||||
}
|
||||
}
|
||||
|
||||
_, err = qb.Create(newStudio, tx)
|
||||
createdStudio, err := qb.Create(newStudio, tx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateStudioImage(createdStudio.ID, imageData, tx); err != nil {
|
||||
return fmt.Errorf("error setting studio image: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
// now create the studios pending this studios creation
|
||||
s := pendingParent[studioJSON.Name]
|
||||
for _, childStudioJSON := range s {
|
||||
@@ -307,17 +320,15 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
|
||||
|
||||
// Populate a new movie from the input
|
||||
newMovie := models.Movie{
|
||||
FrontImage: frontimageData,
|
||||
BackImage: backimageData,
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{String: movieJSON.Name, Valid: true},
|
||||
Aliases: sql.NullString{String: movieJSON.Aliases, Valid: true},
|
||||
Date: models.SQLiteDate{String: movieJSON.Date, Valid: true},
|
||||
Director: sql.NullString{String: movieJSON.Director, Valid: true},
|
||||
Synopsis: sql.NullString{String: movieJSON.Synopsis, Valid: true},
|
||||
URL: sql.NullString{String: movieJSON.URL, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.CreatedAt)},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.UpdatedAt)},
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{String: movieJSON.Name, Valid: true},
|
||||
Aliases: sql.NullString{String: movieJSON.Aliases, Valid: true},
|
||||
Date: models.SQLiteDate{String: movieJSON.Date, Valid: true},
|
||||
Director: sql.NullString{String: movieJSON.Director, Valid: true},
|
||||
Synopsis: sql.NullString{String: movieJSON.Synopsis, Valid: true},
|
||||
URL: sql.NullString{String: movieJSON.URL, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.CreatedAt)},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.UpdatedAt)},
|
||||
}
|
||||
|
||||
if movieJSON.Rating != 0 {
|
||||
@@ -327,12 +338,34 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
|
||||
newMovie.Duration = sql.NullInt64{Int64: int64(movieJSON.Duration), Valid: true}
|
||||
}
|
||||
|
||||
_, err = qb.Create(newMovie, tx)
|
||||
// Populate the studio ID
|
||||
if movieJSON.Studio != "" {
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
studio, err := sqb.FindByName(movieJSON.Studio, tx, false)
|
||||
if err != nil {
|
||||
logger.Warnf("[movies] error getting studio <%s>: %s", movieJSON.Studio, err.Error())
|
||||
} else if studio == nil {
|
||||
logger.Warnf("[movies] studio <%s> does not exist", movieJSON.Studio)
|
||||
} else {
|
||||
newMovie.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
||||
}
|
||||
}
|
||||
|
||||
createdMovie, err := qb.Create(newMovie, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[movies] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Add the movie images if set
|
||||
if len(frontimageData) > 0 {
|
||||
if err := qb.UpdateMovieImages(createdMovie.ID, frontimageData, backimageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[movies] <%s> error setting movie images: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[movies] importing")
|
||||
@@ -521,17 +554,18 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
}
|
||||
|
||||
// Process the base 64 encoded cover image string
|
||||
var coverImageData []byte
|
||||
if sceneJSON.Cover != "" {
|
||||
_, coverImageData, err := utils.ProcessBase64Image(sceneJSON.Cover)
|
||||
_, coverImageData, err = utils.ProcessBase64Image(sceneJSON.Cover)
|
||||
if err != nil {
|
||||
logger.Warnf("[scenes] <%s> invalid cover image: %s", mappingJSON.Checksum, err.Error())
|
||||
}
|
||||
if len(coverImageData) > 0 {
|
||||
if err = SetSceneScreenshot(mappingJSON.Checksum, coverImageData); err != nil {
|
||||
logger.Warnf("[scenes] <%s> failed to create cover image: %s", mappingJSON.Checksum, err.Error())
|
||||
} else {
|
||||
newScene.Cover = coverImageData
|
||||
}
|
||||
|
||||
// write the cover image data after creating the scene
|
||||
}
|
||||
}
|
||||
|
||||
@@ -597,7 +631,9 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
studio, err := sqb.FindByName(sceneJSON.Studio, tx, false)
|
||||
if err != nil {
|
||||
logger.Warnf("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err.Error())
|
||||
logger.Warnf("[scenes] error getting studio <%s>: %s", sceneJSON.Studio, err.Error())
|
||||
} else if studio == nil {
|
||||
logger.Warnf("[scenes] studio <%s> does not exist", sceneJSON.Studio)
|
||||
} else {
|
||||
newScene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
||||
}
|
||||
@@ -616,6 +652,15 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
// Add the scene cover if set
|
||||
if len(coverImageData) > 0 {
|
||||
if err := qb.UpdateSceneCover(scene.ID, coverImageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[scenes] <%s> error setting scene cover: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Relate the scene to the gallery
|
||||
if sceneJSON.Gallery != "" {
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
|
||||
@@ -5,39 +5,35 @@ import (
|
||||
)
|
||||
|
||||
type Movie struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
FrontImage []byte `db:"front_image" json:"front_image"`
|
||||
BackImage []byte `db:"back_image" json:"back_image"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Name sql.NullString `db:"name" json:"name"`
|
||||
Aliases sql.NullString `db:"aliases" json:"aliases"`
|
||||
Duration sql.NullInt64 `db:"duration" json:"duration"`
|
||||
Date SQLiteDate `db:"date" json:"date"`
|
||||
Rating sql.NullInt64 `db:"rating" json:"rating"`
|
||||
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||
Director sql.NullString `db:"director" json:"director"`
|
||||
Synopsis sql.NullString `db:"synopsis" json:"synopsis"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
ID int `db:"id" json:"id"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Name sql.NullString `db:"name" json:"name"`
|
||||
Aliases sql.NullString `db:"aliases" json:"aliases"`
|
||||
Duration sql.NullInt64 `db:"duration" json:"duration"`
|
||||
Date SQLiteDate `db:"date" json:"date"`
|
||||
Rating sql.NullInt64 `db:"rating" json:"rating"`
|
||||
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||
Director sql.NullString `db:"director" json:"director"`
|
||||
Synopsis sql.NullString `db:"synopsis" json:"synopsis"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
|
||||
type MoviePartial struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
FrontImage *[]byte `db:"front_image" json:"front_image"`
|
||||
BackImage *[]byte `db:"back_image" json:"back_image"`
|
||||
Checksum *string `db:"checksum" json:"checksum"`
|
||||
Name *sql.NullString `db:"name" json:"name"`
|
||||
Aliases *sql.NullString `db:"aliases" json:"aliases"`
|
||||
Duration *sql.NullInt64 `db:"duration" json:"duration"`
|
||||
Date *SQLiteDate `db:"date" json:"date"`
|
||||
Rating *sql.NullInt64 `db:"rating" json:"rating"`
|
||||
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||
Director *sql.NullString `db:"director" json:"director"`
|
||||
Synopsis *sql.NullString `db:"synopsis" json:"synopsis"`
|
||||
URL *sql.NullString `db:"url" json:"url"`
|
||||
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
ID int `db:"id" json:"id"`
|
||||
Checksum *string `db:"checksum" json:"checksum"`
|
||||
Name *sql.NullString `db:"name" json:"name"`
|
||||
Aliases *sql.NullString `db:"aliases" json:"aliases"`
|
||||
Duration *sql.NullInt64 `db:"duration" json:"duration"`
|
||||
Date *SQLiteDate `db:"date" json:"date"`
|
||||
Rating *sql.NullInt64 `db:"rating" json:"rating"`
|
||||
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||
Director *sql.NullString `db:"director" json:"director"`
|
||||
Synopsis *sql.NullString `db:"synopsis" json:"synopsis"`
|
||||
URL *sql.NullString `db:"url" json:"url"`
|
||||
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
|
||||
var DefaultMovieImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC"
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
|
||||
type Performer struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Image []byte `db:"image" json:"image"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Name sql.NullString `db:"name" json:"name"`
|
||||
Gender sql.NullString `db:"gender" json:"gender"`
|
||||
|
||||
@@ -9,7 +9,6 @@ type Scene struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Path string `db:"path" json:"path"`
|
||||
Cover []byte `db:"cover" json:"cover"`
|
||||
Title sql.NullString `db:"title" json:"title"`
|
||||
Details sql.NullString `db:"details" json:"details"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
@@ -34,7 +33,6 @@ type ScenePartial struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Checksum *string `db:"checksum" json:"checksum"`
|
||||
Path *string `db:"path" json:"path"`
|
||||
Cover *[]byte `db:"cover" json:"cover"`
|
||||
Title *sql.NullString `db:"title" json:"title"`
|
||||
Details *sql.NullString `db:"details" json:"details"`
|
||||
URL *sql.NullString `db:"url" json:"url"`
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
|
||||
type Studio struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Image []byte `db:"image" json:"image"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Name sql.NullString `db:"name" json:"name"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
@@ -17,7 +16,6 @@ type Studio struct {
|
||||
|
||||
type StudioPartial struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Image *[]byte `db:"image" json:"image"`
|
||||
Checksum *string `db:"checksum" json:"checksum"`
|
||||
Name *sql.NullString `db:"name" json:"name"`
|
||||
URL *sql.NullString `db:"url" json:"url"`
|
||||
|
||||
@@ -16,8 +16,8 @@ func NewMovieQueryBuilder() MovieQueryBuilder {
|
||||
func (qb *MovieQueryBuilder) Create(newMovie Movie, tx *sqlx.Tx) (*Movie, error) {
|
||||
ensureTx(tx)
|
||||
result, err := tx.NamedExec(
|
||||
`INSERT INTO movies (front_image, back_image, checksum, name, aliases, duration, date, rating, studio_id, director, synopsis, url, created_at, updated_at)
|
||||
VALUES (:front_image, :back_image, :checksum, :name, :aliases, :duration, :date, :rating, :studio_id, :director, :synopsis, :url, :created_at, :updated_at)
|
||||
`INSERT INTO movies (checksum, name, aliases, duration, date, rating, studio_id, director, synopsis, url, created_at, updated_at)
|
||||
VALUES (:checksum, :name, :aliases, :duration, :date, :rating, :studio_id, :director, :synopsis, :url, :created_at, :updated_at)
|
||||
`,
|
||||
newMovie,
|
||||
)
|
||||
@@ -214,3 +214,42 @@ func (qb *MovieQueryBuilder) queryMovies(query string, args []interface{}, tx *s
|
||||
|
||||
return movies, nil
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing cover and then create new
|
||||
if err := qb.DestroyMovieImages(movieID, tx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := tx.Exec(
|
||||
`INSERT INTO movies_images (movie_id, front_image, back_image) VALUES (?, ?, ?)`,
|
||||
movieID,
|
||||
frontImage,
|
||||
backImage,
|
||||
)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) DestroyMovieImages(movieID int, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing joins
|
||||
_, err := tx.Exec("DELETE FROM movies_images WHERE movie_id = ?", movieID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) GetFrontImage(movieID int, tx *sqlx.Tx) ([]byte, error) {
|
||||
query := `SELECT front_image from movies_images WHERE movie_id = ?`
|
||||
return getImage(tx, query, movieID)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) GetBackImage(movieID int, tx *sqlx.Tx) ([]byte, error) {
|
||||
query := `SELECT back_image from movies_images WHERE movie_id = ?`
|
||||
return getImage(tx, query, movieID)
|
||||
}
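GetFrontImage and GetBackImage both defer to a getImage helper that is not included in this diff. Assuming it simply scans a single blob column, falling back to the global database handle when no transaction is supplied (callers above pass nil), it could look roughly like this sketch (imports: database/sql, github.com/jmoiron/sqlx, and the project's database package):

    // hypothetical sketch of the getImage helper used above; not the actual implementation
    func getImage(tx *sqlx.Tx, query string, id int) ([]byte, error) {
        var image []byte
        var err error
        if tx != nil {
            err = tx.Get(&image, query, id)
        } else {
            err = database.DB.Get(&image, query, id)
        }
        if err == sql.ErrNoRows {
            // no row stored; callers treat a nil slice as "no image"
            return nil, nil
        }
        return image, err
    }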
@@ -3,13 +3,17 @@
|
||||
package models_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func TestMovieFindBySceneID(t *testing.T) {
|
||||
@@ -123,6 +127,146 @@ func TestMovieQueryStudio(t *testing.T) {
	assert.Len(t, movies, 0)
}

func TestMovieUpdateMovieImages(t *testing.T) {
	mqb := models.NewMovieQueryBuilder()

	// create movie to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestMovieUpdateMovieImages"
	movie := models.Movie{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
	}
	created, err := mqb.Create(movie, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating movie: %s", err.Error())
	}

	frontImage := []byte("frontImage")
	backImage := []byte("backImage")
	err = mqb.UpdateMovieImages(created.ID, frontImage, backImage, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating movie images: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// ensure images are set
	storedFront, err := mqb.GetFrontImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting front image: %s", err.Error())
	}
	assert.Equal(t, storedFront, frontImage)

	storedBack, err := mqb.GetBackImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting back image: %s", err.Error())
	}
	assert.Equal(t, storedBack, backImage)

	// set front image only
	newImage := []byte("newImage")
	tx = database.DB.MustBeginTx(ctx, nil)
	err = mqb.UpdateMovieImages(created.ID, newImage, nil, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating movie images: %s", err.Error())
	}

	storedFront, err = mqb.GetFrontImage(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error getting front image: %s", err.Error())
	}
	assert.Equal(t, storedFront, newImage)

	// back image should be nil
	storedBack, err = mqb.GetBackImage(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error getting back image: %s", err.Error())
	}
	assert.Nil(t, storedBack)

	// set back image only
	err = mqb.UpdateMovieImages(created.ID, nil, newImage, tx)
	if err == nil {
		tx.Rollback()
		t.Fatalf("Expected error setting nil front image")
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}
}

func TestMovieDestroyMovieImages(t *testing.T) {
	mqb := models.NewMovieQueryBuilder()

	// create movie to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestMovieDestroyMovieImages"
	movie := models.Movie{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
	}
	created, err := mqb.Create(movie, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating movie: %s", err.Error())
	}

	frontImage := []byte("frontImage")
	backImage := []byte("backImage")
	err = mqb.UpdateMovieImages(created.ID, frontImage, backImage, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating movie images: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	tx = database.DB.MustBeginTx(ctx, nil)

	err = mqb.DestroyMovieImages(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error destroying movie images: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// front image should be nil
	storedFront, err := mqb.GetFrontImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting front image: %s", err.Error())
	}
	assert.Nil(t, storedFront)

	// back image should be nil
	storedBack, err := mqb.GetBackImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting back image: %s", err.Error())
	}
	assert.Nil(t, storedBack)
}

// TODO Update
// TODO Destroy
// TODO Find
@@ -18,10 +18,10 @@ func NewPerformerQueryBuilder() PerformerQueryBuilder {
func (qb *PerformerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO performers (image, checksum, name, url, gender, twitter, instagram, birthdate, ethnicity, country,
		`INSERT INTO performers (checksum, name, url, gender, twitter, instagram, birthdate, ethnicity, country,
			eye_color, height, measurements, fake_tits, career_length, tattoos, piercings,
			aliases, favorite, created_at, updated_at)
		VALUES (:image, :checksum, :name, :url, :gender, :twitter, :instagram, :birthdate, :ethnicity, :country,
		VALUES (:checksum, :name, :url, :gender, :twitter, :instagram, :birthdate, :ethnicity, :country,
			:eye_color, :height, :measurements, :fake_tits, :career_length, :tattoos, :piercings,
			:aliases, :favorite, :created_at, :updated_at)
		`,
@@ -342,3 +342,36 @@ func (qb *PerformerQueryBuilder) queryPerformers(query string, args []interface{

	return performers, nil
}

func (qb *PerformerQueryBuilder) UpdatePerformerImage(performerID int, image []byte, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing image and then create new
	if err := qb.DestroyPerformerImage(performerID, tx); err != nil {
		return err
	}

	_, err := tx.Exec(
		`INSERT INTO performers_image (performer_id, image) VALUES (?, ?)`,
		performerID,
		image,
	)

	return err
}

func (qb *PerformerQueryBuilder) DestroyPerformerImage(performerID int, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing image row
	_, err := tx.Exec("DELETE FROM performers_image WHERE performer_id = ?", performerID)
	if err != nil {
		return err
	}
	return err
}

func (qb *PerformerQueryBuilder) GetPerformerImage(performerID int, tx *sqlx.Tx) ([]byte, error) {
	query := `SELECT image from performers_image WHERE performer_id = ?`
	return getImage(tx, query, performerID)
}
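UpdatePerformerImage (like the movie, scene and studio variants) implements its upsert as delete-then-insert inside the caller's transaction. If performers_image had a UNIQUE constraint on performer_id — this diff does not show the schema, so that is an assumption — the two statements could collapse into a single SQLite upsert. A hedged sketch, not part of the commit:

package models // sketch only; would sit beside the query builders above

import "github.com/jmoiron/sqlx"

// upsertPerformerImage is a hypothetical alternative to the delete-then-insert
// pair; it assumes a UNIQUE index on performer_id and SQLite 3.24+ upsert support.
func upsertPerformerImage(performerID int, image []byte, tx *sqlx.Tx) error {
	_, err := tx.Exec(
		`INSERT INTO performers_image (performer_id, image) VALUES (?, ?)
		 ON CONFLICT(performer_id) DO UPDATE SET image = excluded.image`,
		performerID,
		image,
	)
	return err
}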
@@ -3,12 +3,16 @@
package models_test

import (
	"context"
	"database/sql"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

func TestPerformerFindBySceneID(t *testing.T) {
@@ -103,6 +107,106 @@ func TestPerformerFindByNames(t *testing.T) {

}

func TestPerformerUpdatePerformerImage(t *testing.T) {
	qb := models.NewPerformerQueryBuilder()

	// create performer to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestPerformerUpdatePerformerImage"
	performer := models.Performer{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
		Favorite: sql.NullBool{Bool: false, Valid: true},
	}
	created, err := qb.Create(performer, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating performer: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdatePerformerImage(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating performer image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// ensure image set
	storedImage, err := qb.GetPerformerImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Equal(t, storedImage, image)

	// set nil image
	tx = database.DB.MustBeginTx(ctx, nil)
	err = qb.UpdatePerformerImage(created.ID, nil, tx)
	if err == nil {
		t.Fatalf("Expected error setting nil image")
	}

	tx.Rollback()
}

func TestPerformerDestroyPerformerImage(t *testing.T) {
	qb := models.NewPerformerQueryBuilder()

	// create performer to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestPerformerDestroyPerformerImage"
	performer := models.Performer{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
		Favorite: sql.NullBool{Bool: false, Valid: true},
	}
	created, err := qb.Create(performer, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating performer: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdatePerformerImage(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating performer image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	tx = database.DB.MustBeginTx(ctx, nil)

	err = qb.DestroyPerformerImage(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error destroying performer image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// image should be nil
	storedImage, err := qb.GetPerformerImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Nil(t, storedImage)
}

// TODO Update
// TODO Destroy
// TODO Find
@@ -50,11 +50,9 @@ func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error)
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO scenes (checksum, path, title, details, url, date, rating, o_counter, size, duration, video_codec,
			audio_codec, format, width, height, framerate, bitrate, studio_id, cover,
			created_at, updated_at)
			audio_codec, format, width, height, framerate, bitrate, studio_id, created_at, updated_at)
		VALUES (:checksum, :path, :title, :details, :url, :date, :rating, :o_counter, :size, :duration, :video_codec,
			:audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :cover,
			:created_at, :updated_at)
			:audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :created_at, :updated_at)
		`,
		newScene,
	)
@@ -525,3 +523,36 @@ func (qb *SceneQueryBuilder) UpdateFormat(id int, format string, tx *sqlx.Tx) er

	return nil
}

func (qb *SceneQueryBuilder) UpdateSceneCover(sceneID int, cover []byte, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing cover and then create new
	if err := qb.DestroySceneCover(sceneID, tx); err != nil {
		return err
	}

	_, err := tx.Exec(
		`INSERT INTO scenes_cover (scene_id, cover) VALUES (?, ?)`,
		sceneID,
		cover,
	)

	return err
}

func (qb *SceneQueryBuilder) DestroySceneCover(sceneID int, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing cover row
	_, err := tx.Exec("DELETE FROM scenes_cover WHERE scene_id = ?", sceneID)
	if err != nil {
		return err
	}
	return err
}

func (qb *SceneQueryBuilder) GetSceneCover(sceneID int, tx *sqlx.Tx) ([]byte, error) {
	query := `SELECT cover from scenes_cover WHERE scene_id = ?`
	return getImage(tx, query, sceneID)
}
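With the cover column gone from scenes, callers that used to set the cover on the scene row now write the blob separately, in the same transaction as the scene write. A small sketch under that assumption — not part of the diff; writeSceneCover is an illustrative name:

package example // illustrative only

import (
	"github.com/jmoiron/sqlx"

	"github.com/stashapp/stash/pkg/models"
)

// writeSceneCover stores a scene cover in scenes_cover within the caller's
// transaction; an empty cover is skipped rather than inserted.
func writeSceneCover(tx *sqlx.Tx, sceneID int, cover []byte) error {
	if len(cover) == 0 {
		return nil
	}
	qb := models.NewSceneQueryBuilder()
	return qb.UpdateSceneCover(sceneID, cover, tx)
}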
@@ -3,13 +3,16 @@
package models_test

import (
	"context"
	"database/sql"
	"strconv"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

func TestSceneFind(t *testing.T) {
@@ -894,6 +897,104 @@ func TestFindByStudioID(t *testing.T) {
	assert.Len(t, scenes, 0)
}

func TestSceneUpdateSceneCover(t *testing.T) {
	qb := models.NewSceneQueryBuilder()

	// create scene to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestSceneUpdateSceneCover"
	scene := models.Scene{
		Path:     name,
		Checksum: utils.MD5FromString(name),
	}
	created, err := qb.Create(scene, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating scene: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdateSceneCover(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating scene cover: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// ensure image set
	storedImage, err := qb.GetSceneCover(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Equal(t, storedImage, image)

	// set nil image
	tx = database.DB.MustBeginTx(ctx, nil)
	err = qb.UpdateSceneCover(created.ID, nil, tx)
	if err == nil {
		t.Fatalf("Expected error setting nil image")
	}

	tx.Rollback()
}

func TestSceneDestroySceneCover(t *testing.T) {
	qb := models.NewSceneQueryBuilder()

	// create scene to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestSceneDestroySceneCover"
	scene := models.Scene{
		Path:     name,
		Checksum: utils.MD5FromString(name),
	}
	created, err := qb.Create(scene, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating scene: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdateSceneCover(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating scene image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	tx = database.DB.MustBeginTx(ctx, nil)

	err = qb.DestroySceneCover(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error destroying scene cover: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// image should be nil
	storedImage, err := qb.GetSceneCover(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Nil(t, storedImage)
}

// TODO Update
// TODO IncrementOCounter
// TODO DecrementOCounter
@@ -418,3 +418,31 @@ func sqlGenKeys(i interface{}, partial bool) string {
	}
	return strings.Join(query, ", ")
}

func getImage(tx *sqlx.Tx, query string, args ...interface{}) ([]byte, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	var ret []byte
	if rows.Next() {
		if err := rows.Scan(&ret); err != nil {
			return nil, err
		}
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return ret, nil
}
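Note that getImage returns (nil, nil) when no row exists, which is what lets the tests assert a nil image after a Destroy call instead of handling a "not found" error. Each per-entity getter is then a one-line query on top of it; a hypothetical further getter, shown only to illustrate the shared pattern (the galleries_image table and GetGalleryImage are not part of this change):

package models // sketch only; getImage is unexported, so this would live beside it

import "github.com/jmoiron/sqlx"

// GetGalleryImage is a hypothetical getter following the same pattern as the
// movie, performer, scene and studio getters added in this change.
func GetGalleryImage(galleryID int, tx *sqlx.Tx) ([]byte, error) {
	query := `SELECT image from galleries_image WHERE gallery_id = ?`
	return getImage(tx, query, galleryID)
}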
@@ -16,8 +16,8 @@ func NewStudioQueryBuilder() StudioQueryBuilder {
func (qb *StudioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO studios (image, checksum, name, url, parent_id, created_at, updated_at)
			VALUES (:image, :checksum, :name, :url, :parent_id, :created_at, :updated_at)
		`INSERT INTO studios (checksum, name, url, parent_id, created_at, updated_at)
			VALUES (:checksum, :name, :url, :parent_id, :created_at, :updated_at)
		`,
		newStudio,
	)
@@ -208,3 +208,36 @@ func (qb *StudioQueryBuilder) queryStudios(query string, args []interface{}, tx

	return studios, nil
}

func (qb *StudioQueryBuilder) UpdateStudioImage(studioID int, image []byte, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing image and then create new
	if err := qb.DestroyStudioImage(studioID, tx); err != nil {
		return err
	}

	_, err := tx.Exec(
		`INSERT INTO studios_image (studio_id, image) VALUES (?, ?)`,
		studioID,
		image,
	)

	return err
}

func (qb *StudioQueryBuilder) DestroyStudioImage(studioID int, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing image row
	_, err := tx.Exec("DELETE FROM studios_image WHERE studio_id = ?", studioID)
	if err != nil {
		return err
	}
	return err
}

func (qb *StudioQueryBuilder) GetStudioImage(studioID int, tx *sqlx.Tx) ([]byte, error) {
	query := `SELECT image from studios_image WHERE studio_id = ?`
	return getImage(tx, query, studioID)
}
@@ -210,6 +210,96 @@ func TestStudioUpdateClearParent(t *testing.T) {
	}
}

func TestStudioUpdateStudioImage(t *testing.T) {
	qb := models.NewStudioQueryBuilder()

	// create studio to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestStudioUpdateStudioImage"
	created, err := createStudio(tx, name, nil)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating studio: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdateStudioImage(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating studio image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// ensure image set
	storedImage, err := qb.GetStudioImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Equal(t, storedImage, image)

	// set nil image
	tx = database.DB.MustBeginTx(ctx, nil)
	err = qb.UpdateStudioImage(created.ID, nil, tx)
	if err == nil {
		t.Fatalf("Expected error setting nil image")
	}

	tx.Rollback()
}

func TestStudioDestroyStudioImage(t *testing.T) {
	qb := models.NewStudioQueryBuilder()

	// create studio to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestStudioDestroyStudioImage"
	created, err := createStudio(tx, name, nil)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating studio: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdateStudioImage(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating studio image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	tx = database.DB.MustBeginTx(ctx, nil)

	err = qb.DestroyStudioImage(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error destroying studio image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// image should be nil
	storedImage, err := qb.GetStudioImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Nil(t, storedImage)
}

// TODO Create
// TODO Update
// TODO Destroy
@@ -338,9 +338,8 @@ func createMovies(tx *sqlx.Tx, n int, o int) error {

		name = getMovieStringValue(index, name)
		movie := models.Movie{
			Name:       sql.NullString{String: name, Valid: true},
			FrontImage: []byte(models.DefaultMovieImage),
			Checksum:   utils.MD5FromString(name),
			Name:     sql.NullString{String: name, Valid: true},
			Checksum: utils.MD5FromString(name),
		}

		created, err := mqb.Create(movie, tx)
@@ -385,8 +384,6 @@ func createPerformers(tx *sqlx.Tx, n int, o int) error {
		performer := models.Performer{
			Name:     sql.NullString{String: getPerformerStringValue(index, name), Valid: true},
			Checksum: getPerformerStringValue(i, checksumField),
			// just use movie image
			Image:    []byte(models.DefaultMovieImage),
			Favorite: sql.NullBool{Bool: getPerformerBoolValue(i), Valid: true},
		}

@@ -450,7 +447,6 @@ func createStudio(tx *sqlx.Tx, name string, parentID *int64) (*models.Studio, er
	sqb := models.NewStudioQueryBuilder()
	studio := models.Studio{
		Name:     sql.NullString{String: name, Valid: true},
		Image:    []byte(models.DefaultStudioImage),
		Checksum: utils.MD5FromString(name),
	}

@@ -1,9 +1,12 @@
package utils

import (
	"crypto/md5"
	"encoding/base64"
	"fmt"
	"net/http"
	"regexp"
	"strings"
)

// ProcessBase64Image transforms a base64 encoded string from a form post and returns the MD5 hash of the data and the
@@ -45,3 +48,18 @@ func GetBase64StringFromData(data []byte) string {
	//}
	//return result
}

func ServeImage(image []byte, w http.ResponseWriter, r *http.Request) error {
	etag := fmt.Sprintf("%x", md5.Sum(image))

	if match := r.Header.Get("If-None-Match"); match != "" {
		if strings.Contains(match, etag) {
			w.WriteHeader(http.StatusNotModified)
			return nil
		}
	}

	w.Header().Add("Etag", etag)
	_, err := w.Write(image)
	return err
}
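ServeImage is the read-side counterpart of the table-backed blobs: fetch the image through a query builder, then let ServeImage handle the ETag / 304 handshake. A hypothetical handler sketch, not part of the commit; the handler name and the "id" query-parameter handling are illustrative:

package example // illustrative only

import (
	"net/http"
	"strconv"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

// sceneCoverHandler serves a scene cover from the scenes_cover table, letting
// utils.ServeImage add the ETag and answer If-None-Match with 304.
func sceneCoverHandler(w http.ResponseWriter, r *http.Request) {
	sceneID, err := strconv.Atoi(r.URL.Query().Get("id"))
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	qb := models.NewSceneQueryBuilder()
	cover, err := qb.GetSceneCover(sceneID, nil) // nil tx reads via database.DB
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if len(cover) == 0 {
		http.NotFound(w, r)
		return
	}

	if err := utils.ServeImage(cover, w, r); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}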