Add oshash support (#667)

This commit is contained in:
WithoutPants
2020-08-06 11:21:14 +10:00
committed by GitHub
parent f59ad0ca2b
commit 5992ff8706
50 changed files with 1276 additions and 195 deletions

View File

@@ -3,6 +3,8 @@ fragment ConfigGeneralData on ConfigGeneralResult {
databasePath databasePath
generatedPath generatedPath
cachePath cachePath
calculateMD5
videoFileNamingAlgorithm
previewSegments previewSegments
previewSegmentDuration previewSegmentDuration
previewExcludeStart previewExcludeStart

View File

@@ -1,6 +1,7 @@
fragment SlimSceneData on Scene { fragment SlimSceneData on Scene {
id id
checksum checksum
oshash
title title
details details
url url

View File

@@ -1,6 +1,7 @@
fragment SceneData on Scene { fragment SceneData on Scene {
id id
checksum checksum
oshash
title title
details details
url url

View File

@@ -22,6 +22,10 @@ mutation MetadataClean {
metadataClean metadataClean
} }
mutation MigrateHashNaming {
migrateHashNaming
}
mutation StopJob { mutation StopJob {
stopJob stopJob
} }

View File

@@ -2,6 +2,8 @@
type Query { type Query {
"""Find a scene by ID or Checksum""" """Find a scene by ID or Checksum"""
findScene(id: ID, checksum: String): Scene findScene(id: ID, checksum: String): Scene
findSceneByHash(input: SceneHashInput!): Scene
"""A function which queries Scene objects""" """A function which queries Scene objects"""
findScenes(scene_filter: SceneFilterType, scene_ids: [Int!], filter: FindFilterType): FindScenesResultType! findScenes(scene_filter: SceneFilterType, scene_ids: [Int!], filter: FindFilterType): FindScenesResultType!
@@ -158,6 +160,8 @@ type Mutation {
metadataAutoTag(input: AutoTagMetadataInput!): String! metadataAutoTag(input: AutoTagMetadataInput!): String!
"""Clean metadata. Returns the job ID""" """Clean metadata. Returns the job ID"""
metadataClean: String! metadataClean: String!
"""Migrate generated files for the current hash naming"""
migrateHashNaming: String!
"""Reload scrapers""" """Reload scrapers"""
reloadScrapers: Boolean! reloadScrapers: Boolean!

View File

@@ -17,6 +17,11 @@ enum PreviewPreset {
"X264_VERYSLOW", veryslow "X264_VERYSLOW", veryslow
} }
enum HashAlgorithm {
MD5
"oshash", OSHASH
}
input ConfigGeneralInput { input ConfigGeneralInput {
"""Array of file paths to content""" """Array of file paths to content"""
stashes: [String!] stashes: [String!]
@@ -26,6 +31,10 @@ input ConfigGeneralInput {
generatedPath: String generatedPath: String
"""Path to cache""" """Path to cache"""
cachePath: String cachePath: String
"""Whether to calculate MD5 checksums for scene video files"""
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of segments in a preview file""" """Number of segments in a preview file"""
previewSegments: Int previewSegments: Int
"""Preview segment duration, in seconds""" """Preview segment duration, in seconds"""
@@ -71,6 +80,10 @@ type ConfigGeneralResult {
generatedPath: String! generatedPath: String!
"""Path to cache""" """Path to cache"""
cachePath: String! cachePath: String!
"""Whether to calculate MD5 checksums for scene video files"""
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of segments in a preview file""" """Number of segments in a preview file"""
previewSegments: Int! previewSegments: Int!
"""Preview segment duration, in seconds""" """Preview segment duration, in seconds"""

View File

@@ -25,7 +25,8 @@ type SceneMovie {
type Scene { type Scene {
id: ID! id: ID!
checksum: String! checksum: String
oshash: String
title: String title: String
details: String details: String
url: String url: String
@@ -139,6 +140,11 @@ type SceneParserResultType {
results: [SceneParserResult!]! results: [SceneParserResult!]!
} }
input SceneHashInput {
checksum: String
oshash: String
}
type SceneStreamEndpoint { type SceneStreamEndpoint {
url: String! url: String!
mime_type: String mime_type: String

View File

@@ -13,7 +13,12 @@ import (
func main() { func main() {
manager.Initialize() manager.Initialize()
database.Initialize(config.GetDatabasePath())
// perform the post-migration for new databases
if database.Initialize(config.GetDatabasePath()) {
manager.GetInstance().PostMigrate()
}
api.Start() api.Start()
blockForever() blockForever()
} }

View File

@@ -8,6 +8,7 @@ import (
"github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
) )
type migrateData struct { type migrateData struct {
@@ -80,6 +81,9 @@ func doMigrateHandler(w http.ResponseWriter, r *http.Request) {
return return
} }
// perform post-migration operations
manager.GetInstance().PostMigrate()
// if no backup path was provided, then delete the created backup // if no backup path was provided, then delete the created backup
if formBackupPath == "" { if formBackupPath == "" {
err = os.Remove(backupPath) err = os.Remove(backupPath)

View File

@@ -8,6 +8,20 @@ import (
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
// Checksum resolves the scene's optional MD5 checksum. It returns nil when
// the underlying database column is NULL (scenes may carry only an oshash).
func (r *sceneResolver) Checksum(ctx context.Context, obj *models.Scene) (*string, error) {
	if !obj.Checksum.Valid {
		return nil, nil
	}
	return &obj.Checksum.String, nil
}
// Oshash resolves the scene's optional oshash value. It returns nil when the
// underlying database column is NULL (scenes may carry only an MD5 checksum).
func (r *sceneResolver) Oshash(ctx context.Context, obj *models.Scene) (*string, error) {
	if !obj.OSHash.Valid {
		return nil, nil
	}
	return &obj.OSHash.String, nil
}
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Title.Valid { if obj.Title.Valid {
return &obj.Title.String, nil return &obj.Title.String, nil

View File

@@ -2,6 +2,7 @@ package api
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"path/filepath" "path/filepath"
@@ -45,6 +46,21 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Cache, input.CachePath) config.Set(config.Cache, input.CachePath)
} }
if !input.CalculateMd5 && input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
}
if input.VideoFileNamingAlgorithm != config.GetVideoFileNamingAlgorithm() {
// validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(input.VideoFileNamingAlgorithm); err != nil {
return makeConfigGeneralResult(), err
}
config.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm)
}
config.Set(config.CalculateMD5, input.CalculateMd5)
if input.PreviewSegments != nil { if input.PreviewSegments != nil {
config.Set(config.PreviewSegments, *input.PreviewSegments) config.Set(config.PreviewSegments, *input.PreviewSegments)
} }

View File

@@ -37,6 +37,11 @@ func (r *mutationResolver) MetadataClean(ctx context.Context) (string, error) {
return "todo", nil return "todo", nil
} }
// MigrateHashNaming starts the background job that renames existing generated
// files to match the currently configured hash naming algorithm.
// NOTE(review): returns the literal "todo" like the sibling metadata
// mutations in this file, rather than a real job ID — confirm intended.
func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error) {
	manager.GetInstance().MigrateHash()
	return "todo", nil
}
func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) { func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
status := manager.GetInstance().Status status := manager.GetInstance().Status
ret := models.MetadataUpdateStatus{ ret := models.MetadataUpdateStatus{

View File

@@ -10,6 +10,7 @@ import (
"github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager" "github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
@@ -197,7 +198,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
// only update the cover image if provided and everything else was successful // only update the cover image if provided and everything else was successful
if coverImageData != nil { if coverImageData != nil {
err = manager.SetSceneScreenshot(scene.Checksum, coverImageData) err = manager.SetSceneScreenshot(scene.GetHash(config.GetVideoFileNamingAlgorithm()), coverImageData)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -417,7 +418,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// if delete generated is true, then delete the generated files // if delete generated is true, then delete the generated files
// for the scene // for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated { if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene) manager.DeleteGeneratedSceneFiles(scene, config.GetVideoFileNamingAlgorithm())
} }
// if delete file is true, then delete the file as well // if delete file is true, then delete the file as well
@@ -453,11 +454,12 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
return false, err return false, err
} }
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
for _, scene := range scenes { for _, scene := range scenes {
// if delete generated is true, then delete the generated files // if delete generated is true, then delete the generated files
// for the scene // for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated { if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene) manager.DeleteGeneratedSceneFiles(scene, fileNamingAlgo)
} }
// if delete file is true, then delete the file as well // if delete file is true, then delete the file as well
@@ -528,7 +530,7 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
if scene != nil { if scene != nil {
seconds := int(marker.Seconds) seconds := int(marker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds) manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
} }
return true, nil return true, nil
@@ -597,7 +599,7 @@ func changeMarker(ctx context.Context, changeType int, changedMarker models.Scen
if scene != nil { if scene != nil {
seconds := int(existingMarker.Seconds) seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds) manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
} }
} }

View File

@@ -47,6 +47,8 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
DatabasePath: config.GetDatabasePath(), DatabasePath: config.GetDatabasePath(),
GeneratedPath: config.GetGeneratedPath(), GeneratedPath: config.GetGeneratedPath(),
CachePath: config.GetCachePath(), CachePath: config.GetCachePath(),
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
PreviewSegments: config.GetPreviewSegments(), PreviewSegments: config.GetPreviewSegments(),
PreviewSegmentDuration: config.GetPreviewSegmentDuration(), PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
PreviewExcludeStart: config.GetPreviewExcludeStart(), PreviewExcludeStart: config.GetPreviewExcludeStart(),

View File

@@ -21,6 +21,28 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
return scene, err return scene, err
} }
// FindSceneByHash looks a scene up by hash. The MD5 checksum is tried first
// when provided; if it yields no match, the oshash is tried. Returns a nil
// scene (and nil error) when neither hash matches an existing scene.
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) {
	qb := models.NewSceneQueryBuilder()

	if input.Checksum != nil {
		found, err := qb.FindByChecksum(*input.Checksum)
		if err != nil {
			return nil, err
		}
		if found != nil {
			return found, nil
		}
	}

	if input.Oshash != nil {
		found, err := qb.FindByOSHash(*input.Oshash)
		if err != nil {
			return nil, err
		}
		return found, nil
	}

	return nil, nil
}
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (*models.FindScenesResultType, error) { func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (*models.FindScenesResultType, error) {
qb := models.NewSceneQueryBuilder() qb := models.NewSceneQueryBuilder()
scenes, total := qb.Query(sceneFilter, filter) scenes, total := qb.Query(sceneFilter, filter)

View File

@@ -67,8 +67,9 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
manager.RegisterStream(filepath, &w) manager.RegisterStream(filepath, &w)
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
manager.WaitAndDeregisterStream(filepath, &w, r) manager.WaitAndDeregisterStream(filepath, &w, r)
@@ -171,7 +172,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
// fall back to the scene image blob if the file isn't present // fall back to the scene image blob if the file isn't present
screenshotExists, _ := utils.FileExists(filepath) screenshotExists, _ := utils.FileExists(filepath)
@@ -186,13 +187,13 @@ func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
utils.ServeFileNoCache(w, r, filepath) utils.ServeFileNoCache(w, r, filepath)
} }
func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} }
@@ -248,14 +249,14 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "text/vtt") w.Header().Set("Content-Type", "text/vtt")
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} }
func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "image/jpeg") w.Header().Set("Content-Type", "image/jpeg")
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} }
@@ -269,7 +270,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)
return return
} }
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, int(sceneMarker.Seconds)) filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} }
@@ -283,7 +284,7 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)
return return
} }
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, int(sceneMarker.Seconds)) filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
// If the image doesn't exist, send the placeholder // If the image doesn't exist, send the placeholder
exists, _ := utils.FileExists(filepath) exists, _ := utils.FileExists(filepath)

View File

@@ -19,7 +19,7 @@ import (
var DB *sqlx.DB var DB *sqlx.DB
var dbPath string var dbPath string
var appSchemaVersion uint = 11 var appSchemaVersion uint = 12
var databaseSchemaVersion uint var databaseSchemaVersion uint
const sqlite3Driver = "sqlite3ex" const sqlite3Driver = "sqlite3ex"
@@ -29,7 +29,11 @@ func init() {
registerCustomDriver() registerCustomDriver()
} }
func Initialize(databasePath string) { // Initialize initializes the database. If the database is new, then it
// performs a full migration to the latest schema version. Otherwise, any
// necessary migrations must be run separately using RunMigrations.
// Returns true if the database is new.
func Initialize(databasePath string) bool {
dbPath = databasePath dbPath = databasePath
if err := getDatabaseSchemaVersion(); err != nil { if err := getDatabaseSchemaVersion(); err != nil {
@@ -42,7 +46,7 @@ func Initialize(databasePath string) {
panic(err) panic(err)
} }
// RunMigrations calls Initialise. Just return // RunMigrations calls Initialise. Just return
return return true
} else { } else {
if databaseSchemaVersion > appSchemaVersion { if databaseSchemaVersion > appSchemaVersion {
panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion)) panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion))
@@ -51,12 +55,14 @@ func Initialize(databasePath string) {
// if migration is needed, then don't open the connection // if migration is needed, then don't open the connection
if NeedsMigration() { if NeedsMigration() {
logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion) logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion)
return return false
} }
} }
const disableForeignKeys = false const disableForeignKeys = false
DB = open(databasePath, disableForeignKeys) DB = open(databasePath, disableForeignKeys)
return false
} }
func open(databasePath string, disableForeignKeys bool) *sqlx.DB { func open(databasePath string, disableForeignKeys bool) *sqlx.DB {

View File

@@ -0,0 +1,219 @@
-- Schema 12 migration: add oshash support to scenes.
-- SQLite cannot relax a NOT NULL constraint or add a table CHECK in place,
-- so the scenes table is rebuilt (rename old -> create new -> copy -> drop),
-- and every table with a foreign key to scenes is rebuilt the same way so
-- its references point at the new scenes table.
-- need to change scenes.checksum to be nullable
ALTER TABLE `scenes` rename to `_scenes_old`;
CREATE TABLE `scenes` (
`id` integer not null primary key autoincrement,
`path` varchar(510) not null,
-- nullable
`checksum` varchar(255),
-- add oshash
`oshash` varchar(255),
`title` varchar(255),
`details` text,
`url` varchar(255),
`date` date,
`rating` tinyint,
`size` varchar(255),
`duration` float,
`video_codec` varchar(255),
`audio_codec` varchar(255),
`width` tinyint,
`height` tinyint,
`framerate` float,
`bitrate` integer,
`studio_id` integer,
`o_counter` tinyint not null default 0,
`format` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL,
-- add check to ensure at least one hash is set
CHECK (`checksum` is not null or `oshash` is not null)
);
-- the rename carried the old indexes along; drop and recreate them against
-- the new table, adding a unique index for the new oshash column
DROP INDEX IF EXISTS `scenes_path_unique`;
DROP INDEX IF EXISTS `scenes_checksum_unique`;
DROP INDEX IF EXISTS `index_scenes_on_studio_id`;
CREATE UNIQUE INDEX `scenes_path_unique` on `scenes` (`path`);
CREATE UNIQUE INDEX `scenes_checksum_unique` on `scenes` (`checksum`);
CREATE UNIQUE INDEX `scenes_oshash_unique` on `scenes` (`oshash`);
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);
-- recreate the tables referencing scenes to correct their references
ALTER TABLE `galleries` rename to `_galleries_old`;
ALTER TABLE `performers_scenes` rename to `_performers_scenes_old`;
ALTER TABLE `scene_markers` rename to `_scene_markers_old`;
ALTER TABLE `scene_markers_tags` rename to `_scene_markers_tags_old`;
ALTER TABLE `scenes_tags` rename to `_scenes_tags_old`;
ALTER TABLE `movies_scenes` rename to `_movies_scenes_old`;
ALTER TABLE `scenes_cover` rename to `_scenes_cover_old`;
CREATE TABLE `galleries` (
`id` integer not null primary key autoincrement,
`path` varchar(510) not null,
`checksum` varchar(255) not null,
`scene_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX IF EXISTS `index_galleries_on_scene_id`;
DROP INDEX IF EXISTS `galleries_path_unique`;
DROP INDEX IF EXISTS `galleries_checksum_unique`;
CREATE INDEX `index_galleries_on_scene_id` on `galleries` (`scene_id`);
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
CREATE TABLE `performers_scenes` (
`performer_id` integer,
`scene_id` integer,
foreign key(`performer_id`) references `performers`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX `index_performers_scenes_on_scene_id`;
DROP INDEX `index_performers_scenes_on_performer_id`;
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
CREATE TABLE `scene_markers` (
`id` integer not null primary key autoincrement,
`title` varchar(255) not null,
`seconds` float not null,
`primary_tag_id` integer not null,
`scene_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`primary_tag_id`) references `tags`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX `index_scene_markers_on_scene_id`;
DROP INDEX `index_scene_markers_on_primary_tag_id`;
CREATE INDEX `index_scene_markers_on_scene_id` on `scene_markers` (`scene_id`);
CREATE INDEX `index_scene_markers_on_primary_tag_id` on `scene_markers` (`primary_tag_id`);
CREATE TABLE `scene_markers_tags` (
`scene_marker_id` integer,
`tag_id` integer,
foreign key(`scene_marker_id`) references `scene_markers`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`)
);
DROP INDEX `index_scene_markers_tags_on_tag_id`;
DROP INDEX `index_scene_markers_tags_on_scene_marker_id`;
CREATE INDEX `index_scene_markers_tags_on_tag_id` on `scene_markers_tags` (`tag_id`);
CREATE INDEX `index_scene_markers_tags_on_scene_marker_id` on `scene_markers_tags` (`scene_marker_id`);
CREATE TABLE `scenes_tags` (
`scene_id` integer,
`tag_id` integer,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`)
);
DROP INDEX `index_scenes_tags_on_tag_id`;
DROP INDEX `index_scenes_tags_on_scene_id`;
CREATE INDEX `index_scenes_tags_on_tag_id` on `scenes_tags` (`tag_id`);
CREATE INDEX `index_scenes_tags_on_scene_id` on `scenes_tags` (`scene_id`);
CREATE TABLE `movies_scenes` (
`movie_id` integer,
`scene_id` integer,
`scene_index` tinyint,
foreign key(`movie_id`) references `movies`(`id`) on delete cascade,
foreign key(`scene_id`) references `scenes`(`id`) on delete cascade
);
DROP INDEX `index_movies_scenes_on_movie_id`;
DROP INDEX `index_movies_scenes_on_scene_id`;
CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
CREATE TABLE `scenes_cover` (
`scene_id` integer,
`cover` blob not null,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);
DROP INDEX `index_scene_covers_on_scene_id`;
CREATE UNIQUE INDEX `index_scene_covers_on_scene_id` on `scenes_cover` (`scene_id`);
-- now populate from the old tables
-- these tables are changed so require the full column def
-- note: oshash is deliberately omitted so it starts NULL; it is populated
-- later by the scan task
INSERT INTO `scenes`
(
`id`,
`path`,
`checksum`,
`title`,
`details`,
`url`,
`date`,
`rating`,
`size`,
`duration`,
`video_codec`,
`audio_codec`,
`width`,
`height`,
`framerate`,
`bitrate`,
`studio_id`,
`o_counter`,
`format`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`path`,
`checksum`,
`title`,
`details`,
`url`,
`date`,
`rating`,
`size`,
`duration`,
`video_codec`,
`audio_codec`,
`width`,
`height`,
`framerate`,
`bitrate`,
`studio_id`,
`o_counter`,
`format`,
`created_at`,
`updated_at`
FROM `_scenes_old`;
-- these tables are a direct copy
-- (SELECT * is safe here because the new definitions keep the same columns
-- in the same order as the old tables)
INSERT INTO `galleries` SELECT * from `_galleries_old`;
INSERT INTO `performers_scenes` SELECT * from `_performers_scenes_old`;
INSERT INTO `scene_markers` SELECT * from `_scene_markers_old`;
INSERT INTO `scene_markers_tags` SELECT * from `_scene_markers_tags_old`;
INSERT INTO `scenes_tags` SELECT * from `_scenes_tags_old`;
INSERT INTO `movies_scenes` SELECT * from `_movies_scenes_old`;
INSERT INTO `scenes_cover` SELECT * from `_scenes_cover_old`;
-- drop old tables
DROP TABLE `_scenes_old`;
DROP TABLE `_galleries_old`;
DROP TABLE `_performers_scenes_old`;
DROP TABLE `_scene_markers_old`;
DROP TABLE `_scene_markers_tags_old`;
DROP TABLE `_scenes_tags_old`;
DROP TABLE `_movies_scenes_old`;
DROP TABLE `_scenes_cover_old`;

70
pkg/manager/checksum.go Normal file
View File

@@ -0,0 +1,70 @@
package manager
import (
"errors"
"github.com/spf13/viper"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
)
// setInitialMD5Config seeds the hash-related configuration defaults based on
// whether the database already contains scenes. A database with existing
// scenes keeps MD5 naming (and MD5 calculation) for backwards compatibility;
// a fresh database defaults to oshash with MD5 calculation disabled.
func setInitialMD5Config() {
	sqb := models.NewSceneQueryBuilder()
	sceneCount, err := sqb.Count()
	if err != nil {
		logger.Errorf("Error while counting scenes: %s", err.Error())
		return
	}

	// any existing scenes imply generated files were named using MD5
	hasScenes := sceneCount != 0

	algorithm := models.HashAlgorithmMd5
	if !hasScenes {
		algorithm = models.HashAlgorithmOshash
	}

	viper.SetDefault(config.VideoFileNamingAlgorithm, algorithm)
	viper.SetDefault(config.CalculateMD5, hasScenes)

	// persist the chosen defaults to the configuration file
	if err := config.Write(); err != nil {
		logger.Errorf("Error while writing configuration file: %s", err.Error())
	}
}
// ValidateVideoFileNamingAlgorithm validates changing the
// VideoFileNamingAlgorithm configuration flag.
//
// If setting VideoFileNamingAlgorithm to MD5, then this function will ensure
// that all checksum values are set on all scenes.
//
// Likewise, if VideoFileNamingAlgorithm is set to oshash, then this function
// will ensure that all oshash values are set on all scenes.
func ValidateVideoFileNamingAlgorithm(newValue models.HashAlgorithm) error {
	qb := models.NewSceneQueryBuilder()

	switch newValue {
	case models.HashAlgorithmMd5:
		// every scene must already have a checksum before MD5 naming is usable
		missing, err := qb.CountMissingChecksum()
		if err != nil {
			return err
		}
		if missing > 0 {
			return errors.New("some checksums are missing on scenes. Run Scan with calculateMD5 set to true")
		}
	case models.HashAlgorithmOshash:
		// every scene must already have an oshash before oshash naming is usable
		missing, err := qb.CountMissingOSHash()
		if err != nil {
			return err
		}
		if missing > 0 {
			return errors.New("some oshash values are missing on scenes. Run Scan to populate")
		}
	}

	return nil
}

View File

@@ -27,6 +27,14 @@ const Database = "database"
const Exclude = "exclude" const Exclude = "exclude"
// CalculateMD5 is the config key used to determine if MD5 should be calculated
// for video files.
const CalculateMD5 = "calculate_md5"
// VideoFileNamingAlgorithm is the config key used to determine what hash
// should be used when generating and using generated files for scenes.
const VideoFileNamingAlgorithm = "video_file_naming_algorithm"
const PreviewPreset = "preview_preset" const PreviewPreset = "preview_preset"
const MaxTranscodeSize = "max_transcode_size" const MaxTranscodeSize = "max_transcode_size"
@@ -151,6 +159,25 @@ func GetLanguage() string {
return ret return ret
} }
// IsCalculateMD5 returns true if MD5 checksums should be generated for
// scene video files.
func IsCalculateMD5() bool {
return viper.GetBool(CalculateMD5)
}
// GetVideoFileNamingAlgorithm returns what hash algorithm should be used for
// naming generated scene video files.
func GetVideoFileNamingAlgorithm() models.HashAlgorithm {
ret := viper.GetString(VideoFileNamingAlgorithm)
// default to oshash
if ret == "" {
return models.HashAlgorithmOshash
}
return models.HashAlgorithm(ret)
}
func GetScrapersPath() string { func GetScrapersPath() string {
return viper.GetString(ScrapersPath) return viper.GetString(ScrapersPath)
} }

View File

@@ -11,6 +11,7 @@ const (
Clean JobStatus = 5 Clean JobStatus = 5
Scrape JobStatus = 6 Scrape JobStatus = 6
AutoTag JobStatus = 7 AutoTag JobStatus = 7
Migrate JobStatus = 8
) )
func (s JobStatus) String() string { func (s JobStatus) String() string {
@@ -29,6 +30,10 @@ func (s JobStatus) String() string {
statusMessage = "Generate" statusMessage = "Generate"
case AutoTag: case AutoTag:
statusMessage = "Auto Tag" statusMessage = "Auto Tag"
case Migrate:
statusMessage = "Migrate"
case Clean:
statusMessage = "Clean"
} }
return statusMessage return statusMessage

View File

@@ -2,9 +2,9 @@ package jsonschema
import ( import (
"fmt" "fmt"
"github.com/json-iterator/go"
"os" "os"
jsoniter "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
@@ -36,6 +36,8 @@ type SceneMovie struct {
type Scene struct { type Scene struct {
Title string `json:"title,omitempty"` Title string `json:"title,omitempty"`
Checksum string `json:"checksum,omitempty"`
OSHash string `json:"oshash,omitempty"`
Studio string `json:"studio,omitempty"` Studio string `json:"studio,omitempty"`
URL string `json:"url,omitempty"` URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"` Date string `json:"date,omitempty"`

View File

@@ -106,6 +106,8 @@ func (s *singleton) Scan(useFileMetadata bool) {
var wg sync.WaitGroup var wg sync.WaitGroup
s.Status.Progress = 0 s.Status.Progress = 0
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
calculateMD5 := config.IsCalculateMD5()
for i, path := range results { for i, path := range results {
s.Status.setProgress(i, total) s.Status.setProgress(i, total)
if s.Status.stopping { if s.Status.stopping {
@@ -113,7 +115,7 @@ func (s *singleton) Scan(useFileMetadata bool) {
return return
} }
wg.Add(1) wg.Add(1)
task := ScanTask{FilePath: path, UseFileMetadata: useFileMetadata} task := ScanTask{FilePath: path, UseFileMetadata: useFileMetadata, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5}
go task.Start(&wg) go task.Start(&wg)
wg.Wait() wg.Wait()
} }
@@ -143,7 +145,7 @@ func (s *singleton) Import() {
var wg sync.WaitGroup var wg sync.WaitGroup
wg.Add(1) wg.Add(1)
task := ImportTask{} task := ImportTask{fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()}
go task.Start(&wg) go task.Start(&wg)
wg.Wait() wg.Wait()
}() }()
@@ -161,7 +163,7 @@ func (s *singleton) Export() {
var wg sync.WaitGroup var wg sync.WaitGroup
wg.Add(1) wg.Add(1)
task := ExportTask{} task := ExportTask{fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()}
go task.Start(&wg) go task.Start(&wg)
wg.Wait() wg.Wait()
}() }()
@@ -271,6 +273,8 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes) logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes)
} }
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
overwrite := false overwrite := false
if input.Overwrite != nil { if input.Overwrite != nil {
overwrite = *input.Overwrite overwrite = *input.Overwrite
@@ -302,7 +306,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
} }
if input.Sprites { if input.Sprites {
task := GenerateSpriteTask{Scene: *scene, Overwrite: overwrite} task := GenerateSpriteTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg) go task.Start(&wg)
} }
@@ -312,17 +316,18 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
ImagePreview: input.ImagePreviews, ImagePreview: input.ImagePreviews,
Options: *generatePreviewOptions, Options: *generatePreviewOptions,
Overwrite: overwrite, Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
} }
go task.Start(&wg) go task.Start(&wg)
} }
if input.Markers { if input.Markers {
task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite} task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg) go task.Start(&wg)
} }
if input.Transcodes { if input.Transcodes {
task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite} task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg) go task.Start(&wg)
} }
@@ -363,7 +368,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
} }
wg.Add(1) wg.Add(1)
task := GenerateMarkersTask{Marker: marker, Overwrite: overwrite} task := GenerateMarkersTask{Marker: marker, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg) go task.Start(&wg)
wg.Wait() wg.Wait()
} }
@@ -409,6 +414,7 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
task := GenerateScreenshotTask{ task := GenerateScreenshotTask{
Scene: *scene, Scene: *scene,
ScreenshotAt: at, ScreenshotAt: at,
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
} }
var wg sync.WaitGroup var wg sync.WaitGroup
@@ -620,6 +626,7 @@ func (s *singleton) Clean() {
var wg sync.WaitGroup var wg sync.WaitGroup
s.Status.Progress = 0 s.Status.Progress = 0
total := len(scenes) + len(galleries) total := len(scenes) + len(galleries)
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
for i, scene := range scenes { for i, scene := range scenes {
s.Status.setProgress(i, total) s.Status.setProgress(i, total)
if s.Status.stopping { if s.Status.stopping {
@@ -634,7 +641,7 @@ func (s *singleton) Clean() {
wg.Add(1) wg.Add(1)
task := CleanTask{Scene: scene} task := CleanTask{Scene: scene, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg) go task.Start(&wg)
wg.Wait() wg.Wait()
} }
@@ -662,6 +669,54 @@ func (s *singleton) Clean() {
}() }()
} }
func (s *singleton) MigrateHash() {
if s.Status.Status != Idle {
return
}
s.Status.SetStatus(Migrate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
go func() {
defer s.returnToIdleState()
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
logger.Infof("Migrating generated files for %s naming hash", fileNamingAlgo.String())
scenes, err := qb.All()
if err != nil {
logger.Errorf("failed to fetch list of scenes for migration")
return
}
var wg sync.WaitGroup
s.Status.Progress = 0
total := len(scenes)
for i, scene := range scenes {
s.Status.setProgress(i, total)
if s.Status.stopping {
logger.Info("Stopping due to user request")
return
}
if scene == nil {
logger.Errorf("nil scene, skipping migrate")
continue
}
wg.Add(1)
task := MigrateHashTask{Scene: scene, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg)
wg.Wait()
}
logger.Info("Finished migrating")
}()
}
func (s *singleton) returnToIdleState() { func (s *singleton) returnToIdleState() {
if r := recover(); r != nil { if r := recover(); r != nil {
logger.Info("recovered from ", r) logger.Info("recovered from ", r)
@@ -709,6 +764,7 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
chTimeout <- struct{}{} chTimeout <- struct{}{}
}() }()
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
overwrite := false overwrite := false
if input.Overwrite != nil { if input.Overwrite != nil {
overwrite = *input.Overwrite overwrite = *input.Overwrite
@@ -718,29 +774,48 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
for _, scene := range scenes { for _, scene := range scenes {
if scene != nil { if scene != nil {
if input.Sprites { if input.Sprites {
task := GenerateSpriteTask{Scene: *scene} task := GenerateSpriteTask{
if overwrite || !task.doesSpriteExist(task.Scene.Checksum) { Scene: *scene,
fileNamingAlgorithm: fileNamingAlgo,
}
if overwrite || task.required() {
totals.sprites++ totals.sprites++
} }
} }
if input.Previews { if input.Previews {
task := GeneratePreviewTask{Scene: *scene, ImagePreview: input.ImagePreviews} task := GeneratePreviewTask{
if overwrite || !task.doesVideoPreviewExist(task.Scene.Checksum) { Scene: *scene,
ImagePreview: input.ImagePreviews,
fileNamingAlgorithm: fileNamingAlgo,
}
sceneHash := scene.GetHash(task.fileNamingAlgorithm)
if overwrite || !task.doesVideoPreviewExist(sceneHash) {
totals.previews++ totals.previews++
} }
if input.ImagePreviews && (overwrite || !task.doesImagePreviewExist(task.Scene.Checksum)) {
if input.ImagePreviews && (overwrite || !task.doesImagePreviewExist(sceneHash)) {
totals.imagePreviews++ totals.imagePreviews++
} }
} }
if input.Markers { if input.Markers {
task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite} task := GenerateMarkersTask{
Scene: scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
totals.markers += int64(task.isMarkerNeeded()) totals.markers += int64(task.isMarkerNeeded())
} }
if input.Transcodes { if input.Transcodes {
task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite} task := GenerateTranscodeTask{
Scene: *scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
if task.isTranscodeNeeded() { if task.isTranscodeNeeded() {
totals.transcodes++ totals.transcodes++
} }

View File

@@ -0,0 +1,6 @@
package manager
// PostMigrate is executed after migrations have been executed.
func (s *singleton) PostMigrate() {
setInitialMD5Config()
}

View File

@@ -10,10 +10,13 @@ import (
"github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
// DestroyScene deletes a scene and its associated relationships from the
// database.
func DestroyScene(sceneID int, tx *sqlx.Tx) error { func DestroyScene(sceneID int, tx *sqlx.Tx) error {
qb := models.NewSceneQueryBuilder() qb := models.NewSceneQueryBuilder()
jqb := models.NewJoinsQueryBuilder() jqb := models.NewJoinsQueryBuilder()
@@ -46,18 +49,25 @@ func DestroyScene(sceneID int, tx *sqlx.Tx) error {
return nil return nil
} }
func DeleteGeneratedSceneFiles(scene *models.Scene) { // DeleteGeneratedSceneFiles deletes generated files for the provided scene.
markersFolder := filepath.Join(GetInstance().Paths.Generated.Markers, scene.Checksum) func DeleteGeneratedSceneFiles(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
sceneHash := scene.GetHash(fileNamingAlgo)
if sceneHash == "" {
return
}
markersFolder := filepath.Join(GetInstance().Paths.Generated.Markers, sceneHash)
exists, _ := utils.FileExists(markersFolder) exists, _ := utils.FileExists(markersFolder)
if exists { if exists {
err := os.RemoveAll(markersFolder) err := os.RemoveAll(markersFolder)
if err != nil { if err != nil {
logger.Warnf("Could not delete file %s: %s", scene.Path, err.Error()) logger.Warnf("Could not delete folder %s: %s", markersFolder, err.Error())
} }
} }
thumbPath := GetInstance().Paths.Scene.GetThumbnailScreenshotPath(scene.Checksum) thumbPath := GetInstance().Paths.Scene.GetThumbnailScreenshotPath(sceneHash)
exists, _ = utils.FileExists(thumbPath) exists, _ = utils.FileExists(thumbPath)
if exists { if exists {
err := os.Remove(thumbPath) err := os.Remove(thumbPath)
@@ -66,7 +76,7 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
normalPath := GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum) normalPath := GetInstance().Paths.Scene.GetScreenshotPath(sceneHash)
exists, _ = utils.FileExists(normalPath) exists, _ = utils.FileExists(normalPath)
if exists { if exists {
err := os.Remove(normalPath) err := os.Remove(normalPath)
@@ -75,7 +85,7 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
streamPreviewPath := GetInstance().Paths.Scene.GetStreamPreviewPath(scene.Checksum) streamPreviewPath := GetInstance().Paths.Scene.GetStreamPreviewPath(sceneHash)
exists, _ = utils.FileExists(streamPreviewPath) exists, _ = utils.FileExists(streamPreviewPath)
if exists { if exists {
err := os.Remove(streamPreviewPath) err := os.Remove(streamPreviewPath)
@@ -84,7 +94,7 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
streamPreviewImagePath := GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.Checksum) streamPreviewImagePath := GetInstance().Paths.Scene.GetStreamPreviewImagePath(sceneHash)
exists, _ = utils.FileExists(streamPreviewImagePath) exists, _ = utils.FileExists(streamPreviewImagePath)
if exists { if exists {
err := os.Remove(streamPreviewImagePath) err := os.Remove(streamPreviewImagePath)
@@ -93,7 +103,7 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
transcodePath := GetInstance().Paths.Scene.GetTranscodePath(scene.Checksum) transcodePath := GetInstance().Paths.Scene.GetTranscodePath(sceneHash)
exists, _ = utils.FileExists(transcodePath) exists, _ = utils.FileExists(transcodePath)
if exists { if exists {
// kill any running streams // kill any running streams
@@ -105,7 +115,7 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
spritePath := GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.Checksum) spritePath := GetInstance().Paths.Scene.GetSpriteImageFilePath(sceneHash)
exists, _ = utils.FileExists(spritePath) exists, _ = utils.FileExists(spritePath)
if exists { if exists {
err := os.Remove(spritePath) err := os.Remove(spritePath)
@@ -114,7 +124,7 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
vttPath := GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.Checksum) vttPath := GetInstance().Paths.Scene.GetSpriteVttFilePath(sceneHash)
exists, _ = utils.FileExists(vttPath) exists, _ = utils.FileExists(vttPath)
if exists { if exists {
err := os.Remove(vttPath) err := os.Remove(vttPath)
@@ -124,9 +134,11 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
func DeleteSceneMarkerFiles(scene *models.Scene, seconds int) { // DeleteSceneMarkerFiles deletes generated files for a scene marker with the
videoPath := GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, seconds) // provided scene and timestamp.
imagePath := GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, seconds) func DeleteSceneMarkerFiles(scene *models.Scene, seconds int, fileNamingAlgo models.HashAlgorithm) {
videoPath := GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(fileNamingAlgo), seconds)
imagePath := GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(fileNamingAlgo), seconds)
exists, _ := utils.FileExists(videoPath) exists, _ := utils.FileExists(videoPath)
if exists { if exists {
@@ -145,6 +157,7 @@ func DeleteSceneMarkerFiles(scene *models.Scene, seconds int) {
} }
} }
// DeleteSceneFile deletes the scene video file from the filesystem.
func DeleteSceneFile(scene *models.Scene) { func DeleteSceneFile(scene *models.Scene) {
// kill any running encoders // kill any running encoders
KillRunningStreams(scene.Path) KillRunningStreams(scene.Path)
@@ -195,8 +208,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string) ([]*models
return nil, err return nil, err
} }
hasTranscode, _ := HasTranscode(scene) if HasTranscode(scene, config.GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) {
if hasTranscode || ffmpeg.IsValidAudioForContainer(audioCodec, container) {
label := "Direct stream" label := "Direct stream"
ret = append(ret, &models.SceneStreamEndpoint{ ret = append(ret, &models.SceneStreamEndpoint{
URL: directStreamURL, URL: directStreamURL,
@@ -236,10 +248,20 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string) ([]*models
return ret, nil return ret, nil
} }
func HasTranscode(scene *models.Scene) (bool, error) { // HasTranscode returns true if a transcoded video exists for the provided
// scene. It will check using the OSHash of the scene first, then fall back
// to the checksum.
func HasTranscode(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) bool {
if scene == nil { if scene == nil {
return false, fmt.Errorf("nil scene") return false
} }
transcodePath := instance.Paths.Scene.GetTranscodePath(scene.Checksum)
return utils.FileExists(transcodePath) sceneHash := scene.GetHash(fileNamingAlgo)
if sceneHash == "" {
return false
}
transcodePath := instance.Paths.Scene.GetTranscodePath(sceneHash)
ret, _ := utils.FileExists(transcodePath)
return ret
} }

View File

@@ -200,7 +200,7 @@ func createScenes(tx *sqlx.Tx) error {
func makeScene(name string, expectedResult bool) *models.Scene { func makeScene(name string, expectedResult bool) *models.Scene {
scene := &models.Scene{ scene := &models.Scene{
Checksum: utils.MD5FromString(name), Checksum: sql.NullString{String: utils.MD5FromString(name), Valid: true},
Path: name, Path: name,
} }

View File

@@ -17,6 +17,7 @@ import (
type CleanTask struct { type CleanTask struct {
Scene *models.Scene Scene *models.Scene
Gallery *models.Gallery Gallery *models.Gallery
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *CleanTask) Start(wg *sync.WaitGroup) { func (t *CleanTask) Start(wg *sync.WaitGroup) {
@@ -32,7 +33,13 @@ func (t *CleanTask) Start(wg *sync.WaitGroup) {
} }
func (t *CleanTask) shouldClean(path string) bool { func (t *CleanTask) shouldClean(path string) bool {
if t.fileExists(path) && t.pathInStash(path) { fileExists, err := t.fileExists(path)
if err != nil {
logger.Errorf("Error checking existence of %s: %s", path, err.Error())
return false
}
if fileExists && t.pathInStash(path) {
logger.Debugf("File Found: %s", path) logger.Debugf("File Found: %s", path)
if matchFile(path, config.GetExcludes()) { if matchFile(path, config.GetExcludes()) {
logger.Infof("File matched regex. Cleaning: \"%s\"", path) logger.Infof("File matched regex. Cleaning: \"%s\"", path)
@@ -78,7 +85,7 @@ func (t *CleanTask) deleteScene(sceneID int) {
return return
} }
DeleteGeneratedSceneFiles(scene) DeleteGeneratedSceneFiles(scene, t.fileNamingAlgorithm)
} }
func (t *CleanTask) deleteGallery(galleryID int) { func (t *CleanTask) deleteGallery(galleryID int) {
@@ -105,12 +112,18 @@ func (t *CleanTask) deleteGallery(galleryID int) {
} }
} }
func (t *CleanTask) fileExists(filename string) bool { func (t *CleanTask) fileExists(filename string) (bool, error) {
info, err := os.Stat(filename) info, err := os.Stat(filename)
if os.IsNotExist(err) { if os.IsNotExist(err) {
return false return false, nil
} }
return !info.IsDir()
// handle if error is something else
if err != nil {
return false, err
}
return !info.IsDir(), nil
} }
func (t *CleanTask) pathInStash(pathToCheck string) bool { func (t *CleanTask) pathInStash(pathToCheck string) bool {

View File

@@ -21,6 +21,7 @@ import (
type ExportTask struct { type ExportTask struct {
Mappings *jsonschema.Mappings Mappings *jsonschema.Mappings
Scraped []jsonschema.ScrapedItem Scraped []jsonschema.ScrapedItem
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *ExportTask) Start(wg *sync.WaitGroup) { func (t *ExportTask) Start(wg *sync.WaitGroup) {
@@ -77,7 +78,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int) {
if (i % 100) == 0 { // make progress easier to read if (i % 100) == 0 { // make progress easier to read
logger.Progressf("[scenes] %d of %d", index, len(scenes)) logger.Progressf("[scenes] %d of %d", index, len(scenes))
} }
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.Checksum}) t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.GetHash(t.fileNamingAlgorithm)})
jobCh <- scene // feed workers jobCh <- scene // feed workers
} }
@@ -103,6 +104,14 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
UpdatedAt: models.JSONTime{Time: scene.UpdatedAt.Timestamp}, UpdatedAt: models.JSONTime{Time: scene.UpdatedAt.Timestamp},
} }
if scene.Checksum.Valid {
newSceneJSON.Checksum = scene.Checksum.String
}
if scene.OSHash.Valid {
newSceneJSON.OSHash = scene.OSHash.String
}
var studioName string var studioName string
if scene.StudioID.Valid { if scene.StudioID.Valid {
studio, _ := studioQB.Find(int(scene.StudioID.Int64), tx) studio, _ := studioQB.Find(int(scene.StudioID.Int64), tx)
@@ -150,15 +159,17 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
newSceneJSON.Performers = t.getPerformerNames(performers) newSceneJSON.Performers = t.getPerformerNames(performers)
newSceneJSON.Tags = t.getTagNames(tags) newSceneJSON.Tags = t.getTagNames(tags)
sceneHash := scene.GetHash(t.fileNamingAlgorithm)
for _, sceneMarker := range sceneMarkers { for _, sceneMarker := range sceneMarkers {
primaryTag, err := tagQB.Find(sceneMarker.PrimaryTagID, tx) primaryTag, err := tagQB.Find(sceneMarker.PrimaryTagID, tx)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> invalid primary tag for scene marker: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> invalid primary tag for scene marker: %s", sceneHash, err.Error())
continue continue
} }
sceneMarkerTags, err := tagQB.FindBySceneMarkerID(sceneMarker.ID, tx) sceneMarkerTags, err := tagQB.FindBySceneMarkerID(sceneMarker.ID, tx)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", sceneHash, err.Error())
continue continue
} }
if sceneMarker.Seconds == 0 || primaryTag.Name == "" { if sceneMarker.Seconds == 0 || primaryTag.Name == "" {
@@ -220,7 +231,7 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
cover, err := sceneQB.GetSceneCover(scene.ID, tx) cover, err := sceneQB.GetSceneCover(scene.ID, tx)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> error getting scene cover: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> error getting scene cover: %s", sceneHash, err.Error())
continue continue
} }
@@ -228,15 +239,15 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
newSceneJSON.Cover = utils.GetBase64StringFromData(cover) newSceneJSON.Cover = utils.GetBase64StringFromData(cover)
} }
sceneJSON, err := instance.JSON.getScene(scene.Checksum) sceneJSON, err := instance.JSON.getScene(sceneHash)
if err != nil { if err != nil {
logger.Debugf("[scenes] error reading scene json: %s", err.Error()) logger.Debugf("[scenes] error reading scene json: %s", err.Error())
} else if jsonschema.CompareJSON(*sceneJSON, newSceneJSON) { } else if jsonschema.CompareJSON(*sceneJSON, newSceneJSON) {
continue continue
} }
if err := instance.JSON.saveScene(scene.Checksum, &newSceneJSON); err != nil { if err := instance.JSON.saveScene(sceneHash, &newSceneJSON); err != nil {
logger.Errorf("[scenes] <%s> failed to save json: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to save json: %s", sceneHash, err.Error())
} }
} }

View File

@@ -16,6 +16,7 @@ type GenerateMarkersTask struct {
Scene *models.Scene Scene *models.Scene
Marker *models.SceneMarker Marker *models.SceneMarker
Overwrite bool Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) { func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) {
@@ -56,27 +57,28 @@ func (t *GenerateMarkersTask) generateSceneMarkers() {
return return
} }
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
// Make the folder for the scenes markers // Make the folder for the scenes markers
markersFolder := filepath.Join(instance.Paths.Generated.Markers, t.Scene.Checksum) markersFolder := filepath.Join(instance.Paths.Generated.Markers, sceneHash)
_ = utils.EnsureDir(markersFolder) utils.EnsureDir(markersFolder)
for i, sceneMarker := range sceneMarkers { for i, sceneMarker := range sceneMarkers {
index := i + 1 index := i + 1
logger.Progressf("[generator] <%s> scene marker %d of %d", t.Scene.Checksum, index, len(sceneMarkers)) logger.Progressf("[generator] <%s> scene marker %d of %d", sceneHash, index, len(sceneMarkers))
t.generateMarker(videoFile, t.Scene, sceneMarker) t.generateMarker(videoFile, t.Scene, sceneMarker)
} }
} }
func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) {
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
seconds := int(sceneMarker.Seconds) seconds := int(sceneMarker.Seconds)
videoExists := t.videoExists(sceneHash, seconds)
imageExists := t.imageExists(sceneHash, seconds)
baseFilename := strconv.Itoa(seconds) baseFilename := strconv.Itoa(seconds)
videoFilename := baseFilename + ".mp4"
imageFilename := baseFilename + ".webp"
videoPath := instance.Paths.SceneMarkers.GetStreamPath(scene.Checksum, seconds)
imagePath := instance.Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, seconds)
videoExists, _ := utils.FileExists(videoPath)
imageExists, _ := utils.FileExists(imagePath)
options := ffmpeg.SceneMarkerOptions{ options := ffmpeg.SceneMarkerOptions{
ScenePath: scene.Path, ScenePath: scene.Path,
@@ -87,6 +89,9 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
if t.Overwrite || !videoExists { if t.Overwrite || !videoExists {
videoFilename := baseFilename + ".mp4"
videoPath := instance.Paths.SceneMarkers.GetStreamPath(sceneHash, seconds)
options.OutputPath = instance.Paths.Generated.GetTmpPath(videoFilename) // tmp output in case the process ends abruptly options.OutputPath = instance.Paths.Generated.GetTmpPath(videoFilename) // tmp output in case the process ends abruptly
if err := encoder.SceneMarkerVideo(*videoFile, options); err != nil { if err := encoder.SceneMarkerVideo(*videoFile, options); err != nil {
logger.Errorf("[generator] failed to generate marker video: %s", err) logger.Errorf("[generator] failed to generate marker video: %s", err)
@@ -97,18 +102,20 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
} }
if t.Overwrite || !imageExists { if t.Overwrite || !imageExists {
imageFilename := baseFilename + ".webp"
imagePath := instance.Paths.SceneMarkers.GetStreamPreviewImagePath(sceneHash, seconds)
options.OutputPath = instance.Paths.Generated.GetTmpPath(imageFilename) // tmp output in case the process ends abruptly options.OutputPath = instance.Paths.Generated.GetTmpPath(imageFilename) // tmp output in case the process ends abruptly
if err := encoder.SceneMarkerImage(*videoFile, options); err != nil { if err := encoder.SceneMarkerImage(*videoFile, options); err != nil {
logger.Errorf("[generator] failed to generate marker image: %s", err) logger.Errorf("[generator] failed to generate marker image: %s", err)
} else { } else {
_ = os.Rename(options.OutputPath, imagePath) _ = os.Rename(options.OutputPath, imagePath)
logger.Debug("created marker image: ", videoPath) logger.Debug("created marker image: ", imagePath)
} }
} }
} }
func (t *GenerateMarkersTask) isMarkerNeeded() int { func (t *GenerateMarkersTask) isMarkerNeeded() int {
markers := 0 markers := 0
qb := models.NewSceneMarkerQueryBuilder() qb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, _ := qb.FindBySceneID(t.Scene.ID, nil) sceneMarkers, _ := qb.FindBySceneID(t.Scene.ID, nil)
@@ -116,18 +123,49 @@ func (t *GenerateMarkersTask) isMarkerNeeded() int {
return 0 return 0
} }
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
for _, sceneMarker := range sceneMarkers { for _, sceneMarker := range sceneMarkers {
seconds := int(sceneMarker.Seconds) seconds := int(sceneMarker.Seconds)
videoPath := instance.Paths.SceneMarkers.GetStreamPath(t.Scene.Checksum, seconds)
imagePath := instance.Paths.SceneMarkers.GetStreamPreviewImagePath(t.Scene.Checksum, seconds)
videoExists, _ := utils.FileExists(videoPath)
imageExists, _ := utils.FileExists(imagePath)
if t.Overwrite || !videoExists || !imageExists { if t.Overwrite || !t.markerExists(sceneHash, seconds) {
markers++ markers++
} }
} }
return markers return markers
} }
func (t *GenerateMarkersTask) markerExists(sceneChecksum string, seconds int) bool {
if sceneChecksum == "" {
return false
}
videoPath := instance.Paths.SceneMarkers.GetStreamPath(sceneChecksum, seconds)
imagePath := instance.Paths.SceneMarkers.GetStreamPreviewImagePath(sceneChecksum, seconds)
videoExists, _ := utils.FileExists(videoPath)
imageExists, _ := utils.FileExists(imagePath)
return videoExists && imageExists
}
func (t *GenerateMarkersTask) videoExists(sceneChecksum string, seconds int) bool {
if sceneChecksum == "" {
return false
}
videoPath := instance.Paths.SceneMarkers.GetStreamPath(sceneChecksum, seconds)
videoExists, _ := utils.FileExists(videoPath)
return videoExists
}
func (t *GenerateMarkersTask) imageExists(sceneChecksum string, seconds int) bool {
if sceneChecksum == "" {
return false
}
imagePath := instance.Paths.SceneMarkers.GetStreamPreviewImagePath(sceneChecksum, seconds)
imageExists, _ := utils.FileExists(imagePath)
return imageExists
}

View File

@@ -16,6 +16,7 @@ type GeneratePreviewTask struct {
Options models.GeneratePreviewOptionsInput Options models.GeneratePreviewOptionsInput
Overwrite bool Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) { func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
@@ -23,8 +24,7 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
videoFilename := t.videoFilename() videoFilename := t.videoFilename()
imageFilename := t.imageFilename() imageFilename := t.imageFilename()
videoExists := t.doesVideoPreviewExist(t.Scene.Checksum) if !t.Overwrite && !t.required() {
if !t.Overwrite && ((!t.ImagePreview || t.doesImagePreviewExist(t.Scene.Checksum)) && videoExists) {
return return
} }
@@ -34,7 +34,8 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
return return
} }
generator, err := NewPreviewGenerator(*videoFile, videoFilename, imageFilename, instance.Paths.Generated.Screenshots, true, t.ImagePreview, t.Options.PreviewPreset.String()) const generateVideo = true
generator, err := NewPreviewGenerator(*videoFile, videoFilename, imageFilename, instance.Paths.Generated.Screenshots, generateVideo, t.ImagePreview, t.Options.PreviewPreset.String())
if err != nil { if err != nil {
logger.Errorf("error creating preview generator: %s", err.Error()) logger.Errorf("error creating preview generator: %s", err.Error())
return return
@@ -53,20 +54,35 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
} }
} }
func (t GeneratePreviewTask) required() bool {
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
videoExists := t.doesVideoPreviewExist(sceneHash)
imageExists := !t.ImagePreview || t.doesImagePreviewExist(sceneHash)
return !imageExists || !videoExists
}
func (t *GeneratePreviewTask) doesVideoPreviewExist(sceneChecksum string) bool { func (t *GeneratePreviewTask) doesVideoPreviewExist(sceneChecksum string) bool {
if sceneChecksum == "" {
return false
}
videoExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewPath(sceneChecksum)) videoExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewPath(sceneChecksum))
return videoExists return videoExists
} }
func (t *GeneratePreviewTask) doesImagePreviewExist(sceneChecksum string) bool { func (t *GeneratePreviewTask) doesImagePreviewExist(sceneChecksum string) bool {
if sceneChecksum == "" {
return false
}
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewImagePath(sceneChecksum)) imageExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewImagePath(sceneChecksum))
return imageExists return imageExists
} }
func (t *GeneratePreviewTask) videoFilename() string { func (t *GeneratePreviewTask) videoFilename() string {
return t.Scene.Checksum + ".mp4" return t.Scene.GetHash(t.fileNamingAlgorithm) + ".mp4"
} }
func (t *GeneratePreviewTask) imageFilename() string { func (t *GeneratePreviewTask) imageFilename() string {
return t.Scene.Checksum + ".webp" return t.Scene.GetHash(t.fileNamingAlgorithm) + ".webp"
} }

View File

@@ -16,6 +16,7 @@ import (
type GenerateScreenshotTask struct { type GenerateScreenshotTask struct {
Scene models.Scene Scene models.Scene
ScreenshotAt *float64 ScreenshotAt *float64
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) { func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
@@ -36,7 +37,7 @@ func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
at = *t.ScreenshotAt at = *t.ScreenshotAt
} }
checksum := t.Scene.Checksum checksum := t.Scene.GetHash(t.fileNamingAlgorithm)
normalPath := instance.Paths.Scene.GetScreenshotPath(checksum) normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)
// we'll generate the screenshot, grab the generated data and set it // we'll generate the screenshot, grab the generated data and set it
@@ -69,7 +70,7 @@ func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
} }
if err := SetSceneScreenshot(t.Scene.Checksum, coverImageData); err != nil { if err := SetSceneScreenshot(checksum, coverImageData); err != nil {
logger.Errorf("Error writing screenshot: %s", err.Error()) logger.Errorf("Error writing screenshot: %s", err.Error())
tx.Rollback() tx.Rollback()
return return

View File

@@ -12,12 +12,13 @@ import (
type GenerateSpriteTask struct { type GenerateSpriteTask struct {
Scene models.Scene Scene models.Scene
Overwrite bool Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) { func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
defer wg.Done() defer wg.Done()
if t.doesSpriteExist(t.Scene.Checksum) && !t.Overwrite { if !t.Overwrite && !t.required() {
return return
} }
@@ -27,8 +28,9 @@ func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
return return
} }
imagePath := instance.Paths.Scene.GetSpriteImageFilePath(t.Scene.Checksum) sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
vttPath := instance.Paths.Scene.GetSpriteVttFilePath(t.Scene.Checksum) imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash)
vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash)
generator, err := NewSpriteGenerator(*videoFile, imagePath, vttPath, 9, 9) generator, err := NewSpriteGenerator(*videoFile, imagePath, vttPath, 9, 9)
if err != nil { if err != nil {
logger.Errorf("error creating sprite generator: %s", err.Error()) logger.Errorf("error creating sprite generator: %s", err.Error())
@@ -42,7 +44,17 @@ func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
} }
} }
// required returns true if the sprite image/vtt pair has not yet been
// generated for this scene's current hash.
func (t GenerateSpriteTask) required() bool {
	return !t.doesSpriteExist(t.Scene.GetHash(t.fileNamingAlgorithm))
}
func (t *GenerateSpriteTask) doesSpriteExist(sceneChecksum string) bool { func (t *GenerateSpriteTask) doesSpriteExist(sceneChecksum string) bool {
if sceneChecksum == "" {
return false
}
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetSpriteImageFilePath(sceneChecksum)) imageExists, _ := utils.FileExists(instance.Paths.Scene.GetSpriteImageFilePath(sceneChecksum))
vttExists, _ := utils.FileExists(instance.Paths.Scene.GetSpriteVttFilePath(sceneChecksum)) vttExists, _ := utils.FileExists(instance.Paths.Scene.GetSpriteVttFilePath(sceneChecksum))
return imageExists && vttExists return imageExists && vttExists

View File

@@ -20,6 +20,7 @@ import (
type ImportTask struct { type ImportTask struct {
Mappings *jsonschema.Mappings Mappings *jsonschema.Mappings
Scraped []jsonschema.ScrapedItem Scraped []jsonschema.ScrapedItem
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *ImportTask) Start(wg *sync.WaitGroup) { func (t *ImportTask) Start(wg *sync.WaitGroup) {
@@ -533,27 +534,30 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes)) logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes))
newScene := models.Scene{
Checksum: mappingJSON.Checksum,
Path: mappingJSON.Path,
}
sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum) sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
if err != nil { if err != nil {
logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error()) logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
continue continue
} }
sceneHash := mappingJSON.Checksum
newScene := models.Scene{
Checksum: sql.NullString{String: sceneJSON.Checksum, Valid: sceneJSON.Checksum != ""},
OSHash: sql.NullString{String: sceneJSON.OSHash, Valid: sceneJSON.OSHash != ""},
Path: mappingJSON.Path,
}
// Process the base 64 encoded cover image string // Process the base 64 encoded cover image string
var coverImageData []byte var coverImageData []byte
if sceneJSON.Cover != "" { if sceneJSON.Cover != "" {
_, coverImageData, err = utils.ProcessBase64Image(sceneJSON.Cover) _, coverImageData, err = utils.ProcessBase64Image(sceneJSON.Cover)
if err != nil { if err != nil {
logger.Warnf("[scenes] <%s> invalid cover image: %s", mappingJSON.Checksum, err.Error()) logger.Warnf("[scenes] <%s> invalid cover image: %s", sceneHash, err.Error())
} }
if len(coverImageData) > 0 { if len(coverImageData) > 0 {
if err = SetSceneScreenshot(mappingJSON.Checksum, coverImageData); err != nil { if err = SetSceneScreenshot(sceneHash, coverImageData); err != nil {
logger.Warnf("[scenes] <%s> failed to create cover image: %s", mappingJSON.Checksum, err.Error()) logger.Warnf("[scenes] <%s> failed to create cover image: %s", sceneHash, err.Error())
} }
// write the cover image data after creating the scene // write the cover image data after creating the scene
@@ -634,12 +638,12 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
scene, err := qb.Create(newScene, tx) scene, err := qb.Create(newScene, tx)
if err != nil { if err != nil {
_ = tx.Rollback() _ = tx.Rollback()
logger.Errorf("[scenes] <%s> failed to create: %s", mappingJSON.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to create: %s", sceneHash, err.Error())
return return
} }
if scene.ID == 0 { if scene.ID == 0 {
_ = tx.Rollback() _ = tx.Rollback()
logger.Errorf("[scenes] <%s> invalid id after scene creation", mappingJSON.Checksum) logger.Errorf("[scenes] <%s> invalid id after scene creation", sceneHash)
return return
} }
@@ -647,7 +651,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if len(coverImageData) > 0 { if len(coverImageData) > 0 {
if err := qb.UpdateSceneCover(scene.ID, coverImageData, tx); err != nil { if err := qb.UpdateSceneCover(scene.ID, coverImageData, tx); err != nil {
_ = tx.Rollback() _ = tx.Rollback()
logger.Errorf("[scenes] <%s> error setting scene cover: %s", mappingJSON.Checksum, err.Error()) logger.Errorf("[scenes] <%s> error setting scene cover: %s", sceneHash, err.Error())
return return
} }
} }
@@ -662,7 +666,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
gallery.SceneID = sql.NullInt64{Int64: int64(scene.ID), Valid: true} gallery.SceneID = sql.NullInt64{Int64: int64(scene.ID), Valid: true}
_, err := gqb.Update(*gallery, tx) _, err := gqb.Update(*gallery, tx)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> failed to update gallery: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to update gallery: %s", sceneHash, err.Error())
} }
} }
} }
@@ -671,7 +675,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if len(sceneJSON.Performers) > 0 { if len(sceneJSON.Performers) > 0 {
performers, err := t.getPerformers(sceneJSON.Performers, tx) performers, err := t.getPerformers(sceneJSON.Performers, tx)
if err != nil { if err != nil {
logger.Warnf("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err.Error()) logger.Warnf("[scenes] <%s> failed to fetch performers: %s", sceneHash, err.Error())
} else { } else {
var performerJoins []models.PerformersScenes var performerJoins []models.PerformersScenes
for _, performer := range performers { for _, performer := range performers {
@@ -682,7 +686,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
performerJoins = append(performerJoins, join) performerJoins = append(performerJoins, join)
} }
if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil { if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil {
logger.Errorf("[scenes] <%s> failed to associate performers: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to associate performers: %s", sceneHash, err.Error())
} }
} }
} }
@@ -691,19 +695,19 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if len(sceneJSON.Movies) > 0 { if len(sceneJSON.Movies) > 0 {
moviesScenes, err := t.getMoviesScenes(sceneJSON.Movies, scene.ID, tx) moviesScenes, err := t.getMoviesScenes(sceneJSON.Movies, scene.ID, tx)
if err != nil { if err != nil {
logger.Warnf("[scenes] <%s> failed to fetch movies: %s", scene.Checksum, err.Error()) logger.Warnf("[scenes] <%s> failed to fetch movies: %s", sceneHash, err.Error())
} else { } else {
if err := jqb.CreateMoviesScenes(moviesScenes, tx); err != nil { if err := jqb.CreateMoviesScenes(moviesScenes, tx); err != nil {
logger.Errorf("[scenes] <%s> failed to associate movies: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to associate movies: %s", sceneHash, err.Error())
} }
} }
} }
// Relate the scene to the tags // Relate the scene to the tags
if len(sceneJSON.Tags) > 0 { if len(sceneJSON.Tags) > 0 {
tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx) tags, err := t.getTags(sceneHash, sceneJSON.Tags, tx)
if err != nil { if err != nil {
logger.Warnf("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err.Error()) logger.Warnf("[scenes] <%s> failed to fetch tags: %s", sceneHash, err.Error())
} else { } else {
var tagJoins []models.ScenesTags var tagJoins []models.ScenesTags
for _, tag := range tags { for _, tag := range tags {
@@ -714,7 +718,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
tagJoins = append(tagJoins, join) tagJoins = append(tagJoins, join)
} }
if err := jqb.CreateScenesTags(tagJoins, tx); err != nil { if err := jqb.CreateScenesTags(tagJoins, tx); err != nil {
logger.Errorf("[scenes] <%s> failed to associate tags: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to associate tags: %s", sceneHash, err.Error())
} }
} }
} }
@@ -735,7 +739,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx, false) primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx, false)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", sceneHash, err.Error())
} else { } else {
newSceneMarker.PrimaryTagID = primaryTag.ID newSceneMarker.PrimaryTagID = primaryTag.ID
} }
@@ -743,18 +747,18 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
// Create the scene marker in the DB // Create the scene marker in the DB
sceneMarker, err := smqb.Create(newSceneMarker, tx) sceneMarker, err := smqb.Create(newSceneMarker, tx)
if err != nil { if err != nil {
logger.Warnf("[scenes] <%s> failed to create scene marker: %s", scene.Checksum, err.Error()) logger.Warnf("[scenes] <%s> failed to create scene marker: %s", sceneHash, err.Error())
continue continue
} }
if sceneMarker.ID == 0 { if sceneMarker.ID == 0 {
logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", scene.Checksum) logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", sceneHash)
continue continue
} }
// Get the scene marker tags and create the joins // Get the scene marker tags and create the joins
tags, err := t.getTags(scene.Checksum, marker.Tags, tx) tags, err := t.getTags(sceneHash, marker.Tags, tx)
if err != nil { if err != nil {
logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err.Error()) logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", sceneHash, err.Error())
} else { } else {
var tagJoins []models.SceneMarkersTags var tagJoins []models.SceneMarkersTags
for _, tag := range tags { for _, tag := range tags {
@@ -765,7 +769,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
tagJoins = append(tagJoins, join) tagJoins = append(tagJoins, join)
} }
if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil { if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil {
logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", scene.Checksum, err.Error()) logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", sceneHash, err.Error())
} }
} }
} }

View File

@@ -0,0 +1,86 @@
package manager
import (
"os"
"path/filepath"
"sync"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// MigrateHashTask renames generated files between oshash and MD5 based on the
// value of the fileNamingAlgorithm flag.
type MigrateHashTask struct {
	// Scene is the scene whose generated files are renamed.
	Scene *models.Scene
	// fileNamingAlgorithm is the naming algorithm being migrated TO.
	fileNamingAlgorithm models.HashAlgorithm
}
// Start renames every generated file for the scene from its old hash-based
// name to the name derived from the configured algorithm. It is a no-op
// unless the scene has both an oshash and a checksum.
func (t *MigrateHashTask) Start(wg *sync.WaitGroup) {
	defer wg.Done()

	if !t.Scene.OSHash.Valid || !t.Scene.Checksum.Valid {
		// nothing to do
		return
	}

	oshash := t.Scene.OSHash.String
	checksum := t.Scene.Checksum.String

	// direction of the rename depends on the target algorithm
	oldHash, newHash := oshash, checksum
	if t.fileNamingAlgorithm == models.HashAlgorithmOshash {
		oldHash, newHash = checksum, oshash
	}

	// markers live in a per-scene directory under the generated markers path
	t.migrate(filepath.Join(instance.Paths.Generated.Markers, oldHash), filepath.Join(instance.Paths.Generated.Markers, newHash))

	// every other generated artifact is addressed via a scene-paths helper;
	// apply the same rename through each helper in turn
	scenePaths := GetInstance().Paths.Scene
	for _, makePath := range []func(string) string{
		scenePaths.GetThumbnailScreenshotPath,
		scenePaths.GetScreenshotPath,
		scenePaths.GetStreamPreviewPath,
		scenePaths.GetStreamPreviewImagePath,
		scenePaths.GetTranscodePath,
		scenePaths.GetSpriteVttFilePath,
		scenePaths.GetSpriteImageFilePath,
	} {
		t.migrate(makePath(oldHash), makePath(newHash))
	}
}
// migrate renames oldName to newName if oldName exists. Failures are logged
// rather than returned so one bad file does not abort the whole migration.
func (t *MigrateHashTask) migrate(oldName, newName string) {
	exists, err := utils.FileExists(oldName)
	if err != nil && !os.IsNotExist(err) {
		logger.Errorf("Error checking existence of %s: %s", oldName, err.Error())
		return
	}
	if !exists {
		return
	}

	logger.Infof("renaming %s to %s", oldName, newName)
	if renameErr := os.Rename(oldName, newName); renameErr != nil {
		logger.Errorf("error renaming %s to %s: %s", oldName, newName, renameErr.Error())
	}
}

View File

@@ -19,6 +19,8 @@ import (
type ScanTask struct { type ScanTask struct {
FilePath string FilePath string
UseFileMetadata bool UseFileMetadata bool
calculateMD5 bool
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *ScanTask) Start(wg *sync.WaitGroup) { func (t *ScanTask) Start(wg *sync.WaitGroup) {
@@ -144,7 +146,7 @@ func (t *ScanTask) scanScene() {
if scene != nil { if scene != nil {
// We already have this item in the database // We already have this item in the database
// check for thumbnails,screenshots // check for thumbnails,screenshots
t.makeScreenshots(nil, scene.Checksum) t.makeScreenshots(nil, scene.GetHash(t.fileNamingAlgorithm))
// check for container // check for container
if !scene.Format.Valid { if !scene.Format.Valid {
@@ -165,8 +167,47 @@ func (t *ScanTask) scanScene() {
} else if err := tx.Commit(); err != nil { } else if err := tx.Commit(); err != nil {
logger.Error(err.Error()) logger.Error(err.Error())
} }
} }
// check if oshash is set
if !scene.OSHash.Valid {
logger.Infof("Calculating oshash for existing file %s ...", t.FilePath)
oshash, err := utils.OSHashFromFilePath(t.FilePath)
if err != nil {
logger.Error(err.Error())
return
}
ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil)
err = qb.UpdateOSHash(scene.ID, oshash, tx)
if err != nil {
logger.Error(err.Error())
_ = tx.Rollback()
} else if err := tx.Commit(); err != nil {
logger.Error(err.Error())
}
}
// check if MD5 is set, if calculateMD5 is true
if t.calculateMD5 && !scene.Checksum.Valid {
checksum, err := t.calculateChecksum()
if err != nil {
logger.Error(err.Error())
return
}
ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil)
err = qb.UpdateChecksum(scene.ID, checksum, tx)
if err != nil {
logger.Error(err.Error())
_ = tx.Rollback()
} else if err := tx.Commit(); err != nil {
logger.Error(err.Error())
}
}
return return
} }
@@ -182,15 +223,36 @@ func (t *ScanTask) scanScene() {
videoFile.SetTitleFromPath() videoFile.SetTitleFromPath()
} }
checksum, err := t.calculateChecksum() var checksum string
logger.Infof("%s not found. Calculating oshash...", t.FilePath)
oshash, err := utils.OSHashFromFilePath(t.FilePath)
if err != nil { if err != nil {
logger.Error(err.Error()) logger.Error(err.Error())
return return
} }
t.makeScreenshots(videoFile, checksum) if t.fileNamingAlgorithm == models.HashAlgorithmMd5 || t.calculateMD5 {
checksum, err = t.calculateChecksum()
if err != nil {
logger.Error(err.Error())
return
}
}
sceneHash := oshash
if t.fileNamingAlgorithm == models.HashAlgorithmMd5 {
sceneHash = checksum
scene, _ = qb.FindByChecksum(sceneHash)
} else if t.fileNamingAlgorithm == models.HashAlgorithmOshash {
scene, _ = qb.FindByOSHash(sceneHash)
} else {
logger.Error("unknown file naming algorithm")
return
}
t.makeScreenshots(videoFile, sceneHash)
scene, _ = qb.FindByChecksum(checksum)
ctx := context.TODO() ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil) tx := database.DB.MustBeginTx(ctx, nil)
if scene != nil { if scene != nil {
@@ -209,7 +271,8 @@ func (t *ScanTask) scanScene() {
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath) logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
currentTime := time.Now() currentTime := time.Now()
newScene := models.Scene{ newScene := models.Scene{
Checksum: checksum, Checksum: sql.NullString{String: checksum, Valid: checksum != ""},
OSHash: sql.NullString{String: oshash, Valid: oshash != ""},
Path: t.FilePath, Path: t.FilePath,
Title: sql.NullString{String: videoFile.Title, Valid: true}, Title: sql.NullString{String: videoFile.Title, Valid: true},
Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true}, Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
@@ -277,7 +340,7 @@ func (t *ScanTask) makeScreenshots(probeResult *ffmpeg.VideoFile, checksum strin
} }
func (t *ScanTask) calculateChecksum() (string, error) { func (t *ScanTask) calculateChecksum() (string, error) {
logger.Infof("%s not found. Calculating checksum...", t.FilePath) logger.Infof("Calculating checksum for %s...", t.FilePath)
checksum, err := utils.MD5FromFilePath(t.FilePath) checksum, err := utils.MD5FromFilePath(t.FilePath)
if err != nil { if err != nil {
return "", err return "", err

View File

@@ -13,12 +13,13 @@ import (
type GenerateTranscodeTask struct { type GenerateTranscodeTask struct {
Scene models.Scene Scene models.Scene
Overwrite bool Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
} }
func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) { func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
defer wg.Done() defer wg.Done()
hasTranscode, _ := HasTranscode(&t.Scene) hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
if !t.Overwrite && hasTranscode { if !t.Overwrite && hasTranscode {
return return
} }
@@ -27,7 +28,6 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
if t.Scene.Format.Valid { if t.Scene.Format.Valid {
container = ffmpeg.Container(t.Scene.Format.String) container = ffmpeg.Container(t.Scene.Format.String)
} else { // container isn't in the DB } else { // container isn't in the DB
// shouldn't happen unless user hasn't scanned after updating to PR#384+ version // shouldn't happen unless user hasn't scanned after updating to PR#384+ version
tmpVideoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path) tmpVideoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
@@ -55,7 +55,8 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
return return
} }
outputPath := instance.Paths.Generated.GetTmpPath(t.Scene.Checksum + ".mp4") sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
outputPath := instance.Paths.Generated.GetTmpPath(sceneHash + ".mp4")
transcodeSize := config.GetMaxTranscodeSize() transcodeSize := config.GetMaxTranscodeSize()
options := ffmpeg.TranscodeOptions{ options := ffmpeg.TranscodeOptions{
OutputPath: outputPath, OutputPath: outputPath,
@@ -78,12 +79,12 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
} }
} }
if err := os.Rename(outputPath, instance.Paths.Scene.GetTranscodePath(t.Scene.Checksum)); err != nil { if err := os.Rename(outputPath, instance.Paths.Scene.GetTranscodePath(sceneHash)); err != nil {
logger.Errorf("[transcode] error generating transcode: %s", err.Error()) logger.Errorf("[transcode] error generating transcode: %s", err.Error())
return return
} }
logger.Debugf("[transcode] <%s> created transcode: %s", t.Scene.Checksum, outputPath) logger.Debugf("[transcode] <%s> created transcode: %s", sceneHash, outputPath)
return return
} }
@@ -107,7 +108,7 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
return false return false
} }
hasTranscode, _ := HasTranscode(&t.Scene) hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
if !t.Overwrite && hasTranscode { if !t.Overwrite && hasTranscode {
return false return false
} }

View File

@@ -5,9 +5,11 @@ import (
"path/filepath" "path/filepath"
) )
// Scene stores the metadata for a single video scene.
type Scene struct { type Scene struct {
ID int `db:"id" json:"id"` ID int `db:"id" json:"id"`
Checksum string `db:"checksum" json:"checksum"` Checksum sql.NullString `db:"checksum" json:"checksum"`
OSHash sql.NullString `db:"oshash" json:"oshash"`
Path string `db:"path" json:"path"` Path string `db:"path" json:"path"`
Title sql.NullString `db:"title" json:"title"` Title sql.NullString `db:"title" json:"title"`
Details sql.NullString `db:"details" json:"details"` Details sql.NullString `db:"details" json:"details"`
@@ -29,9 +31,12 @@ type Scene struct {
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
} }
// ScenePartial represents part of a Scene object. It is used to update
// the database entry. Only non-nil fields will be updated.
type ScenePartial struct { type ScenePartial struct {
ID int `db:"id" json:"id"` ID int `db:"id" json:"id"`
Checksum *string `db:"checksum" json:"checksum"` Checksum *sql.NullString `db:"checksum" json:"checksum"`
OSHash *sql.NullString `db:"oshash" json:"oshash"`
Path *string `db:"path" json:"path"` Path *string `db:"path" json:"path"`
Title *sql.NullString `db:"title" json:"title"` Title *sql.NullString `db:"title" json:"title"`
Details *sql.NullString `db:"details" json:"details"` Details *sql.NullString `db:"details" json:"details"`
@@ -52,6 +57,8 @@ type ScenePartial struct {
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
} }
// GetTitle returns the title of the scene. If the Title field is empty,
// then the base filename is returned.
func (s Scene) GetTitle() string { func (s Scene) GetTitle() string {
if s.Title.String != "" { if s.Title.String != "" {
return s.Title.String return s.Title.String
@@ -60,6 +67,19 @@ func (s Scene) GetTitle() string {
return filepath.Base(s.Path) return filepath.Base(s.Path)
} }
// GetHash returns the hash of the scene, based on the hash algorithm provided. If
// hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned.
// Panics on an unrecognised algorithm value (programmer error).
func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string {
	switch hashAlgorithm {
	case HashAlgorithmMd5:
		return s.Checksum.String
	case HashAlgorithmOshash:
		return s.OSHash.String
	default:
		panic("unknown hash algorithm")
	}
}
// SceneFileType represents the file metadata for a scene.
type SceneFileType struct { type SceneFileType struct {
Size *string `graphql:"size" json:"size"` Size *string `graphql:"size" json:"size"`
Duration *float64 `graphql:"duration" json:"duration"` Duration *float64 `graphql:"duration" json:"duration"`

View File

@@ -41,6 +41,16 @@ WHERE scenes_tags.tag_id = ?
GROUP BY scenes_tags.scene_id GROUP BY scenes_tags.scene_id
` `
// countScenesForMissingChecksumQuery selects the ids of scenes that have no
// MD5 checksum; it is wrapped by buildCountQuery to produce a count.
var countScenesForMissingChecksumQuery = `
SELECT id FROM scenes
WHERE scenes.checksum is null
`

// countScenesForMissingOSHashQuery selects the ids of scenes that have no
// oshash; it is wrapped by buildCountQuery to produce a count.
var countScenesForMissingOSHashQuery = `
SELECT id FROM scenes
WHERE scenes.oshash is null
`
type SceneQueryBuilder struct{} type SceneQueryBuilder struct{}
func NewSceneQueryBuilder() SceneQueryBuilder { func NewSceneQueryBuilder() SceneQueryBuilder {
@@ -50,9 +60,9 @@ func NewSceneQueryBuilder() SceneQueryBuilder {
func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) { func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) {
ensureTx(tx) ensureTx(tx)
result, err := tx.NamedExec( result, err := tx.NamedExec(
`INSERT INTO scenes (checksum, path, title, details, url, date, rating, o_counter, size, duration, video_codec, `INSERT INTO scenes (oshash, checksum, path, title, details, url, date, rating, o_counter, size, duration, video_codec,
audio_codec, format, width, height, framerate, bitrate, studio_id, created_at, updated_at) audio_codec, format, width, height, framerate, bitrate, studio_id, created_at, updated_at)
VALUES (:checksum, :path, :title, :details, :url, :date, :rating, :o_counter, :size, :duration, :video_codec, VALUES (:oshash, :checksum, :path, :title, :details, :url, :date, :rating, :o_counter, :size, :duration, :video_codec,
:audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :created_at, :updated_at) :audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :created_at, :updated_at)
`, `,
newScene, newScene,
@@ -178,6 +188,12 @@ func (qb *SceneQueryBuilder) FindByChecksum(checksum string) (*Scene, error) {
return qb.queryScene(query, args, nil) return qb.queryScene(query, args, nil)
} }
// FindByOSHash returns the scene with the given oshash, or nil if no such
// scene exists. Uses selectAll(sceneTable) for consistency with the sibling
// finders (e.g. FindByPath) instead of a hand-written SELECT.
func (qb *SceneQueryBuilder) FindByOSHash(oshash string) (*Scene, error) {
	query := selectAll(sceneTable) + "WHERE oshash = ? LIMIT 1"
	args := []interface{}{oshash}
	return qb.queryScene(query, args, nil)
}
func (qb *SceneQueryBuilder) FindByPath(path string) (*Scene, error) { func (qb *SceneQueryBuilder) FindByPath(path string) (*Scene, error) {
query := selectAll(sceneTable) + "WHERE path = ? LIMIT 1" query := selectAll(sceneTable) + "WHERE path = ? LIMIT 1"
args := []interface{}{path} args := []interface{}{path}
@@ -231,6 +247,16 @@ func (qb *SceneQueryBuilder) CountByTagID(tagID int) (int, error) {
return runCountQuery(buildCountQuery(countScenesForTagQuery), args) return runCountQuery(buildCountQuery(countScenesForTagQuery), args)
} }
// CountMissingChecksum returns the number of scenes missing a checksum value.
func (qb *SceneQueryBuilder) CountMissingChecksum() (int, error) {
	// the query takes no bind parameters
	return runCountQuery(buildCountQuery(countScenesForMissingChecksumQuery), nil)
}

// CountMissingOSHash returns the number of scenes missing an oshash value.
func (qb *SceneQueryBuilder) CountMissingOSHash() (int, error) {
	// the query takes no bind parameters
	return runCountQuery(buildCountQuery(countScenesForMissingOSHashQuery), nil)
}
func (qb *SceneQueryBuilder) Wall(q *string) ([]*Scene, error) { func (qb *SceneQueryBuilder) Wall(q *string) ([]*Scene, error) {
s := "" s := ""
if q != nil { if q != nil {
@@ -267,7 +293,7 @@ func (qb *SceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *Fin
` `
if q := findFilter.Q; q != nil && *q != "" { if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.checksum", "scene_markers.title"} searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.oshash", "scenes.checksum", "scene_markers.title"}
clause, thisArgs := getSearchBinding(searchColumns, *q, false) clause, thisArgs := getSearchBinding(searchColumns, *q, false)
query.addWhere(clause) query.addWhere(clause)
query.addArg(thisArgs...) query.addArg(thisArgs...)
@@ -543,6 +569,32 @@ func (qb *SceneQueryBuilder) UpdateFormat(id int, format string, tx *sqlx.Tx) er
return nil return nil
} }
// UpdateOSHash sets the oshash column for the scene with the given id.
func (qb *SceneQueryBuilder) UpdateOSHash(id int, oshash string, tx *sqlx.Tx) error {
	ensureTx(tx)
	_, err := tx.Exec(
		`UPDATE scenes SET oshash = ? WHERE scenes.id = ? `,
		oshash, id,
	)
	return err
}
// UpdateChecksum sets the checksum column for the scene with the given id.
func (qb *SceneQueryBuilder) UpdateChecksum(id int, checksum string, tx *sqlx.Tx) error {
	ensureTx(tx)
	_, err := tx.Exec(
		`UPDATE scenes SET checksum = ? WHERE scenes.id = ? `,
		checksum, id,
	)
	return err
}
func (qb *SceneQueryBuilder) UpdateSceneCover(sceneID int, cover []byte, tx *sqlx.Tx) error { func (qb *SceneQueryBuilder) UpdateSceneCover(sceneID int, cover []byte, tx *sqlx.Tx) error {
ensureTx(tx) ensureTx(tx)

View File

@@ -908,7 +908,7 @@ func TestSceneUpdateSceneCover(t *testing.T) {
const name = "TestSceneUpdateSceneCover" const name = "TestSceneUpdateSceneCover"
scene := models.Scene{ scene := models.Scene{
Path: name, Path: name,
Checksum: utils.MD5FromString(name), Checksum: sql.NullString{String: utils.MD5FromString(name), Valid: true},
} }
created, err := qb.Create(scene, tx) created, err := qb.Create(scene, tx)
if err != nil { if err != nil {
@@ -955,7 +955,7 @@ func TestSceneDestroySceneCover(t *testing.T) {
const name = "TestSceneDestroySceneCover" const name = "TestSceneDestroySceneCover"
scene := models.Scene{ scene := models.Scene{
Path: name, Path: name,
Checksum: utils.MD5FromString(name), Checksum: sql.NullString{String: utils.MD5FromString(name), Valid: true},
} }
created, err := qb.Create(scene, tx) created, err := qb.Create(scene, tx)
if err != nil { if err != nil {

View File

@@ -276,7 +276,7 @@ func createScenes(tx *sqlx.Tx, n int) error {
scene := models.Scene{ scene := models.Scene{
Path: getSceneStringValue(i, pathField), Path: getSceneStringValue(i, pathField),
Title: sql.NullString{String: getSceneStringValue(i, titleField), Valid: true}, Title: sql.NullString{String: getSceneStringValue(i, titleField), Valid: true},
Checksum: getSceneStringValue(i, checksumField), Checksum: sql.NullString{String: getSceneStringValue(i, checksumField), Valid: true},
Details: sql.NullString{String: getSceneStringValue(i, "Details"), Valid: true}, Details: sql.NullString{String: getSceneStringValue(i, "Details"), Valid: true},
Rating: getSceneRating(i), Rating: getSceneRating(i),
OCounter: getSceneOCounter(i), OCounter: getSceneOCounter(i),

View File

@@ -130,12 +130,21 @@ func (s *stashScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
} }
var q struct { var q struct {
FindScene *models.ScrapedSceneStash `graphql:"findScene(checksum: $c)"` FindScene *models.ScrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
}
type SceneHashInput struct {
Checksum *string `graphql:"checksum" json:"checksum"`
Oshash *string `graphql:"oshash" json:"oshash"`
}
input := SceneHashInput{
Checksum: &storedScene.Checksum.String,
Oshash: &storedScene.OSHash.String,
} }
checksum := graphql.String(storedScene.Checksum)
vars := map[string]interface{}{ vars := map[string]interface{}{
"c": &checksum, "c": &input,
} }
client := s.getStashClient() client := s.getStashClient()

82
pkg/utils/oshash.go Normal file
View File

@@ -0,0 +1,82 @@
package utils
import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
	"os"
)
// OSHashFromFilePath calculates the hash using the same algorithm that
// OpenSubtitles.org uses.
//
// Calculation is as follows:
// size + 64 bit checksum of the first and last 64k bytes of the file.
func OSHashFromFilePath(filePath string) (string, error) {
f, err := os.Open(filePath)
if err != nil {
return "", err
}
defer f.Close()
fi, err := f.Stat()
if err != nil {
return "", err
}
fileSize := int64(fi.Size())
if fileSize == 0 {
return "", nil
}
const chunkSize = 64 * 1024
fileChunkSize := int64(chunkSize)
if fileSize < fileChunkSize {
fileChunkSize = fileSize
}
head := make([]byte, fileChunkSize)
tail := make([]byte, fileChunkSize)
// read the head of the file into the start of the buffer
_, err = f.Read(head)
if err != nil {
return "", err
}
// seek to the end of the file - the chunk size
_, err = f.Seek(-fileChunkSize, 2)
if err != nil {
return "", err
}
// read the tail of the file
_, err = f.Read(tail)
if err != nil {
return "", err
}
// put the head and tail together
buf := append(head, tail...)
// convert bytes into uint64
ints := make([]uint64, len(buf)/8)
reader := bytes.NewReader(buf)
err = binary.Read(reader, binary.LittleEndian, &ints)
if err != nil {
return "", err
}
// sum the integers
var sum uint64
for _, v := range ints {
sum += v
}
// add the filesize
sum += uint64(fileSize)
// output as hex
return fmt.Sprintf("%016x", sum), nil
}

View File

@@ -1,20 +0,0 @@
// +build ignore
package main
import (
"fmt"
"os"
"github.com/stashapp/stash/pkg/utils"
)
func main() {
hash, err := utils.OSHashFromFilePath(os.Args[1])
if err != nil {
panic(err)
}
fmt.Println(hash)
}

View File

@@ -2,7 +2,10 @@ import React from "react";
import ReactMarkdown from "react-markdown"; import ReactMarkdown from "react-markdown";
const markup = ` const markup = `
#### 💥 **Note: After upgrading, the next scan will populate all scenes with oshash hashes. MD5 calculation can be disabled after populating the oshash for all scenes. See \`Hashing Algorithms\` in the \`Configuration\` section of the manual for details. **
### ✨ New Features ### ✨ New Features
* Add oshash algorithm for hashing scene video files. Enabled by default on new systems.
* Support (re-)generation of generated content for specific scenes. * Support (re-)generation of generated content for specific scenes.
* Add tag thumbnails, tags grid view and tag page. * Add tag thumbnails, tags grid view and tag page.
* Add post-scrape dialog. * Add post-scrape dialog.

View File

@@ -10,7 +10,19 @@ interface ISceneFileInfoPanelProps {
export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = ( export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = (
props: ISceneFileInfoPanelProps props: ISceneFileInfoPanelProps
) => { ) => {
function renderOSHash() {
if (props.scene.oshash) {
return (
<div className="row">
<span className="col-4">Hash</span>
<span className="col-8 text-truncate">{props.scene.oshash}</span>
</div>
);
}
}
function renderChecksum() { function renderChecksum() {
if (props.scene.checksum) {
return ( return (
<div className="row"> <div className="row">
<span className="col-4">Checksum</span> <span className="col-4">Checksum</span>
@@ -18,6 +30,7 @@ export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = (
</div> </div>
); );
} }
}
function renderPath() { function renderPath() {
const { const {
@@ -178,6 +191,7 @@ export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = (
return ( return (
<div className="container scene-file-info"> <div className="container scene-file-info">
{renderOSHash()}
{renderChecksum()} {renderChecksum()}
{renderPath()} {renderPath()}
{renderStream()} {renderStream()}

View File

@@ -17,6 +17,10 @@ export const SettingsConfigurationPanel: React.FC = () => {
undefined undefined
); );
const [cachePath, setCachePath] = useState<string | undefined>(undefined); const [cachePath, setCachePath] = useState<string | undefined>(undefined);
const [calculateMD5, setCalculateMD5] = useState<boolean>(false);
const [videoFileNamingAlgorithm, setVideoFileNamingAlgorithm] = useState<
GQL.HashAlgorithm | undefined
>(undefined);
const [previewSegments, setPreviewSegments] = useState<number>(0); const [previewSegments, setPreviewSegments] = useState<number>(0);
const [previewSegmentDuration, setPreviewSegmentDuration] = useState<number>( const [previewSegmentDuration, setPreviewSegmentDuration] = useState<number>(
0 0
@@ -58,6 +62,9 @@ export const SettingsConfigurationPanel: React.FC = () => {
databasePath, databasePath,
generatedPath, generatedPath,
cachePath, cachePath,
calculateMD5,
videoFileNamingAlgorithm:
(videoFileNamingAlgorithm as GQL.HashAlgorithm) ?? undefined,
previewSegments, previewSegments,
previewSegmentDuration, previewSegmentDuration,
previewExcludeStart, previewExcludeStart,
@@ -86,6 +93,8 @@ export const SettingsConfigurationPanel: React.FC = () => {
setDatabasePath(conf.general.databasePath); setDatabasePath(conf.general.databasePath);
setGeneratedPath(conf.general.generatedPath); setGeneratedPath(conf.general.generatedPath);
setCachePath(conf.general.cachePath); setCachePath(conf.general.cachePath);
setVideoFileNamingAlgorithm(conf.general.videoFileNamingAlgorithm);
setCalculateMD5(conf.general.calculateMD5);
setPreviewSegments(conf.general.previewSegments); setPreviewSegments(conf.general.previewSegments);
setPreviewSegmentDuration(conf.general.previewSegmentDuration); setPreviewSegmentDuration(conf.general.previewSegmentDuration);
setPreviewExcludeStart(conf.general.previewExcludeStart); setPreviewExcludeStart(conf.general.previewExcludeStart);
@@ -191,6 +200,33 @@ export const SettingsConfigurationPanel: React.FC = () => {
return GQL.StreamingResolutionEnum.Original; return GQL.StreamingResolutionEnum.Original;
} }
const namingHashAlgorithms = [
GQL.HashAlgorithm.Md5,
GQL.HashAlgorithm.Oshash,
].map(namingHashToString);
function namingHashToString(value: GQL.HashAlgorithm | undefined) {
switch (value) {
case GQL.HashAlgorithm.Oshash:
return "oshash";
case GQL.HashAlgorithm.Md5:
return "MD5";
}
return "MD5";
}
function translateNamingHash(value: string) {
switch (value) {
case "oshash":
return GQL.HashAlgorithm.Oshash;
case "MD5":
return GQL.HashAlgorithm.Md5;
}
return GQL.HashAlgorithm.Md5;
}
if (error) return <h1>{error.message}</h1>; if (error) return <h1>{error.message}</h1>;
if (!data?.configuration || loading) return <LoadingIndicator />; if (!data?.configuration || loading) return <LoadingIndicator />;
@@ -294,6 +330,52 @@ export const SettingsConfigurationPanel: React.FC = () => {
<hr /> <hr />
<Form.Group>
<h4>Hashing</h4>
<Form.Group>
<Form.Check
checked={calculateMD5}
label="Calculate MD5 for videos"
onChange={() => setCalculateMD5(!calculateMD5)}
/>
<Form.Text className="text-muted">
Calculate MD5 checksum in addition to oshash. Enabling will cause
initial scans to be slower. File naming hash must be set to oshash
to disable MD5 calculation.
</Form.Text>
</Form.Group>
<Form.Group id="transcode-size">
<h6>Generated file naming hash</h6>
<Form.Control
className="w-auto input-control"
as="select"
value={namingHashToString(videoFileNamingAlgorithm)}
onChange={(e: React.ChangeEvent<HTMLSelectElement>) =>
setVideoFileNamingAlgorithm(
translateNamingHash(e.currentTarget.value)
)
}
>
{namingHashAlgorithms.map((q) => (
<option key={q} value={q}>
{q}
</option>
))}
</Form.Control>
<Form.Text className="text-muted">
Use MD5 or oshash for generated file naming. Changing this requires
that all scenes have the applicable MD5/oshash value populated.
After changing this value, existing generated files will need to be
migrated or regenerated. See Tasks page for migration.
</Form.Text>
</Form.Group>
</Form.Group>
<hr />
<Form.Group> <Form.Group>
<h4>Video</h4> <h4>Video</h4>
<Form.Group id="transcode-size"> <Form.Group id="transcode-size">

View File

@@ -9,6 +9,7 @@ import {
mutateMetadataScan, mutateMetadataScan,
mutateMetadataAutoTag, mutateMetadataAutoTag,
mutateMetadataExport, mutateMetadataExport,
mutateMigrateHashNaming,
mutateStopJob, mutateStopJob,
} from "src/core/StashService"; } from "src/core/StashService";
import { useToast } from "src/hooks"; import { useToast } from "src/hooks";
@@ -46,6 +47,8 @@ export const SettingsTasksPanel: React.FC = () => {
return "Importing from JSON"; return "Importing from JSON";
case "Auto Tag": case "Auto Tag":
return "Auto tagging scenes"; return "Auto tagging scenes";
case "Migrate":
return "Migrating";
default: default:
return "Idle"; return "Idle";
} }
@@ -308,6 +311,28 @@ export const SettingsTasksPanel: React.FC = () => {
Import from exported JSON. This is a destructive action. Import from exported JSON. This is a destructive action.
</Form.Text> </Form.Text>
</Form.Group> </Form.Group>
<hr />
<h5>Migrations</h5>
<Form.Group>
<Button
id="migrateHashNaming"
variant="danger"
onClick={() =>
mutateMigrateHashNaming().then(() => {
jobStatus.refetch();
})
}
>
Rename generated files
</Button>
<Form.Text className="text-muted">
Used after changing the Generated file naming hash to rename existing
generated files to the new hash format.
</Form.Text>
</Form.Group>
</> </>
); );
}; };

View File

@@ -36,3 +36,7 @@
#configuration-tabs-tabpane-about .table { #configuration-tabs-tabpane-about .table {
width: initial; width: initial;
} }
#configuration-tabs-tabpane-tasks h5 {
margin-bottom: 1em;
}

View File

@@ -469,6 +469,11 @@ export const mutateMetadataClean = () =>
mutation: GQL.MetadataCleanDocument, mutation: GQL.MetadataCleanDocument,
}); });
export const mutateMigrateHashNaming = () =>
client.mutate<GQL.MigrateHashNamingMutation>({
mutation: GQL.MigrateHashNamingDocument,
});
export const mutateMetadataExport = () => export const mutateMetadataExport = () =>
client.mutate<GQL.MetadataExportMutation>({ client.mutate<GQL.MetadataExportMutation>({
mutation: GQL.MetadataExportDocument, mutation: GQL.MetadataExportDocument,

View File

@@ -34,6 +34,32 @@ exclude:
_a useful [link](https://regex101.com/) to experiment with regexps_ _a useful [link](https://regex101.com/) to experiment with regexps_
## Hashing algorithms
Stash identifies video files by calculating a hash of the file. There are two algorithms available for hashing: `oshash` and `MD5`. `MD5` requires reading the entire file, and can therefore be slow, particularly when reading files over a network. `oshash` (which uses OpenSubtitles' hashing algorithm) only reads 64k from each end of the file.
The hash is used to name the generated files such as preview images and videos, and sprite images.
By default, new systems have MD5 calculation disabled for optimal performance. Existing systems that are upgraded will have the oshash populated for each scene on the next scan.
### Changing the hashing algorithm
To change the file naming hash to oshash, all scenes must have their oshash values populated. oshash population is done automatically when scanning.
To change the file naming hash to `MD5`, the MD5 must be populated for all scenes. To do this, `Calculate MD5` for videos must be enabled and the library must be rescanned.
MD5 calculation may only be disabled if the file naming hash is set to `oshash`.
After changing the file naming hash, any existing generated files will now be named incorrectly. This means that stash will not find them and may regenerate them if the `Generate` task is used. To remedy this, run the `Rename generated files` task, which will rename existing generated files to their correct names.
#### Step-by-step instructions to migrate to oshash for existing users
These instructions are for existing users whose systems will be defaulted to use and calculate MD5 checksums. Once completed, MD5 checksums will no longer be calculated when scanning, and oshash will be used for generated file naming. Existing calculated MD5 checksums will remain on scenes, but checksums will not be calculated for new scenes.
1. Scan the library (to populate oshash for all existing scenes).
2. In Settings -> Configuration page, untick `Calculate MD5` and select `oshash` as file naming hash. Save the configuration.
3. In Settings -> Tasks page, click on the `Rename generated files` migration button.
## Scraping ## Scraping
### User Agent string ### User Agent string