Scan refactor (#1816)

* Add file scanner
* Scan scene changes
* Split scan files
* Generalise scan
* Refactor ffprobe
* Refactor ffmpeg encoder
* Move scene scan code to scene package
* Move matchExtension to utils
* Refactor gallery scanning
* Refactor image scanning
* Prevent race conditions on identical hashes
* Refactor image thumbnail generation
* Perform count concurrently
* Allow progress increment before total set
* Make progress updates more frequent

Author: WithoutPants
Date: 2021-10-15 10:39:48 +11:00
Committed by: GitHub
Parent: 3d5ee16e90
Commit: 39fdde273d
55 changed files with 2172 additions and 1429 deletions
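
For orientation, the central API change in this commit is that the ffmpeg and ffprobe wrappers become plain string types holding the binary path (ffmpeg.Encoder and ffmpeg.FFProbe), replacing the old FFMPEGPath/FFProbePath fields and the NewEncoder/NewVideoFile(ffprobePath, ...) call pattern seen in the removed lines below. A minimal sketch of the new usage, with the binary and media paths being assumptions for illustration:

package main

import (
	"log"

	"github.com/stashapp/stash/pkg/ffmpeg"
)

func main() {
	// Encoder and FFProbe are now string types wrapping the executable path.
	encoder := ffmpeg.Encoder("/usr/bin/ffmpeg")  // assumed path
	ffprobe := ffmpeg.FFProbe("/usr/bin/ffprobe") // assumed path

	// FFProbe.NewVideoFile replaces ffmpeg.NewVideoFile(ffprobePath, path, stripExt).
	videoFile, err := ffprobe.NewVideoFile("/videos/scene.mp4", false)
	if err != nil {
		log.Fatalf("error reading video file: %v", err)
	}

	// Encoder methods keep their signatures; only the receiver construction changed.
	if err := encoder.Screenshot(*videoFile, ffmpeg.ScreenshotOptions{
		OutputPath: "/tmp/screenshot.jpg", // assumed output location
		Quality:    2,
	}); err != nil {
		log.Fatalf("error generating screenshot: %v", err)
	}
}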

View File

@@ -43,7 +43,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
if exists {
http.ServeFile(w, r, filepath)
} else {
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEGPath)
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG)
data, err := encoder.GetThumbnail(img, models.DefaultGthumbWidth)
if err != nil {
logger.Errorf("error generating thumbnail for image: %s", err.Error())

View File

@@ -57,7 +57,8 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
container = ffmpeg.Container(scene.Format.String)
} else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe
tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
ffprobe := manager.GetInstance().FFProbe
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path, false)
if err != nil {
logger.Errorf("[transcode] error reading video file: %v", err)
return ffmpeg.Container("")
@@ -105,7 +106,8 @@ func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
ffprobe := manager.GetInstance().FFProbe
videoFile, err := ffprobe.NewVideoFile(scene.Path, false)
if err != nil {
logger.Errorf("[stream] error reading video file: %v", err)
return
@@ -142,8 +144,8 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
scene := r.Context().Value(sceneKey).(*models.Scene)
// needs to be transcoded
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
ffprobe := manager.GetInstance().FFProbe
videoFile, err := ffprobe.NewVideoFile(scene.Path, false)
if err != nil {
logger.Errorf("[stream] error reading video file: %v", err)
return
@@ -171,7 +173,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
options.MaxTranscodeSize = models.StreamingResolutionEnum(requestedSize)
}
encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath)
encoder := manager.GetInstance().FFMPEG
stream, err = encoder.GetTranscodeStream(options)
if err != nil {

View File

@@ -12,21 +12,13 @@ import (
"github.com/stashapp/stash/pkg/logger"
)
type Encoder struct {
Path string
}
type Encoder string
var (
runningEncoders = make(map[string][]*os.Process)
runningEncodersMutex = sync.RWMutex{}
)
func NewEncoder(ffmpegPath string) Encoder {
return Encoder{
Path: ffmpegPath,
}
}
func registerRunningEncoder(path string, process *os.Process) {
runningEncodersMutex.Lock()
processes := runningEncoders[path]
@@ -86,7 +78,7 @@ func KillRunningEncoders(path string) {
// FFmpeg runner with progress output, used for transcodes
func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, error) {
cmd := exec.Command(e.Path, args...)
cmd := exec.Command(string(*e), args...)
stderr, err := cmd.StderrPipe()
if err != nil {
@@ -141,19 +133,25 @@ func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, er
return stdoutString, nil
}
func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
cmd := exec.Command(e.Path, args...)
func (e *Encoder) run(sourcePath string, args []string, stdin io.Reader) (string, error) {
cmd := exec.Command(string(*e), args...)
var stdout, stderr bytes.Buffer
cmd.Stdout = &stdout
cmd.Stderr = &stderr
cmd.Stdin = stdin
if err := cmd.Start(); err != nil {
return "", err
}
registerRunningEncoder(probeResult.Path, cmd.Process)
err := waitAndDeregister(probeResult.Path, cmd)
var err error
if sourcePath != "" {
registerRunningEncoder(sourcePath, cmd.Process)
err = waitAndDeregister(sourcePath, cmd)
} else {
err = cmd.Wait()
}
if err != nil {
// error message should be in the stderr stream

View File

@@ -34,7 +34,7 @@ func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOpt
"-strict", "-2",
options.OutputPath,
}
_, err := e.run(probeResult, args)
_, err := e.run(probeResult.Path, args, nil)
return err
}
@@ -55,6 +55,6 @@ func (e *Encoder) SceneMarkerImage(probeResult VideoFile, options SceneMarkerOpt
"-an",
options.OutputPath,
}
_, err := e.run(probeResult, args)
_, err := e.run(probeResult.Path, args, nil)
return err
}

View File

@@ -89,7 +89,7 @@ func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePre
args3 = append(args3, argsAudio...)
finalArgs := append(args3, options.OutputPath)
_, err := e.run(probeResult, finalArgs)
_, err := e.run(probeResult.Path, finalArgs, nil)
return err
}
@@ -102,7 +102,7 @@ func (e *Encoder) ScenePreviewVideoChunkCombine(probeResult VideoFile, concatFil
"-c", "copy",
outputPath,
}
_, err := e.run(probeResult, args)
_, err := e.run(probeResult.Path, args, nil)
return err
}
@@ -122,6 +122,6 @@ func (e *Encoder) ScenePreviewVideoToImage(probeResult VideoFile, width int, vid
"-an",
outputPath,
}
_, err := e.run(probeResult, args)
_, err := e.run(probeResult.Path, args, nil)
return err
}

View File

@@ -28,7 +28,7 @@ func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) e
"-f", "image2",
options.OutputPath,
}
_, err := e.run(probeResult, args)
_, err := e.run(probeResult.Path, args, nil)
return err
}

View File

@@ -22,7 +22,7 @@ func (e *Encoder) SpriteScreenshot(probeResult VideoFile, options SpriteScreensh
"-f", "rawvideo",
"-",
}
data, err := e.run(probeResult, args)
data, err := e.run(probeResult.Path, args, nil)
if err != nil {
return nil, err
}

View File

@@ -221,14 +221,17 @@ type VideoFile struct {
AudioCodec string
}
// FFProbe
type FFProbe string
// Execute exec command and bind result to struct.
func NewVideoFile(ffprobePath string, videoPath string, stripExt bool) (*VideoFile, error) {
func (f *FFProbe) NewVideoFile(videoPath string, stripExt bool) (*VideoFile, error) {
args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", videoPath}
//// Extremely slow on windows for some reason
//if runtime.GOOS != "windows" {
// args = append(args, "-count_frames")
//}
out, err := exec.Command(ffprobePath, args...).Output()
out, err := exec.Command(string(*f), args...).Output()
if err != nil {
return nil, fmt.Errorf("FFProbe encountered an error with <%s>.\nError JSON:\n%s\nError: %s", videoPath, string(out), err.Error())

pkg/ffmpeg/image.go (new file, 35 lines)
View File

@@ -0,0 +1,35 @@
package ffmpeg
import (
"bytes"
"errors"
"fmt"
)
func (e *Encoder) ImageThumbnail(image *bytes.Buffer, format *string, maxDimensions int, path string) ([]byte, error) {
// ffmpeg spends a long time sniffing the image format when data is piped through stdin, so we pass the format explicitly instead
ffmpegformat := ""
if format != nil && *format == "jpeg" {
ffmpegformat = "mjpeg"
} else if format != nil && *format == "png" {
ffmpegformat = "png_pipe"
} else if format != nil && *format == "webp" {
ffmpegformat = "webp_pipe"
} else {
return nil, errors.New("unsupported image format")
}
args := []string{
"-f", ffmpegformat,
"-i", "-",
"-vf", fmt.Sprintf("scale=%v:%v:force_original_aspect_ratio=decrease", maxDimensions, maxDimensions),
"-c:v", "mjpeg",
"-q:v", "5",
"-f", "image2pipe",
"-",
}
data, err := e.run(path, args, image)
return []byte(data), err
}
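
A short usage sketch of the new Encoder.ImageThumbnail helper added above; it pipes the image bytes through stdin and passes the already-detected format so ffmpeg does not have to sniff it. The binary and image paths are assumptions:

package main

import (
	"bytes"
	"log"
	"os"

	"github.com/stashapp/stash/pkg/ffmpeg"
)

func main() {
	encoder := ffmpeg.Encoder("/usr/bin/ffmpeg") // assumed path

	imgPath := "/images/photo.jpg" // assumed path
	imgData, err := os.ReadFile(imgPath)
	if err != nil {
		log.Fatal(err)
	}

	format := "jpeg" // the caller supplies the detected format ("jpeg", "png" or "webp")
	thumb, err := encoder.ImageThumbnail(bytes.NewBuffer(imgData), &format, 320, imgPath)
	if err != nil {
		log.Fatal(err)
	}

	// thumb holds an mjpeg-encoded thumbnail scaled down to at most 320px
	log.Printf("thumbnail: %d bytes", len(thumb))
}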

View File

@@ -205,7 +205,7 @@ func (e *Encoder) GetTranscodeStream(options TranscodeStreamOptions) (*Stream, e
func (e *Encoder) stream(probeResult VideoFile, options TranscodeStreamOptions) (*Stream, error) {
args := options.getStreamArgs()
cmd := exec.Command(e.Path, args...)
cmd := exec.Command(string(*e), args...)
logger.Debugf("Streaming via: %s", strings.Join(cmd.Args, " "))
stdout, err := cmd.StdoutPipe()

pkg/file/file.go (new file, 31 lines)
View File

@@ -0,0 +1,31 @@
package file
import (
"io"
"io/fs"
"os"
)
type fsFile struct {
path string
info fs.FileInfo
}
func (f *fsFile) Open() (io.ReadCloser, error) {
return os.Open(f.path)
}
func (f *fsFile) Path() string {
return f.path
}
func (f *fsFile) FileInfo() fs.FileInfo {
return f.info
}
func FSFile(path string, info fs.FileInfo) SourceFile {
return &fsFile{
path: path,
info: info,
}
}

pkg/file/hash.go (new file, 17 lines)
View File

@@ -0,0 +1,17 @@
package file
import (
"io"
"github.com/stashapp/stash/pkg/utils"
)
type FSHasher struct{}
func (h *FSHasher) OSHash(src io.ReadSeeker, size int64) (string, error) {
return utils.OSHashFromReader(src, size)
}
func (h *FSHasher) MD5(src io.Reader) (string, error) {
return utils.MD5FromReader(src)
}
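
A brief sketch of the FSHasher in isolation; an os.File satisfies the io.ReadSeeker that the OSHash method expects (the media path is an assumption):

package main

import (
	"log"
	"os"

	"github.com/stashapp/stash/pkg/file"
)

func main() {
	src, err := os.Open("/media/scene.mp4") // assumed path
	if err != nil {
		log.Fatal(err)
	}
	defer src.Close()

	info, err := src.Stat()
	if err != nil {
		log.Fatal(err)
	}

	h := &file.FSHasher{}
	oshash, err := h.OSHash(src, info.Size())
	if err != nil {
		log.Fatal(err)
	}
	log.Println("oshash:", oshash)
}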

pkg/file/scan.go (new file, 175 lines)
View File

@@ -0,0 +1,175 @@
package file
import (
"fmt"
"io"
"io/fs"
"strconv"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
type SourceFile interface {
Open() (io.ReadCloser, error)
Path() string
FileInfo() fs.FileInfo
}
type FileBased interface {
File() models.File
}
type Hasher interface {
OSHash(src io.ReadSeeker, size int64) (string, error)
MD5(src io.Reader) (string, error)
}
type Scanned struct {
Old *models.File
New *models.File
}
// FileUpdated returns true if both old and new files are present and not equal.
func (s Scanned) FileUpdated() bool {
if s.Old == nil || s.New == nil {
return false
}
return !s.Old.Equal(*s.New)
}
// ContentsChanged returns true if both old and new files are present and the file content is different.
func (s Scanned) ContentsChanged() bool {
if s.Old == nil || s.New == nil {
return false
}
if s.Old.Checksum != s.New.Checksum {
return true
}
if s.Old.OSHash != s.New.OSHash {
return true
}
return false
}
type Scanner struct {
Hasher Hasher
CalculateMD5 bool
CalculateOSHash bool
}
func (o Scanner) ScanExisting(existing FileBased, file SourceFile) (h *Scanned, err error) {
info := file.FileInfo()
h = &Scanned{}
existingFile := existing.File()
h.Old = &existingFile
updatedFile := existingFile
h.New = &updatedFile
// update existing data if needed
// truncate to seconds, since we don't store beyond that in the database
updatedFile.FileModTime = info.ModTime().Truncate(time.Second)
modTimeChanged := !existingFile.FileModTime.Equal(updatedFile.FileModTime)
// regenerate hash(es) if missing or file mod time changed
if _, err = o.generateHashes(&updatedFile, file, modTimeChanged); err != nil {
return nil, err
}
// notify of changes as needed
// object exists, no further processing required
return
}
func (o Scanner) ScanNew(file SourceFile) (*models.File, error) {
info := file.FileInfo()
sizeStr := strconv.FormatInt(info.Size(), 10)
modTime := info.ModTime()
f := models.File{
Path: file.Path(),
Size: sizeStr,
FileModTime: modTime,
}
if _, err := o.generateHashes(&f, file, true); err != nil {
return nil, err
}
return &f, nil
}
// generateHashes regenerates and sets the hashes in the provided File.
// It will not recalculate unless specified.
func (o Scanner) generateHashes(f *models.File, file SourceFile, regenerate bool) (changed bool, err error) {
existing := *f
var src io.ReadCloser
if o.CalculateOSHash && (regenerate || f.OSHash == "") {
logger.Infof("Calculating oshash for %s ...", f.Path)
src, err = file.Open()
if err != nil {
return false, err
}
defer src.Close()
seekSrc, valid := src.(io.ReadSeeker)
if !valid {
return false, fmt.Errorf("invalid source file type: %s", file.Path())
}
// regenerate hash
var oshash string
oshash, err = o.Hasher.OSHash(seekSrc, file.FileInfo().Size())
if err != nil {
return false, fmt.Errorf("error generating oshash for %s: %w", file.Path(), err)
}
f.OSHash = oshash
// reset reader to start of file
_, err = seekSrc.Seek(0, io.SeekStart)
if err != nil {
return false, fmt.Errorf("error seeking to start of file in %s: %w", file.Path(), err)
}
}
// always generate the MD5 if it is not already set.
// only regenerate the MD5 if regeneration was requested and either:
// - the OSHash was not calculated, or
// - the generated OSHash differs from the existing one
if o.CalculateMD5 && (f.Checksum == "" || (regenerate && (!o.CalculateOSHash || existing.OSHash != f.OSHash))) {
logger.Infof("Calculating checksum for %s...", f.Path)
if src == nil {
src, err = file.Open()
if err != nil {
return false, err
}
defer src.Close()
}
// regenerate checksum
var checksum string
checksum, err = o.Hasher.MD5(src)
if err != nil {
return
}
f.Checksum = checksum
}
changed = (o.CalculateOSHash && (f.OSHash != existing.OSHash)) || (o.CalculateMD5 && (f.Checksum != existing.Checksum))
return
}
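
A minimal sketch of driving the new file.Scanner directly for a file on disk, combining the FSFile and FSHasher helpers added in this commit (the path is an assumption; the gallery, image and scene scanners later in this commit wrap this same type):

package main

import (
	"log"
	"os"

	"github.com/stashapp/stash/pkg/file"
)

func main() {
	path := "/media/scene.mp4" // assumed path
	info, err := os.Stat(path)
	if err != nil {
		log.Fatal(err)
	}

	scanner := file.Scanner{
		Hasher:          &file.FSHasher{},
		CalculateOSHash: true,
		CalculateMD5:    false,
	}

	// ScanNew hashes the file and returns a populated models.File.
	f, err := scanner.ScanNew(file.FSFile(path, info))
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("oshash=%s size=%s", f.OSHash, f.Size)
}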

pkg/file/zip.go (new file, 64 lines)
View File

@@ -0,0 +1,64 @@
package file
import (
"archive/zip"
"io"
"io/fs"
"strings"
)
const zipSeparator = "\x00"
type zipFile struct {
zipPath string
zf *zip.File
}
func (f *zipFile) Open() (io.ReadCloser, error) {
return f.zf.Open()
}
func (f *zipFile) Path() string {
// TODO - fix this
return ZipFilename(f.zipPath, f.zf.Name)
}
func (f *zipFile) FileInfo() fs.FileInfo {
return f.zf.FileInfo()
}
func ZipFile(zipPath string, zf *zip.File) SourceFile {
return &zipFile{
zipPath: zipPath,
zf: zf,
}
}
func ZipFilename(zipFilename, filenameInZip string) string {
return zipFilename + zipSeparator + filenameInZip
}
// IsZipPath returns true if the path includes the zip separator byte,
// indicating it is within a zip file.
func IsZipPath(p string) bool {
return strings.Contains(p, zipSeparator)
}
// ZipPathDisplayName converts a zip path for display. It translates the zip
// file separator character into '/', since this character is also used for
// path separators within zip files. It returns the original provided path
// if it does not contain the zip file separator character.
func ZipPathDisplayName(path string) string {
return strings.Replace(path, zipSeparator, "/", -1)
}
func ZipFilePath(path string) (zipFilename, filename string) {
nullIndex := strings.Index(path, zipSeparator)
if nullIndex != -1 {
zipFilename = path[0:nullIndex]
filename = path[nullIndex+1:]
} else {
filename = path
}
return
}
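
The helpers above encode a zip archive path and an internal entry name into a single string separated by a null byte; a brief round-trip sketch (paths are assumptions):

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/file"
)

func main() {
	// combine the archive path and entry name into the single-string form used by the scanners
	p := file.ZipFilename("/galleries/holiday.zip", "beach/001.jpg") // assumed paths

	fmt.Println(file.IsZipPath(p))          // true
	fmt.Println(file.ZipPathDisplayName(p)) // /galleries/holiday.zip/beach/001.jpg

	zipName, inner := file.ZipFilePath(p)
	fmt.Println(zipName, inner) // /galleries/holiday.zip beach/001.jpg
}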

View File

@@ -25,7 +25,7 @@ const (
const (
path = "path"
zip = true
isZip = true
url = "url"
checksum = "checksum"
title = "title"
@@ -48,7 +48,7 @@ func createFullGallery(id int) models.Gallery {
return models.Gallery{
ID: id,
Path: models.NullString(path),
Zip: zip,
Zip: isZip,
Title: models.NullString(title),
Checksum: checksum,
Date: models.SQLiteDate{
@@ -72,7 +72,7 @@ func createFullJSONGallery() *jsonschema.Gallery {
return &jsonschema.Gallery{
Title: title,
Path: path,
Zip: zip,
Zip: isZip,
Checksum: checksum,
Date: date,
Details: details,

pkg/gallery/scan.go (new file, 225 lines)
View File

@@ -0,0 +1,225 @@
package gallery
import (
"archive/zip"
"context"
"database/sql"
"fmt"
"strings"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/utils"
)
const mutexType = "gallery"
type Scanner struct {
file.Scanner
ImageExtensions []string
StripFileExtension bool
Ctx context.Context
CaseSensitiveFs bool
TxnManager models.TransactionManager
Paths *paths.Paths
PluginCache *plugin.Cache
MutexManager *utils.MutexManager
}
func FileScanner(hasher file.Hasher) file.Scanner {
return file.Scanner{
Hasher: hasher,
CalculateMD5: true,
}
}
func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFile) (retGallery *models.Gallery, scanImages bool, err error) {
scanned, err := scanner.Scanner.ScanExisting(existing, file)
if err != nil {
return nil, false, err
}
retGallery = existing.(*models.Gallery)
path := scanned.New.Path
changed := false
if scanned.ContentsChanged() {
logger.Infof("%s has been updated: rescanning", path)
retGallery.SetFile(*scanned.New)
changed = true
} else if scanned.FileUpdated() {
logger.Infof("Updated gallery file %s", path)
retGallery.SetFile(*scanned.New)
changed = true
}
if changed {
scanImages = true
logger.Infof("%s has been updated: rescanning", path)
retGallery.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()}
// we are operating on a checksum now, so grab a mutex on the checksum
done := make(chan struct{})
scanner.MutexManager.Claim(mutexType, scanned.New.Checksum, done)
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
// free the mutex once transaction is complete
defer close(done)
// ensure no clashes of hashes
if scanned.New.Checksum != "" && scanned.Old.Checksum != scanned.New.Checksum {
dupe, _ := r.Gallery().FindByChecksum(retGallery.Checksum)
if dupe != nil {
return fmt.Errorf("MD5 for file %s is the same as that of %s", path, dupe.Path.String)
}
}
retGallery, err = r.Gallery().Update(*retGallery)
return err
}); err != nil {
return nil, false, err
}
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retGallery.ID, plugin.GalleryUpdatePost, nil, nil)
}
return
}
func (scanner *Scanner) ScanNew(file file.SourceFile) (retGallery *models.Gallery, scanImages bool, err error) {
scanned, err := scanner.Scanner.ScanNew(file)
if err != nil {
return nil, false, err
}
path := file.Path()
checksum := scanned.Checksum
isNewGallery := false
isUpdatedGallery := false
var g *models.Gallery
// grab a mutex on the checksum
done := make(chan struct{})
scanner.MutexManager.Claim(mutexType, checksum, done)
defer close(done)
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Gallery()
g, _ = qb.FindByChecksum(checksum)
if g != nil {
exists, _ := utils.FileExists(g.Path.String)
if !scanner.CaseSensitiveFs {
// #1426 - if file exists but is a case-insensitive match for the
// original filename, then treat it as a move
if exists && strings.EqualFold(path, g.Path.String) {
exists = false
}
}
if exists {
logger.Infof("%s already exists. Duplicate of %s ", path, g.Path.String)
} else {
logger.Infof("%s already exists. Updating path...", path)
g.Path = sql.NullString{
String: path,
Valid: true,
}
g, err = qb.Update(*g)
if err != nil {
return err
}
isUpdatedGallery = true
}
} else {
// don't create gallery if it has no images
if scanner.hasImages(path) {
currentTime := time.Now()
g = &models.Gallery{
Zip: true,
Title: sql.NullString{
String: utils.GetNameFromPath(path, scanner.StripFileExtension),
Valid: true,
},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
g.SetFile(*scanned)
// only warn when creating the gallery
ok, err := utils.IsZipFileUncompressed(path)
if err == nil && !ok {
logger.Warnf("%s is using above store (0) level compression.", path)
}
logger.Infof("%s doesn't exist. Creating new item...", path)
g, err = qb.Create(*g)
if err != nil {
return err
}
scanImages = true
isNewGallery = true
}
}
return nil
}); err != nil {
return nil, false, err
}
if isNewGallery {
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, g.ID, plugin.GalleryCreatePost, nil, nil)
} else if isUpdatedGallery {
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, g.ID, plugin.GalleryUpdatePost, nil, nil)
}
scanImages = isNewGallery
retGallery = g
return
}
func (scanner *Scanner) isImage(pathname string) bool {
return utils.MatchExtension(pathname, scanner.ImageExtensions)
}
func (scanner *Scanner) hasImages(path string) bool {
readCloser, err := zip.OpenReader(path)
if err != nil {
logger.Warnf("Error while walking gallery zip: %v", err)
return false
}
defer readCloser.Close()
for _, file := range readCloser.File {
if file.FileInfo().IsDir() {
continue
}
if strings.Contains(file.Name, "__MACOSX") {
continue
}
if !scanner.isImage(file.Name) {
continue
}
return true
}
return false
}

View File

@@ -12,14 +12,13 @@ import (
"strings"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
_ "golang.org/x/image/webp"
)
const zipSeparator = "\x00"
func GetSourceImage(i *models.Image) (image.Image, error) {
f, err := openSourceImage(i.Path)
if err != nil {
@@ -67,17 +66,6 @@ func FileExists(path string) bool {
return true
}
func ZipFilename(zipFilename, filenameInZip string) string {
return zipFilename + zipSeparator + filenameInZip
}
// IsZipPath returns true if the path includes the zip separator byte,
// indicating it is within a zip file.
// TODO - this should be moved to utils
func IsZipPath(p string) bool {
return strings.Contains(p, zipSeparator)
}
type imageReadCloser struct {
src io.ReadCloser
zrc *zip.ReadCloser
@@ -102,7 +90,7 @@ func (i *imageReadCloser) Close() error {
func openSourceImage(path string) (io.ReadCloser, error) {
// may need to read from a zip file
zipFilename, filename := getFilePath(path)
zipFilename, filename := file.ZipFilePath(path)
if zipFilename != "" {
r, err := zip.OpenReader(zipFilename)
if err != nil {
@@ -134,17 +122,6 @@ func openSourceImage(path string) (io.ReadCloser, error) {
return os.Open(filename)
}
func getFilePath(path string) (zipFilename, filename string) {
nullIndex := strings.Index(path, zipSeparator)
if nullIndex != -1 {
zipFilename = path[0:nullIndex]
filename = path[nullIndex+1:]
} else {
filename = path
}
return
}
// GetFileDetails returns a pointer to an Image object with the
// width, height and size populated.
func GetFileDetails(path string) (*models.Image, error) {
@@ -203,7 +180,7 @@ func GetFileModTime(path string) (time.Time, error) {
func stat(path string) (os.FileInfo, error) {
// may need to read from a zip file
zipFilename, filename := getFilePath(path)
zipFilename, filename := file.ZipFilePath(path)
if zipFilename != "" {
r, err := zip.OpenReader(zipFilename)
if err != nil {
@@ -224,16 +201,8 @@ func stat(path string) (os.FileInfo, error) {
return os.Stat(filename)
}
// PathDisplayName converts an image path for display. It translates the zip
// file separator character into '/', since this character is also used for
// path separators within zip files. It returns the original provided path
// if it does not contain the zip file separator character.
func PathDisplayName(path string) string {
return strings.Replace(path, zipSeparator, "/", -1)
}
func Serve(w http.ResponseWriter, r *http.Request, path string) {
zipFilename, _ := getFilePath(path)
zipFilename, _ := file.ZipFilePath(path)
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week
if zipFilename == "" {
http.ServeFile(w, r, path)
@@ -259,7 +228,7 @@ func Serve(w http.ResponseWriter, r *http.Request, path string) {
}
func IsCover(img *models.Image) bool {
_, fn := getFilePath(img.Path)
_, fn := file.ZipFilePath(img.Path)
return strings.HasSuffix(fn, "cover.jpg")
}
@@ -268,13 +237,13 @@ func GetTitle(s *models.Image) string {
return s.Title.String
}
_, fn := getFilePath(s.Path)
_, fn := file.ZipFilePath(s.Path)
return filepath.Base(fn)
}
// GetFilename gets the base name of the image file
// If stripExt is set the file extension is omitted from the name
func GetFilename(s *models.Image, stripExt bool) string {
_, fn := getFilePath(s.Path)
_, fn := file.ZipFilePath(s.Path)
return utils.GetNameFromPath(fn, stripExt)
}

pkg/image/scan.go (new file, 192 lines)
View File

@@ -0,0 +1,192 @@
package image
import (
"context"
"fmt"
"os"
"strings"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/utils"
)
const mutexType = "image"
type Scanner struct {
file.Scanner
StripFileExtension bool
Ctx context.Context
CaseSensitiveFs bool
TxnManager models.TransactionManager
Paths *paths.Paths
PluginCache *plugin.Cache
MutexManager *utils.MutexManager
}
func FileScanner(hasher file.Hasher) file.Scanner {
return file.Scanner{
Hasher: hasher,
CalculateMD5: true,
}
}
func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFile) (retImage *models.Image, err error) {
scanned, err := scanner.Scanner.ScanExisting(existing, file)
if err != nil {
return nil, err
}
i := existing.(*models.Image)
path := scanned.New.Path
oldChecksum := i.Checksum
changed := false
if scanned.ContentsChanged() {
logger.Infof("%s has been updated: rescanning", path)
// regenerate the file details as well
if err := SetFileDetails(i); err != nil {
return nil, err
}
changed = true
} else if scanned.FileUpdated() {
logger.Infof("Updated image file %s", path)
changed = true
}
if changed {
i.SetFile(*scanned.New)
i.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()}
// we are operating on a checksum now, so grab a mutex on the checksum
done := make(chan struct{})
scanner.MutexManager.Claim(mutexType, scanned.New.Checksum, done)
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
// free the mutex once transaction is complete
defer close(done)
var err error
// ensure no clashes of hashes
if scanned.New.Checksum != "" && scanned.Old.Checksum != scanned.New.Checksum {
dupe, _ := r.Image().FindByChecksum(i.Checksum)
if dupe != nil {
return fmt.Errorf("MD5 for file %s is the same as that of %s", path, dupe.Path)
}
}
retImage, err = r.Image().UpdateFull(*i)
return err
}); err != nil {
return nil, err
}
// remove the old thumbnail if the checksum changed - we'll regenerate it
if oldChecksum != scanned.New.Checksum {
// remove cache dir of gallery
err = os.Remove(scanner.Paths.Generated.GetThumbnailPath(oldChecksum, models.DefaultGthumbWidth))
if err != nil {
logger.Errorf("Error deleting thumbnail image: %s", err)
}
}
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retImage.ID, plugin.ImageUpdatePost, nil, nil)
}
return
}
func (scanner *Scanner) ScanNew(f file.SourceFile) (retImage *models.Image, err error) {
scanned, err := scanner.Scanner.ScanNew(f)
if err != nil {
return nil, err
}
path := f.Path()
checksum := scanned.Checksum
// grab a mutex on the checksum
done := make(chan struct{})
scanner.MutexManager.Claim(mutexType, checksum, done)
defer close(done)
// check for image by checksum
var existingImage *models.Image
if err := scanner.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
existingImage, err = r.Image().FindByChecksum(checksum)
return err
}); err != nil {
return nil, err
}
pathDisplayName := file.ZipPathDisplayName(path)
if existingImage != nil {
exists := FileExists(existingImage.Path)
if !scanner.CaseSensitiveFs {
// #1426 - if file exists but is a case-insensitive match for the
// original filename, then treat it as a move
if exists && strings.EqualFold(path, existingImage.Path) {
exists = false
}
}
if exists {
logger.Infof("%s already exists. Duplicate of %s ", pathDisplayName, file.ZipPathDisplayName(existingImage.Path))
return nil, nil
} else {
logger.Infof("%s already exists. Updating path...", pathDisplayName)
imagePartial := models.ImagePartial{
ID: existingImage.ID,
Path: &path,
}
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
retImage, err = r.Image().Update(imagePartial)
return err
}); err != nil {
return nil, err
}
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, existingImage.ID, plugin.ImageUpdatePost, nil, nil)
}
} else {
logger.Infof("%s doesn't exist. Creating new item...", pathDisplayName)
currentTime := time.Now()
newImage := models.Image{
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
newImage.SetFile(*scanned)
newImage.Title.String = GetFilename(&newImage, scanner.StripFileExtension)
newImage.Title.Valid = true
if err := SetFileDetails(&newImage); err != nil {
logger.Error(err.Error())
return nil, err
}
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
var err error
retImage, err = r.Image().Create(newImage)
return err
}); err != nil {
return nil, err
}
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retImage.ID, plugin.ImageCreatePost, nil, nil)
}
return
}

View File

@@ -2,14 +2,11 @@ package image
import (
"bytes"
"errors"
"fmt"
"os/exec"
"runtime"
"strings"
"sync"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/models"
)
@@ -17,8 +14,8 @@ var vipsPath string
var once sync.Once
type ThumbnailEncoder struct {
FFMPEGPath string
VipsPath string
ffmpeg ffmpeg.Encoder
vips *vipsEncoder
}
func GetVipsPath() string {
@@ -28,11 +25,18 @@ func GetVipsPath() string {
return vipsPath
}
func NewThumbnailEncoder(ffmpegPath string) ThumbnailEncoder {
return ThumbnailEncoder{
FFMPEGPath: ffmpegPath,
VipsPath: GetVipsPath(),
func NewThumbnailEncoder(ffmpegEncoder ffmpeg.Encoder) ThumbnailEncoder {
ret := ThumbnailEncoder{
ffmpeg: ffmpegEncoder,
}
vipsPath := GetVipsPath()
if vipsPath != "" {
vipsEncoder := vipsEncoder(vipsPath)
ret.vips = &vipsEncoder
}
return ret
}
// GetThumbnail returns the thumbnail image of the provided image resized to
@@ -60,72 +64,9 @@ func (e *ThumbnailEncoder) GetThumbnail(img *models.Image, maxSize int) ([]byte,
}
// vips has issues loading files from stdin on Windows
if e.VipsPath != "" && runtime.GOOS != "windows" {
return e.getVipsThumbnail(buf, maxSize)
if e.vips != nil && runtime.GOOS != "windows" {
return e.vips.ImageThumbnail(buf, maxSize)
} else {
return e.getFFMPEGThumbnail(buf, format, maxSize, img.Path)
return e.ffmpeg.ImageThumbnail(buf, format, maxSize, img.Path)
}
}
func (e *ThumbnailEncoder) getVipsThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) {
args := []string{
"thumbnail_source",
"[descriptor=0]",
".jpg[Q=70,strip]",
fmt.Sprint(maxSize),
"--size", "down",
}
data, err := e.run(e.VipsPath, args, image)
return []byte(data), err
}
func (e *ThumbnailEncoder) getFFMPEGThumbnail(image *bytes.Buffer, format *string, maxDimensions int, path string) ([]byte, error) {
// ffmpeg spends a long time sniffing the image format when data is piped through stdin, so we pass the format explicitly instead
ffmpegformat := ""
if format != nil && *format == "jpeg" {
ffmpegformat = "mjpeg"
} else if format != nil && *format == "png" {
ffmpegformat = "png_pipe"
} else if format != nil && *format == "webp" {
ffmpegformat = "webp_pipe"
} else {
return nil, errors.New("unsupported image format")
}
args := []string{
"-f", ffmpegformat,
"-i", "-",
"-vf", fmt.Sprintf("scale=%v:%v:force_original_aspect_ratio=decrease", maxDimensions, maxDimensions),
"-c:v", "mjpeg",
"-q:v", "5",
"-f", "image2pipe",
"-",
}
data, err := e.run(e.FFMPEGPath, args, image)
return []byte(data), err
}
func (e *ThumbnailEncoder) run(path string, args []string, stdin *bytes.Buffer) (string, error) {
cmd := exec.Command(path, args...)
var stdout, stderr bytes.Buffer
cmd.Stdout = &stdout
cmd.Stderr = &stderr
cmd.Stdin = stdin
if err := cmd.Start(); err != nil {
return "", err
}
err := cmd.Wait()
if err != nil {
// error message should be in the stderr stream
logger.Errorf("image encoder error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String())
return stdout.String(), err
}
return stdout.String(), nil
}

pkg/image/vips.go (new file, 48 lines)
View File

@@ -0,0 +1,48 @@
package image
import (
"bytes"
"fmt"
"os/exec"
"strings"
"github.com/stashapp/stash/pkg/logger"
)
type vipsEncoder string
func (e *vipsEncoder) ImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) {
args := []string{
"thumbnail_source",
"[descriptor=0]",
".jpg[Q=70,strip]",
fmt.Sprint(maxSize),
"--size", "down",
}
data, err := e.run(args, image)
return []byte(data), err
}
func (e *vipsEncoder) run(args []string, stdin *bytes.Buffer) (string, error) {
cmd := exec.Command(string(*e), args...)
var stdout, stderr bytes.Buffer
cmd.Stdout = &stdout
cmd.Stderr = &stderr
cmd.Stdin = stdin
if err := cmd.Start(); err != nil {
return "", err
}
err := cmd.Wait()
if err != nil {
// error message should be in the stderr stream
logger.Errorf("image encoder error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String())
return stdout.String(), err
}
return stdout.String(), nil
}

View File

@@ -9,7 +9,7 @@ import (
)
const maxGraveyardSize = 10
const defaultThrottleLimit = time.Second
const defaultThrottleLimit = 100 * time.Millisecond
// Manager maintains a queue of jobs. Jobs are executed one at a time.
type Manager struct {

View File

@@ -92,13 +92,13 @@ func (p *Progress) SetPercent(percent float64) {
p.updated()
}
// Increment increments the number of processed work units, if this does not
// exceed the total units. This is used to calculate the percentage.
// Increment increments the number of processed work units. This is used to calculate the percentage.
// If total is set already, then the number of processed work units will not exceed the total.
func (p *Progress) Increment() {
p.mutex.Lock()
defer p.mutex.Unlock()
if p.processed < p.total {
if p.total <= 0 || p.processed < p.total {
p.processed++
p.calculatePercent()
}

View File

@@ -1,7 +1,6 @@
package manager
import (
"path/filepath"
"regexp"
"strings"
@@ -86,14 +85,3 @@ func matchFileSimple(file string, regExps []*regexp.Regexp) bool {
}
return false
}
func matchExtension(path string, extensions []string) bool {
ext := filepath.Ext(path)
for _, e := range extensions {
if strings.EqualFold(ext, "."+e) {
return true
}
}
return false
}
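
The removed matchExtension helper moves to the utils package as MatchExtension with the same dot-prefixed, case-insensitive comparison; a quick sketch of the call, with the extension list being an assumed sample of the configured video extensions:

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/utils"
)

func main() {
	videoExt := []string{"mp4", "mkv", "webm"} // assumed sample of config.GetVideoExtensions()

	fmt.Println(utils.MatchExtension("/media/clip.MKV", videoExt))  // true: the comparison is case-insensitive
	fmt.Println(utils.MatchExtension("/media/cover.jpg", videoExt)) // false
}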

View File

@@ -71,7 +71,7 @@ func (g *GeneratorInfo) calculateFrameRate(videoStream *ffmpeg.FFProbeStream) er
args = append(args, "/dev/null")
}
command := exec.Command(instance.FFMPEGPath, args...)
command := exec.Command(string(instance.FFMPEG), args...)
var stdErrBuffer bytes.Buffer
command.Stderr = &stdErrBuffer // Frames go to stderr rather than stdout
if err := command.Run(); err == nil {

View File

@@ -42,7 +42,7 @@ func NewPhashGenerator(videoFile ffmpeg.VideoFile, checksum string) (*PhashGener
}
func (g *PhashGenerator) Generate() (*uint64, error) {
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
encoder := instance.FFMPEG
sprite, err := g.generateSprite(&encoder)
if err != nil {

View File

@@ -57,7 +57,7 @@ func (g *PreviewGenerator) Generate() error {
return err
}
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
encoder := instance.FFMPEG
if g.GenerateVideo {
if err := g.generateVideo(&encoder, false); err != nil {
logger.Warnf("[generator] failed generating scene preview, trying fallback")

View File

@@ -53,7 +53,7 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
}
func (g *SpriteGenerator) Generate() error {
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
encoder := instance.FFMPEG
if err := g.generateSpriteImage(&encoder); err != nil {
return err

View File

@@ -30,8 +30,8 @@ type singleton struct {
Paths *paths.Paths
FFMPEGPath string
FFProbePath string
FFMPEG ffmpeg.Encoder
FFProbe ffmpeg.FFProbe
SessionStore *session.Store
@@ -184,8 +184,8 @@ func initFFMPEG() error {
}
}
instance.FFMPEGPath = ffmpegPath
instance.FFProbePath = ffprobePath
instance.FFMPEG = ffmpeg.Encoder(ffmpegPath)
instance.FFProbe = ffmpeg.FFProbe(ffprobePath)
}
return nil
@@ -346,7 +346,7 @@ func (s *singleton) Setup(ctx context.Context, input models.SetupInput) error {
}
func (s *singleton) validateFFMPEG() error {
if s.FFMPEGPath == "" || s.FFProbePath == "" {
if s.FFMPEG == "" || s.FFProbe == "" {
return errors.New("missing ffmpeg and/or ffprobe")
}

View File

@@ -19,17 +19,17 @@ import (
func isGallery(pathname string) bool {
gExt := config.GetInstance().GetGalleryExtensions()
return matchExtension(pathname, gExt)
return utils.MatchExtension(pathname, gExt)
}
func isVideo(pathname string) bool {
vidExt := config.GetInstance().GetVideoExtensions()
return matchExtension(pathname, vidExt)
return utils.MatchExtension(pathname, vidExt)
}
func isImage(pathname string) bool {
imgExt := config.GetInstance().GetImageExtensions()
return matchExtension(pathname, imgExt)
return utils.MatchExtension(pathname, imgExt)
}
func getScanPaths(inputPaths []string) []*models.StashConfig {

View File

@@ -192,7 +192,8 @@ func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
container = ffmpeg.Container(scene.Format.String)
} else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe
tmpVideoFile, err := ffmpeg.NewVideoFile(GetInstance().FFProbePath, scene.Path, false)
ffprobe := GetInstance().FFProbe
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path, false)
if err != nil {
return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err)
}

View File

@@ -6,7 +6,7 @@ import (
)
func makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int, time float64) {
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
encoder := instance.FFMPEG
options := ffmpeg.ScreenshotOptions{
OutputPath: outputPath,
Quality: quality,

View File

@@ -294,7 +294,7 @@ func (j *cleanJob) shouldCleanScene(s *models.Scene) bool {
}
config := config.GetInstance()
if !matchExtension(s.Path, config.GetVideoExtensions()) {
if !utils.MatchExtension(s.Path, config.GetVideoExtensions()) {
logger.Infof("File extension does not match video extensions. Marking to clean: \"%s\"", s.Path)
return true
}
@@ -325,7 +325,7 @@ func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
}
config := config.GetInstance()
if !matchExtension(path, config.GetGalleryExtensions()) {
if !utils.MatchExtension(path, config.GetGalleryExtensions()) {
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
return true
}
@@ -355,7 +355,7 @@ func (j *cleanJob) shouldCleanImage(s *models.Image) bool {
}
config := config.GetInstance()
if !matchExtension(s.Path, config.GetImageExtensions()) {
if !utils.MatchExtension(s.Path, config.GetImageExtensions()) {
logger.Infof("File extension does not match image extensions. Marking to clean: \"%s\"", s.Path)
return true
}

View File

@@ -43,7 +43,8 @@ func (t *GenerateMarkersTask) Start() {
return
}
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
@@ -68,7 +69,8 @@ func (t *GenerateMarkersTask) generateSceneMarkers() {
return
}
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
@@ -106,7 +108,7 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
Width: 640,
}
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
encoder := instance.FFMPEG
if t.Overwrite || !videoExists {
videoFilename := baseFilename + ".mp4"

View File

@@ -4,7 +4,6 @@ import (
"context"
"database/sql"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
@@ -21,7 +20,8 @@ func (t *GeneratePhashTask) Start() {
return
}
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return

View File

@@ -1,7 +1,6 @@
package manager
import (
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
@@ -27,7 +26,8 @@ func (t *GeneratePreviewTask) Start() {
return
}
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return

View File

@@ -7,7 +7,6 @@ import (
"os"
"time"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
@@ -21,7 +20,8 @@ type GenerateScreenshotTask struct {
func (t *GenerateScreenshotTask) Start() {
scenePath := t.Scene.Path
probeResult, err := ffmpeg.NewVideoFile(instance.FFProbePath, scenePath, false)
ffprobe := instance.FFProbe
probeResult, err := ffprobe.NewVideoFile(scenePath, false)
if err != nil {
logger.Error(err.Error())

View File

@@ -1,7 +1,6 @@
package manager
import (
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
@@ -18,7 +17,8 @@ func (t *GenerateSpriteTask) Start() {
return
}
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return

View File

@@ -2,6 +2,7 @@ package manager
import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
// MigrateHashTask renames generated files between oshash and MD5 based on the
@@ -28,5 +29,5 @@ func (t *MigrateHashTask) Start() {
newHash = oshash
}
MigrateHash(oldHash, newHash)
scene.MigrateHash(instance.Paths, oldHash, newHash)
}

File diff suppressed because it is too large.

View File

@@ -0,0 +1,170 @@
package manager
import (
"archive/zip"
"context"
"fmt"
"path/filepath"
"strings"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
)
func (t *ScanTask) scanGallery(ctx context.Context) {
var g *models.Gallery
path := t.file.Path()
images := 0
scanImages := false
if err := t.TxnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
var err error
g, err = r.Gallery().FindByPath(path)
if g != nil && err != nil {
images, err = r.Image().CountByGalleryID(g.ID)
if err != nil {
return fmt.Errorf("error getting images for zip gallery %s: %s", path, err.Error())
}
}
return err
}); err != nil {
logger.Error(err.Error())
return
}
scanner := gallery.Scanner{
Scanner: gallery.FileScanner(&file.FSHasher{}),
ImageExtensions: instance.Config.GetImageExtensions(),
StripFileExtension: t.StripFileExtension,
Ctx: t.ctx,
CaseSensitiveFs: t.CaseSensitiveFs,
TxnManager: t.TxnManager,
Paths: instance.Paths,
PluginCache: instance.PluginCache,
MutexManager: t.mutexManager,
}
var err error
if g != nil {
g, scanImages, err = scanner.ScanExisting(g, t.file)
if err != nil {
logger.Error(err.Error())
return
}
// scan the zip files if the gallery has no images
scanImages = scanImages || images == 0
} else {
g, scanImages, err = scanner.ScanNew(t.file)
if err != nil {
logger.Error(err.Error())
}
}
if g != nil {
if scanImages {
t.scanZipImages(g)
} else {
// in case thumbnails have been deleted, regenerate them
t.regenerateZipImages(g)
}
}
}
// associates a gallery to a scene with the same basename
func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
path := t.file.Path()
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Gallery()
sqb := r.Scene()
g, err := qb.FindByPath(path)
if err != nil {
return err
}
if g == nil {
// associate is run after scan is finished
// should only happen if gallery is a directory or an io error occurs during hashing
logger.Warnf("associate: gallery %s not found in DB", path)
return nil
}
basename := strings.TrimSuffix(path, filepath.Ext(path))
var relatedFiles []string
vExt := config.GetInstance().GetVideoExtensions()
// make a list of media files that can be related to the gallery
for _, ext := range vExt {
related := basename + "." + ext
// exclude gallery extensions from the related files
if !isGallery(related) {
relatedFiles = append(relatedFiles, related)
}
}
for _, scenePath := range relatedFiles {
scene, _ := sqb.FindByPath(scenePath)
// found related Scene
if scene != nil {
sceneGalleries, _ := sqb.FindByGalleryID(g.ID) // check if gallery is already associated to the scene
isAssoc := false
for _, sg := range sceneGalleries {
if scene.ID == sg.ID {
isAssoc = true
break
}
}
if !isAssoc {
logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID)
if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil {
return err
}
}
}
}
return nil
}); err != nil {
logger.Error(err.Error())
}
wg.Done()
}
func (t *ScanTask) scanZipImages(zipGallery *models.Gallery) {
err := walkGalleryZip(zipGallery.Path.String, func(f *zip.File) error {
// copy this task and change the filename
subTask := *t
// filepath is the zip file and the internal file name, separated by a null byte
subTask.file = file.ZipFile(zipGallery.Path.String, f)
subTask.zipGallery = zipGallery
// run the subtask and wait for it to complete
subTask.Start(context.TODO())
return nil
})
if err != nil {
logger.Warnf("failed to scan zip file images for %s: %s", zipGallery.Path.String, err.Error())
}
}
func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) {
var images []*models.Image
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
iqb := r.Image()
var err error
images, err = iqb.FindByGalleryID(zipGallery.ID)
return err
}); err != nil {
logger.Warnf("failed to find gallery images: %s", err.Error())
return
}
for _, img := range images {
t.generateThumbnail(img)
}
}

View File

@@ -0,0 +1,166 @@
package manager
import (
"context"
"database/sql"
"path/filepath"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/utils"
)
func (t *ScanTask) scanImage() {
var i *models.Image
path := t.file.Path()
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
i, err = r.Image().FindByPath(path)
return err
}); err != nil {
logger.Error(err.Error())
return
}
scanner := image.Scanner{
Scanner: image.FileScanner(&file.FSHasher{}),
StripFileExtension: t.StripFileExtension,
Ctx: t.ctx,
TxnManager: t.TxnManager,
Paths: GetInstance().Paths,
PluginCache: instance.PluginCache,
MutexManager: t.mutexManager,
}
var err error
if i != nil {
i, err = scanner.ScanExisting(i, t.file)
if err != nil {
logger.Error(err.Error())
return
}
} else {
i, err = scanner.ScanNew(t.file)
if err != nil {
logger.Error(err.Error())
return
}
if i != nil {
if t.zipGallery != nil {
// associate with gallery
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
return gallery.AddImage(r.Gallery(), t.zipGallery.ID, i.ID)
}); err != nil {
logger.Error(err.Error())
return
}
} else if config.GetInstance().GetCreateGalleriesFromFolders() {
// create gallery from folder or associate with existing gallery
logger.Infof("Associating image %s with folder gallery", i.Path)
var galleryID int
var isNewGallery bool
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
var err error
galleryID, isNewGallery, err = t.associateImageWithFolderGallery(i.ID, r.Gallery())
return err
}); err != nil {
logger.Error(err.Error())
return
}
if isNewGallery {
GetInstance().PluginCache.ExecutePostHooks(t.ctx, galleryID, plugin.GalleryCreatePost, nil, nil)
}
}
}
}
if i != nil {
t.generateThumbnail(i)
}
}
func (t *ScanTask) associateImageWithFolderGallery(imageID int, qb models.GalleryReaderWriter) (galleryID int, isNew bool, err error) {
// find a gallery with the path specified
path := filepath.Dir(t.file.Path())
var g *models.Gallery
g, err = qb.FindByPath(path)
if err != nil {
return
}
if g == nil {
checksum := utils.MD5FromString(path)
// create the gallery
currentTime := time.Now()
newGallery := models.Gallery{
Checksum: checksum,
Path: sql.NullString{
String: path,
Valid: true,
},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
Title: sql.NullString{
String: utils.GetNameFromPath(path, false),
Valid: true,
},
}
logger.Infof("Creating gallery for folder %s", path)
g, err = qb.Create(newGallery)
if err != nil {
return 0, false, err
}
isNew = true
}
// associate image with gallery
err = gallery.AddImage(qb, g.ID, imageID)
galleryID = g.ID
return
}
func (t *ScanTask) generateThumbnail(i *models.Image) {
if !t.GenerateThumbnails {
return
}
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
exists, _ := utils.FileExists(thumbPath)
if exists {
return
}
config, _, err := image.DecodeSourceImage(i)
if err != nil {
logger.Errorf("error reading image %s: %s", i.Path, err.Error())
return
}
if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth {
encoder := image.NewThumbnailEncoder(instance.FFMPEG)
data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth)
if err != nil {
logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error())
return
}
err = utils.WriteFile(thumbPath, data)
if err != nil {
logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err)
}
}
}

View File

@@ -0,0 +1,58 @@
package manager
import (
"context"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
func (t *ScanTask) scanScene() *models.Scene {
logError := func(err error) *models.Scene {
logger.Error(err.Error())
return nil
}
var retScene *models.Scene
var s *models.Scene
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
s, err = r.Scene().FindByPath(t.file.Path())
return err
}); err != nil {
logger.Error(err.Error())
return nil
}
scanner := scene.Scanner{
Scanner: scene.FileScanner(&file.FSHasher{}, t.fileNamingAlgorithm, t.calculateMD5),
StripFileExtension: t.StripFileExtension,
FileNamingAlgorithm: t.fileNamingAlgorithm,
Ctx: t.ctx,
TxnManager: t.TxnManager,
Paths: GetInstance().Paths,
Screenshotter: &instance.FFMPEG,
VideoFileCreator: &instance.FFProbe,
PluginCache: instance.PluginCache,
MutexManager: t.mutexManager,
}
if s != nil {
if err := scanner.ScanExisting(s, t.file); err != nil {
return logError(err)
}
return nil
}
var err error
retScene, err = scanner.ScanNew(t.file)
if err != nil {
return logError(err)
}
return retScene
}

View File

@@ -20,13 +20,14 @@ func (t *GenerateTranscodeTask) Start() {
return
}
ffprobe := instance.FFProbe
var container ffmpeg.Container
if t.Scene.Format.Valid {
container = ffmpeg.Container(t.Scene.Format.String)
} else { // container isn't in the DB
// shouldn't happen unless user hasn't scanned after updating to PR#384+ version
tmpVideoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
tmpVideoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return
@@ -45,7 +46,7 @@ func (t *GenerateTranscodeTask) Start() {
return
}
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false)
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return
@@ -58,7 +59,7 @@ func (t *GenerateTranscodeTask) Start() {
OutputPath: outputPath,
MaxTranscodeSize: transcodeSize,
}
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
encoder := instance.FFMPEG
if videoCodec == ffmpeg.H264 { // for non supported h264 files stream copy the video part
if audioCodec == ffmpeg.MissingUnsupported {

pkg/models/model_file.go (new file, 30 lines)
View File

@@ -0,0 +1,30 @@
package models
import "time"
type File struct {
Checksum string `db:"checksum" json:"checksum"`
OSHash string `db:"oshash" json:"oshash"`
Path string `db:"path" json:"path"`
Size string `db:"size" json:"size"`
FileModTime time.Time `db:"file_mod_time" json:"file_mod_time"`
}
// GetHash returns the hash of the file, based on the hash algorithm provided. If
// hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned.
func (s File) GetHash(hashAlgorithm HashAlgorithm) string {
var ret string
if hashAlgorithm == HashAlgorithmMd5 {
ret = s.Checksum
} else if hashAlgorithm == HashAlgorithmOshash {
ret = s.OSHash
} else {
panic("unknown hash algorithm")
}
return ret
}
func (s File) Equal(o File) bool {
return s.Path == o.Path && s.Checksum == o.Checksum && s.OSHash == o.OSHash && s.Size == o.Size && s.FileModTime.Equal(o.FileModTime)
}
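
A brief sketch of the new models.File value in use; GetHash selects between the MD5 checksum and the oshash in the same way Scene.GetHash does, and Equal compares path, hashes, size and mod time (the field values are assumptions):

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/models"
)

func main() {
	f := models.File{
		Path:     "/media/scene.mp4", // assumed values
		Checksum: "d41d8cd98f00b204e9800998ecf8427e",
		OSHash:   "aabbccddeeff0011",
	}

	fmt.Println(f.GetHash(models.HashAlgorithmMd5))    // prints the Checksum
	fmt.Println(f.GetHash(models.HashAlgorithmOshash)) // prints the OSHash
	fmt.Println(f.Equal(f))                            // true
}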

View File

@@ -3,6 +3,7 @@ package models
import (
"database/sql"
"path/filepath"
"time"
)
type Gallery struct {
@@ -40,6 +41,40 @@ type GalleryPartial struct {
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
func (s *Gallery) File() File {
ret := File{
Path: s.Path.String,
}
ret.Checksum = s.Checksum
if s.FileModTime.Valid {
ret.FileModTime = s.FileModTime.Timestamp
}
return ret
}
func (s *Gallery) SetFile(f File) {
path := f.Path
s.Path = sql.NullString{
String: path,
Valid: true,
}
if f.Checksum != "" {
s.Checksum = f.Checksum
}
zeroTime := time.Time{}
if f.FileModTime != zeroTime {
s.FileModTime = NullSQLiteTimestamp{
Timestamp: f.FileModTime,
Valid: true,
}
}
}
// GetTitle returns the title of the scene. If the Title field is empty,
// then the base filename is returned.
func (s Gallery) GetTitle() string {

View File

@@ -3,6 +3,8 @@ package models
import (
"database/sql"
"path/filepath"
"strconv"
"time"
)
// Image stores the metadata for a single image.
@@ -41,14 +43,55 @@ type ImagePartial struct {
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
// GetTitle returns the title of the image. If the Title field is empty,
// then the base filename is returned.
func (s Image) GetTitle() string {
if s.Title.String != "" {
return s.Title.String
func (i *Image) File() File {
ret := File{
Path: i.Path,
}
return filepath.Base(s.Path)
ret.Checksum = i.Checksum
if i.FileModTime.Valid {
ret.FileModTime = i.FileModTime.Timestamp
}
if i.Size.Valid {
ret.Size = strconv.FormatInt(i.Size.Int64, 10)
}
return ret
}
func (i *Image) SetFile(f File) {
path := f.Path
i.Path = path
if f.Checksum != "" {
i.Checksum = f.Checksum
}
zeroTime := time.Time{}
if f.FileModTime != zeroTime {
i.FileModTime = NullSQLiteTimestamp{
Timestamp: f.FileModTime,
Valid: true,
}
}
if f.Size != "" {
size, err := strconv.ParseInt(f.Size, 10, 64)
if err == nil {
i.Size = sql.NullInt64{
Int64: size,
Valid: true,
}
}
}
}
// GetTitle returns the title of the image. If the Title field is empty,
// then the base filename is returned.
func (i *Image) GetTitle() string {
if i.Title.String != "" {
return i.Title.String
}
return filepath.Base(i.Path)
}
// ImageFileType represents the file metadata for an image.

View File

@@ -3,6 +3,7 @@ package models
import (
"database/sql"
"path/filepath"
"time"
)
// Scene stores the metadata for a single video scene.
@@ -35,6 +36,58 @@ type Scene struct {
Interactive bool `db:"interactive" json:"interactive"`
}
func (s *Scene) File() File {
ret := File{
Path: s.Path,
}
if s.Checksum.Valid {
ret.Checksum = s.Checksum.String
}
if s.OSHash.Valid {
ret.OSHash = s.OSHash.String
}
if s.FileModTime.Valid {
ret.FileModTime = s.FileModTime.Timestamp
}
if s.Size.Valid {
ret.Size = s.Size.String
}
return ret
}
func (s *Scene) SetFile(f File) {
path := f.Path
s.Path = path
if f.Checksum != "" {
s.Checksum = sql.NullString{
String: f.Checksum,
Valid: true,
}
}
if f.OSHash != "" {
s.OSHash = sql.NullString{
String: f.OSHash,
Valid: true,
}
}
zeroTime := time.Time{}
if f.FileModTime != zeroTime {
s.FileModTime = NullSQLiteTimestamp{
Timestamp: f.FileModTime,
Valid: true,
}
}
if f.Size != "" {
s.Size = sql.NullString{
String: f.Size,
Valid: true,
}
}
}
// ScenePartial represents part of a Scene object. It is used to update
// the database entry. Only non-nil fields will be updated.
type ScenePartial struct {
@@ -66,6 +119,37 @@ type ScenePartial struct {
Interactive *bool `db:"interactive" json:"interactive"`
}
func (s *ScenePartial) SetFile(f File) {
path := f.Path
s.Path = &path
if f.Checksum != "" {
s.Checksum = &sql.NullString{
String: f.Checksum,
Valid: true,
}
}
if f.OSHash != "" {
s.OSHash = &sql.NullString{
String: f.OSHash,
Valid: true,
}
}
zeroTime := time.Time{}
if f.FileModTime != zeroTime {
s.FileModTime = &NullSQLiteTimestamp{
Timestamp: f.FileModTime,
Valid: true,
}
}
if f.Size != "" {
s.Size = &sql.NullString{
String: f.Size,
Valid: true,
}
}
}
// GetTitle returns the title of the scene. If the Title field is empty,
// then the base filename is returned.
func (s Scene) GetTitle() string {
@@ -79,13 +163,7 @@ func (s Scene) GetTitle() string {
// GetHash returns the hash of the scene, based on the hash algorithm provided. If
// hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned.
func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string {
if hashAlgorithm == HashAlgorithmMd5 {
return s.Checksum.String
} else if hashAlgorithm == HashAlgorithmOshash {
return s.OSHash.String
}
panic("unknown hash algorithm")
return s.File().GetHash(hashAlgorithm)
}
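Editor's note: the branching deleted above presumably now lives on the shared File type; a reconstruction sketch based on the removed lines (the real implementation sits in the models package and is not shown in this diff).

func (f File) GetHash(hashAlgorithm HashAlgorithm) string {
	if hashAlgorithm == HashAlgorithmMd5 {
		return f.Checksum
	}
	if hashAlgorithm == HashAlgorithmOshash {
		return f.OSHash
	}
	panic("unknown hash algorithm")
}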
func (s Scene) GetMinResolution() int64 {

View File

@@ -1,49 +1,50 @@
package manager
package scene
import (
"os"
"path/filepath"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/utils"
)
func MigrateHash(oldHash string, newHash string) {
oldPath := filepath.Join(instance.Paths.Generated.Markers, oldHash)
newPath := filepath.Join(instance.Paths.Generated.Markers, newHash)
migrate(oldPath, newPath)
func MigrateHash(p *paths.Paths, oldHash string, newHash string) {
oldPath := filepath.Join(p.Generated.Markers, oldHash)
newPath := filepath.Join(p.Generated.Markers, newHash)
migrateSceneFiles(oldPath, newPath)
scenePaths := GetInstance().Paths.Scene
scenePaths := p.Scene
oldPath = scenePaths.GetThumbnailScreenshotPath(oldHash)
newPath = scenePaths.GetThumbnailScreenshotPath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetScreenshotPath(oldHash)
newPath = scenePaths.GetScreenshotPath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetStreamPreviewPath(oldHash)
newPath = scenePaths.GetStreamPreviewPath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetStreamPreviewImagePath(oldHash)
newPath = scenePaths.GetStreamPreviewImagePath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetTranscodePath(oldHash)
newPath = scenePaths.GetTranscodePath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetSpriteVttFilePath(oldHash)
newPath = scenePaths.GetSpriteVttFilePath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetSpriteImageFilePath(oldHash)
newPath = scenePaths.GetSpriteImageFilePath(newHash)
migrate(oldPath, newPath)
migrateSceneFiles(oldPath, newPath)
}
func migrate(oldName, newName string) {
func migrateSceneFiles(oldName, newName string) {
oldExists, err := utils.FileExists(oldName)
if err != nil && !os.IsNotExist(err) {
logger.Errorf("Error checking existence of %s: %s", oldName, err.Error())
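Editor's note: a short sketch (not part of this commit) of how a caller now drives the relocated helper; the old MigrateHash read its paths from the manager singleton, while the scene package version takes them as a parameter. The helper name below is hypothetical.

package example

import (
	"github.com/stashapp/stash/pkg/manager/paths"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/scene"
)

// migrateIfRenamed renames the generated artifacts (markers, screenshots,
// previews, sprites, transcodes) when the scene's hash changes, mirroring
// the ScanExisting logic added in this commit.
func migrateIfRenamed(p *paths.Paths, s *models.Scene, algo models.HashAlgorithm, oldHash string) {
	newHash := s.GetHash(algo)
	if newHash != oldHash {
		scene.MigrateHash(p, oldHash, newHash)
	}
}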


pkg/scene/scan.go Normal file
View File

@@ -0,0 +1,335 @@
package scene
import (
"context"
"database/sql"
"fmt"
"os"
"strconv"
"strings"
"time"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/utils"
)
const mutexType = "scene"
type videoFileCreator interface {
NewVideoFile(path string, stripFileExtension bool) (*ffmpeg.VideoFile, error)
}
type Scanner struct {
file.Scanner
StripFileExtension bool
UseFileMetadata bool
FileNamingAlgorithm models.HashAlgorithm
Ctx context.Context
CaseSensitiveFs bool
TxnManager models.TransactionManager
Paths *paths.Paths
Screenshotter screenshotter
VideoFileCreator videoFileCreator
PluginCache *plugin.Cache
MutexManager *utils.MutexManager
}
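// FileScanner returns a file.Scanner for scene files: the oshash is always
// calculated, while MD5 is only calculated when it is the configured file
// naming algorithm or when calculateMD5 is set.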
func FileScanner(hasher file.Hasher, fileNamingAlgorithm models.HashAlgorithm, calculateMD5 bool) file.Scanner {
return file.Scanner{
Hasher: hasher,
CalculateOSHash: true,
CalculateMD5: fileNamingAlgorithm == models.HashAlgorithmMd5 || calculateMD5,
}
}
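Editor's note: a small sketch (not part of this commit) of the MD5/oshash switches above; the helper name is hypothetical and the hasher is whatever file.Hasher implementation the caller already has.

package example

import (
	"github.com/stashapp/stash/pkg/file"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/scene"
)

func newFileScanners(hasher file.Hasher) (file.Scanner, file.Scanner) {
	byOSHash := scene.FileScanner(hasher, models.HashAlgorithmOshash, false)
	// byOSHash.CalculateOSHash == true, byOSHash.CalculateMD5 == false

	byMD5 := scene.FileScanner(hasher, models.HashAlgorithmMd5, false)
	// byMD5.CalculateMD5 == true because MD5 is the naming algorithm

	return byOSHash, byMD5
}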
func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFile) (err error) {
scanned, err := scanner.Scanner.ScanExisting(existing, file)
if err != nil {
return err
}
s := existing.(*models.Scene)
path := scanned.New.Path
interactive := getInteractive(path)
config := config.GetInstance()
oldHash := s.GetHash(scanner.FileNamingAlgorithm)
changed := false
var videoFile *ffmpeg.VideoFile
if scanned.ContentsChanged() {
logger.Infof("%s has been updated: rescanning", path)
s.SetFile(*scanned.New)
videoFile, err = scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension)
if err != nil {
return err
}
videoFileToScene(s, videoFile)
changed = true
} else if scanned.FileUpdated() || s.Interactive != interactive {
logger.Infof("Updated scene file %s", path)
// update fields as needed
s.SetFile(*scanned.New)
changed = true
}
// check for container
if !s.Format.Valid {
if videoFile == nil {
videoFile, err = scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension)
if err != nil {
return err
}
}
container := ffmpeg.MatchContainer(videoFile.Container, path)
logger.Infof("Adding container %s to file %s", container, path)
s.Format = models.NullString(string(container))
changed = true
}
if changed {
// we are operating on a checksum now, so grab a mutex on the checksum
done := make(chan struct{})
if scanned.New.OSHash != "" {
scanner.MutexManager.Claim(mutexType, scanned.New.OSHash, done)
}
if scanned.New.Checksum != "" {
scanner.MutexManager.Claim(mutexType, scanned.New.Checksum, done)
}
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
defer close(done)
qb := r.Scene()
// ensure no clashes of hashes
if scanned.New.Checksum != "" && scanned.Old.Checksum != scanned.New.Checksum {
dupe, _ := qb.FindByChecksum(s.Checksum.String)
if dupe != nil {
return fmt.Errorf("MD5 for file %s is the same as that of %s", path, dupe.Path)
}
}
if scanned.New.OSHash != "" && scanned.Old.OSHash != scanned.New.OSHash {
dupe, _ := qb.FindByOSHash(scanned.New.OSHash)
if dupe != nil {
return fmt.Errorf("OSHash for file %s is the same as that of %s", path, dupe.Path)
}
}
s.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()}
_, err := qb.UpdateFull(*s)
return err
}); err != nil {
return err
}
// Migrate any generated files if the hash has changed
newHash := s.GetHash(config.GetVideoFileNamingAlgorithm())
if newHash != oldHash {
MigrateHash(scanner.Paths, oldHash, newHash)
}
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, s.ID, plugin.SceneUpdatePost, nil, nil)
}
// We already have this item in the database;
// ensure its thumbnail and screenshot exist
scanner.makeScreenshots(path, videoFile, s.GetHash(scanner.FileNamingAlgorithm))
return nil
}
func (scanner *Scanner) ScanNew(file file.SourceFile) (retScene *models.Scene, err error) {
scanned, err := scanner.Scanner.ScanNew(file)
if err != nil {
return nil, err
}
path := file.Path()
checksum := scanned.Checksum
oshash := scanned.OSHash
// grab a mutex on the checksum and oshash
done := make(chan struct{})
if oshash != "" {
scanner.MutexManager.Claim(mutexType, oshash, done)
}
if checksum != "" {
scanner.MutexManager.Claim(mutexType, checksum, done)
}
defer close(done)
// check for scene by checksum and oshash - MD5 should be
// redundant, but check both
var s *models.Scene
if err := scanner.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
if checksum != "" {
s, _ = qb.FindByChecksum(checksum)
}
if s == nil {
s, _ = qb.FindByOSHash(oshash)
}
return nil
}); err != nil {
return nil, err
}
sceneHash := oshash
if scanner.FileNamingAlgorithm == models.HashAlgorithmMd5 {
sceneHash = checksum
}
interactive := getInteractive(file.Path())
if s != nil {
exists, _ := utils.FileExists(s.Path)
if !scanner.CaseSensitiveFs {
// #1426 - if file exists but is a case-insensitive match for the
// original filename, then treat it as a move
if exists && strings.EqualFold(path, s.Path) {
exists = false
}
}
if exists {
logger.Infof("%s already exists. Duplicate of %s", path, s.Path)
} else {
logger.Infof("%s already exists. Updating path...", path)
scenePartial := models.ScenePartial{
ID: s.ID,
Path: &path,
Interactive: &interactive,
}
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
_, err := r.Scene().Update(scenePartial)
return err
}); err != nil {
return nil, err
}
scanner.makeScreenshots(path, nil, sceneHash)
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, s.ID, plugin.SceneUpdatePost, nil, nil)
}
} else {
logger.Infof("%s doesn't exist. Creating new item...", path)
currentTime := time.Now()
videoFile, err := scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension)
if err != nil {
return nil, err
}
// Override title to be filename if UseFileMetadata is false
if !scanner.UseFileMetadata {
videoFile.SetTitleFromPath(scanner.StripFileExtension)
}
newScene := models.Scene{
Checksum: sql.NullString{String: checksum, Valid: checksum != ""},
OSHash: sql.NullString{String: oshash, Valid: oshash != ""},
Path: path,
FileModTime: models.NullSQLiteTimestamp{
Timestamp: scanned.FileModTime,
Valid: true,
},
Title: sql.NullString{String: videoFile.Title, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
Interactive: interactive,
}
videoFileToScene(&newScene, videoFile)
if scanner.UseFileMetadata {
newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true}
newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")}
}
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
var err error
retScene, err = r.Scene().Create(newScene)
return err
}); err != nil {
return nil, err
}
scanner.makeScreenshots(path, videoFile, sceneHash)
scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retScene.ID, plugin.SceneCreatePost, nil, nil)
}
return retScene, nil
}
func videoFileToScene(s *models.Scene, videoFile *ffmpeg.VideoFile) {
container := ffmpeg.MatchContainer(videoFile.Container, s.Path)
s.Duration = sql.NullFloat64{Float64: videoFile.Duration, Valid: true}
s.VideoCodec = sql.NullString{String: videoFile.VideoCodec, Valid: true}
s.AudioCodec = sql.NullString{String: videoFile.AudioCodec, Valid: true}
s.Format = sql.NullString{String: string(container), Valid: true}
s.Width = sql.NullInt64{Int64: int64(videoFile.Width), Valid: true}
s.Height = sql.NullInt64{Int64: int64(videoFile.Height), Valid: true}
s.Framerate = sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true}
s.Bitrate = sql.NullInt64{Int64: videoFile.Bitrate, Valid: true}
s.Size = sql.NullString{String: strconv.FormatInt(videoFile.Size, 10), Valid: true}
}
func (scanner *Scanner) makeScreenshots(path string, probeResult *ffmpeg.VideoFile, checksum string) {
thumbPath := scanner.Paths.Scene.GetThumbnailScreenshotPath(checksum)
normalPath := scanner.Paths.Scene.GetScreenshotPath(checksum)
thumbExists, _ := utils.FileExists(thumbPath)
normalExists, _ := utils.FileExists(normalPath)
if thumbExists && normalExists {
return
}
if probeResult == nil {
var err error
probeResult, err = scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension)
if err != nil {
logger.Error(err.Error())
return
}
logger.Infof("Regenerating images for %s", path)
}
at := float64(probeResult.Duration) * 0.2
if !thumbExists {
logger.Debugf("Creating thumbnail for %s", path)
makeScreenshot(scanner.Screenshotter, *probeResult, thumbPath, 5, 320, at)
}
if !normalExists {
logger.Debugf("Creating screenshot for %s", path)
makeScreenshot(scanner.Screenshotter, *probeResult, normalPath, 2, probeResult.Width, at)
}
}
func getInteractive(path string) bool {
_, err := os.Stat(utils.GetFunscriptPath(path))
return err == nil
}

pkg/scene/screenshot.go Normal file
View File

@@ -0,0 +1,23 @@
package scene
import (
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
)
type screenshotter interface {
Screenshot(probeResult ffmpeg.VideoFile, options ffmpeg.ScreenshotOptions) error
}
func makeScreenshot(encoder screenshotter, probeResult ffmpeg.VideoFile, outputPath string, quality int, width int, time float64) {
options := ffmpeg.ScreenshotOptions{
OutputPath: outputPath,
Quality: quality,
Time: time,
Width: width,
}
if err := encoder.Screenshot(probeResult, options); err != nil {
logger.Warnf("[encoder] failed to generate screenshot: %v", err)
}
}

View File

@@ -377,3 +377,16 @@ func FindInPaths(paths []string, baseName string) string {
return ""
}
// MatchExtension returns true if the extension of the provided path
// matches any of the provided extensions.
func MatchExtension(path string, extensions []string) bool {
ext := filepath.Ext(path)
for _, e := range extensions {
if strings.EqualFold(ext, "."+e) {
return true
}
}
return false
}
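Editor's note: a usage sketch (not part of this commit); extensions are passed without the leading dot and matched case-insensitively, so the list below is purely illustrative.

package example

import "github.com/stashapp/stash/pkg/utils"

func isVideoFile(path string) bool {
	videoExtensions := []string{"mp4", "mkv", "webm"} // illustrative list
	return utils.MatchExtension(path, videoExtensions) // "/a/b/CLIP.MP4" matches "mp4"
}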

pkg/utils/mutex.go Normal file
View File

@@ -0,0 +1,64 @@
package utils
// MutexManager manages access to mutexes using a mutex type and key.
type MutexManager struct {
mapChan chan map[string]<-chan struct{}
}
// NewMutexManager returns a new instance of MutexManager.
func NewMutexManager() *MutexManager {
ret := &MutexManager{
mapChan: make(chan map[string]<-chan struct{}, 1),
}
initial := make(map[string]<-chan struct{})
ret.mapChan <- initial
return ret
}
// Claim blocks until the mutex for the mutexType and key pair is available.
// The mutex is then claimed by the calling code until the provided done
// channel is closed.
func (csm *MutexManager) Claim(mutexType string, key string, done <-chan struct{}) {
mapKey := mutexType + "_" + key
success := false
var existing <-chan struct{}
for !success {
// grab the map
m := <-csm.mapChan
// get the entry for the given key
newEntry := m[mapKey]
// if it's the existing entry or nil, then it's available; add our channel
if newEntry == nil || newEntry == existing {
m[mapKey] = done
success = true
}
// return the map
csm.mapChan <- m
// if there is an existing entry, now we can wait for it to
// finish, then repeat the process
if newEntry != nil {
existing = newEntry
<-newEntry
}
}
// spawn a goroutine that removes the entry from the map once done is closed
go func() {
<-done
m := <-csm.mapChan
if m[mapKey] == done {
delete(m, mapKey)
}
csm.mapChan <- m
}()
}
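Editor's note: a sketch (not part of this commit) of the Claim/done pattern the scanners use; the claim is held until the done channel is closed, so deferring the close scopes the critical section to the function. Names below are hypothetical.

package example

import "github.com/stashapp/stash/pkg/utils"

func withChecksumLock(mm *utils.MutexManager, checksum string, fn func()) {
	done := make(chan struct{})
	defer close(done) // releases the claim once fn has finished

	mm.Claim("scene", checksum, done) // blocks while another holder owns this key
	fn()                              // safe to create/update the row for this checksum
}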

pkg/utils/mutex_test.go Normal file
View File

@@ -0,0 +1,50 @@
package utils
import (
"sync"
"testing"
)
// should be run with -race
func TestMutexManager(t *testing.T) {
m := NewMutexManager()
map1 := make(map[string]bool)
map2 := make(map[string]bool)
map3 := make(map[string]bool)
maps := []map[string]bool{
map1,
map2,
map3,
}
types := []string{
"foo",
"foo",
"bar",
}
const key = "baz"
const workers = 8
const loops = 300
var wg sync.WaitGroup
for k := 0; k < workers; k++ {
wg.Add(1)
go func(wk int) {
defer wg.Done()
for l := 0; l < loops; l++ {
func(l int) {
c := make(chan struct{})
defer close(c)
m.Claim(types[l%3], key, c)
maps[l%3][key] = true
}(l)
}
}(k)
}
wg.Wait()
}

View File

@@ -4,6 +4,7 @@ import (
"encoding/binary"
"errors"
"fmt"
"io"
"os"
)
@@ -41,6 +42,40 @@ func oshash(size int64, head []byte, tail []byte) (string, error) {
return fmt.Sprintf("%016x", result), nil
}
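// OSHashFromReader calculates the hash from an io.ReadSeeker of the given
// size, using the same head/tail chunk algorithm as OSHashFromFilePath.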
func OSHashFromReader(src io.ReadSeeker, fileSize int64) (string, error) {
if fileSize == 0 {
return "", nil
}
fileChunkSize := chunkSize
if fileSize < fileChunkSize {
fileChunkSize = fileSize
}
head := make([]byte, fileChunkSize)
tail := make([]byte, fileChunkSize)
// read the head of the file into the start of the buffer
_, err := src.Read(head)
if err != nil {
return "", err
}
// seek to the end of the file - the chunk size
_, err = src.Seek(-fileChunkSize, 2)
if err != nil {
return "", err
}
// read the tail of the file
_, err = src.Read(tail)
if err != nil {
return "", err
}
return oshash(fileSize, head, tail)
}
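Editor's note: a usage sketch (not part of this commit), assuming these helpers live in pkg/utils alongside the mutex added above; any io.ReadSeeker works, so the hash can be computed without touching the filesystem.

package example

import (
	"bytes"

	"github.com/stashapp/stash/pkg/utils"
)

func hashInMemory(data []byte) (string, error) {
	return utils.OSHashFromReader(bytes.NewReader(data), int64(len(data)))
}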
// OSHashFromFilePath calculates the hash using the same algorithm that
// OpenSubtitles.org uses.
//
@@ -60,35 +95,5 @@ func OSHashFromFilePath(filePath string) (string, error) {
fileSize := fi.Size()
if fileSize == 0 {
return "", nil
}
fileChunkSize := chunkSize
if fileSize < fileChunkSize {
fileChunkSize = fileSize
}
head := make([]byte, fileChunkSize)
tail := make([]byte, fileChunkSize)
// read the head of the file into the start of the buffer
_, err = f.Read(head)
if err != nil {
return "", err
}
// seek to the end of the file - the chunk size
_, err = f.Seek(-fileChunkSize, 2)
if err != nil {
return "", err
}
// read the tail of the file
_, err = f.Read(tail)
if err != nil {
return "", err
}
return oshash(fileSize, head, tail)
return OSHashFromReader(f, fileSize)
}

View File

@@ -3,6 +3,9 @@
* Added built-in `Auto Tag` scene scraper to match performers, studio and tags from filename - using AutoTag logic. ([#1817](https://github.com/stashapp/stash/pull/1817))
* Added interface options to disable creating performers/studios/tags from dropdown selectors. ([#1814](https://github.com/stashapp/stash/pull/1814))
### 🎨 Improvements
* Optimised scanning process. ([#1816](https://github.com/stashapp/stash/pull/1816))
### 🐛 Bug fixes
* Fix colour codes not outputting correctly when logging to file on Windows. ([#1846](https://github.com/stashapp/stash/pull/1846))
* Sort directory listings using case sensitive collation. ([#1823](https://github.com/stashapp/stash/pull/1823))