stashapp/stash
Parallel scanning/generation, and combined scanning/preview/sprite (#820)
* Implement parallel scanning and generation, and combined scanning/preview/sprite generation.
* Added UI component for preview/sprite generation during scan, and configurable number of parallel tasks.
* Add v050 changelog entry
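The diff below replaces `sync.WaitGroup` with `github.com/remeh/sizedwaitgroup` so that scan and generate sub-tasks run with bounded concurrency, sized by the new `parallel_tasks` setting. As a rough orientation before the per-file changes, here is a minimal sketch of that pattern as it is wired up in this commit; `runTasksSketch` is a hypothetical helper for illustration, while `ScanTask`, `config.GetParallelTasksWithAutoDetection` and the sizedwaitgroup calls come from the changes below:

```go
package manager

import (
	"github.com/remeh/sizedwaitgroup"

	"github.com/stashapp/stash/pkg/manager/config"
)

// runTasksSketch (illustrative only) bounds the number of concurrent scan
// tasks to the configured, or auto-detected, parallel task count.
func runTasksSketch(tasks []ScanTask) {
	parallelTasks := config.GetParallelTasksWithAutoDetection()
	wg := sizedwaitgroup.New(parallelTasks)

	for i := range tasks {
		// Add blocks once parallelTasks goroutines are already in flight.
		wg.Add()
		// Each task is expected to call wg.Done() when it finishes,
		// as the task implementations in this commit do.
		go tasks[i].Start(&wg)
	}

	// Wait for all remaining tasks to complete.
	wg.Wait()
}
```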
go.mod
@@ -20,6 +20,7 @@ require (
github.com/json-iterator/go v1.1.9
github.com/mattn/go-sqlite3 v1.13.0
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/cors v1.6.0
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
github.com/sirupsen/logrus v1.4.2
go.sum
@@ -614,6 +614,8 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E=
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.0.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.1.0 h1:g0fH8RicVgNl+zVZDCDfbdWxAWoAEJyI7I3TZYXFiig=
@@ -9,6 +9,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
cachePath
calculateMD5
videoFileNamingAlgorithm
parallelTasks
previewSegments
previewSegmentDuration
previewExcludeStart
@@ -35,6 +35,8 @@ input ConfigGeneralInput {
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of parallel tasks to start during scan/generate"""
parallelTasks: Int
"""Number of segments in a preview file"""
previewSegments: Int
"""Preview segment duration, in seconds"""

@@ -96,6 +98,8 @@ type ConfigGeneralResult {
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of parallel tasks to start during scan/generate"""
parallelTasks: Int!
"""Number of segments in a preview file"""
previewSegments: Int!
"""Preview segment duration, in seconds"""
@@ -32,7 +32,14 @@ input GeneratePreviewOptionsInput {

input ScanMetadataInput {
paths: [String!]
"""Set name, date, details from metadata (if present)"""
useFileMetadata: Boolean!
"""Generate previews during scan"""
scanGeneratePreviews: Boolean!
"""Generate image previews during scan"""
scanGenerateImagePreviews: Boolean!
"""Generate sprites during scan"""
scanGenerateSprites: Boolean!
}

input AutoTagMetadataInput {
@@ -61,6 +61,9 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.CalculateMD5, input.CalculateMd5)

if input.ParallelTasks != nil {
config.Set(config.ParallelTasks, *input.ParallelTasks)
}
if input.PreviewSegments != nil {
config.Set(config.PreviewSegments, *input.PreviewSegments)
}
@@ -49,6 +49,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
CachePath: config.GetCachePath(),
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
ParallelTasks: config.GetParallelTasks(),
PreviewSegments: config.GetPreviewSegments(),
PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
PreviewExcludeStart: config.GetPreviewExcludeStart(),
@@ -2,6 +2,7 @@ package config

import (
"golang.org/x/crypto/bcrypt"
"runtime"

"errors"
"io/ioutil"

@@ -56,6 +57,9 @@ const PreviewPreset = "preview_preset"
const MaxTranscodeSize = "max_transcode_size"
const MaxStreamingTranscodeSize = "max_streaming_transcode_size"

const ParallelTasks = "parallel_tasks"
const parallelTasksDefault = 1

const PreviewSegmentDuration = "preview_segment_duration"
const previewSegmentDurationDefault = 0.75

@@ -297,6 +301,20 @@ func GetPreviewSegmentDuration() float64 {
return viper.GetFloat64(PreviewSegmentDuration)
}

// GetParallelTasks returns the number of parallel tasks that should be started
// by scan or generate task.
func GetParallelTasks() int {
return viper.GetInt(ParallelTasks)
}

func GetParallelTasksWithAutoDetection() int {
parallelTasks := viper.GetInt(ParallelTasks)
if parallelTasks <= 0 {
parallelTasks = (runtime.NumCPU() / 4) + 1
}
return parallelTasks
}

// GetPreviewSegments returns the amount of segments in a scene preview file.
func GetPreviewSegments() int {
return viper.GetInt(PreviewSegments)

@@ -550,6 +568,7 @@ func IsValid() bool {
}

func setDefaultValues() {
viper.SetDefault(ParallelTasks, parallelTasksDefault)
viper.SetDefault(PreviewSegmentDuration, previewSegmentDurationDefault)
viper.SetDefault(PreviewSegments, previewSegmentsDefault)
viper.SetDefault(PreviewExcludeStart, previewExcludeStartDefault)
@@ -14,6 +14,7 @@ import (
type PreviewGenerator struct {
Info *GeneratorInfo

VideoChecksum string
VideoFilename string
ImageFilename string
OutputDirectory string

@@ -26,7 +27,7 @@ type PreviewGenerator struct {
Overwrite bool
}

func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string, generateVideo bool, generateImage bool, previewPreset string) (*PreviewGenerator, error) {
func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, videoFilename string, imageFilename string, outputDirectory string, generateVideo bool, generateImage bool, previewPreset string) (*PreviewGenerator, error) {
exists, err := utils.FileExists(videoFile.Path)
if !exists {
return nil, err

@@ -39,6 +40,7 @@ func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, image

return &PreviewGenerator{
Info: generator,
VideoChecksum: videoChecksum,
VideoFilename: videoFilename,
ImageFilename: imageFilename,
OutputDirectory: outputDirectory,

@@ -87,7 +89,7 @@ func (g *PreviewGenerator) generateConcatFile() error {
w := bufio.NewWriter(f)
for i := 0; i < g.Info.ChunkCount; i++ {
num := fmt.Sprintf("%.3d", i)
filename := "preview" + num + ".mp4"
filename := "preview_" + g.VideoChecksum + "_" + num + ".mp4"
_, _ = w.WriteString(fmt.Sprintf("file '%s'\n", filename))
}
return w.Flush()

@@ -105,7 +107,7 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder, fallback bool)
for i := 0; i < g.Info.ChunkCount; i++ {
time := offset + (float64(i) * stepSize)
num := fmt.Sprintf("%.3d", i)
filename := "preview" + num + ".mp4"
filename := "preview_" + g.VideoChecksum + "_" + num + ".mp4"
chunkOutputPath := instance.Paths.Generated.GetTmpPath(filename)

options := ffmpeg.ScenePreviewChunkOptions{

@@ -148,5 +150,5 @@ func (g *PreviewGenerator) generateImage(encoder *ffmpeg.Encoder) error {
}

func (g *PreviewGenerator) getConcatFilePath() string {
return instance.Paths.Generated.GetTmpPath("files.txt")
return instance.Paths.Generated.GetTmpPath(fmt.Sprintf("files_%s.txt", g.VideoChecksum))
}
@@ -20,6 +20,7 @@ import (
type SpriteGenerator struct {
Info *GeneratorInfo

VideoChecksum string
ImageOutputPath string
VTTOutputPath string
Rows int

@@ -28,7 +29,7 @@ type SpriteGenerator struct {
Overwrite bool
}

func NewSpriteGenerator(videoFile ffmpeg.VideoFile, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
exists, err := utils.FileExists(videoFile.Path)
if !exists {
return nil, err

@@ -44,6 +45,7 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, imageOutputPath string, vttO

return &SpriteGenerator{
Info: generator,
VideoChecksum: videoChecksum,
ImageOutputPath: imageOutputPath,
VTTOutputPath: vttOutputPath,
Rows: rows,

@@ -74,7 +76,7 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error {
for i := 0; i < g.Info.ChunkCount; i++ {
time := float64(i) * stepSize
num := fmt.Sprintf("%.3d", i)
filename := "thumbnail" + num + ".jpg"
filename := "thumbnail_" + g.VideoChecksum + "_" + num + ".jpg"

options := ffmpeg.ScreenshotOptions{
OutputPath: instance.Paths.Generated.GetTmpPath(filename),

@@ -85,7 +87,7 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error {
}

// Combine all of the thumbnails into a sprite image
globPath := filepath.Join(instance.Paths.Generated.Tmp, "thumbnail*.jpg")
globPath := filepath.Join(instance.Paths.Generated.Tmp, fmt.Sprintf("thumbnail_%s_*.jpg", g.VideoChecksum))
imagePaths, _ := doublestar.Glob(globPath)
utils.NaturalSort(imagePaths)
var images []image.Image
@@ -2,11 +2,14 @@ package manager

import (
"errors"
"fmt"
"os"
"strconv"
"sync"
"time"

"github.com/remeh/sizedwaitgroup"

"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"

@@ -176,7 +179,11 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
logger.Infof("Starting scan of %d files. %d New files found", *total, *newFiles)
}

var wg sync.WaitGroup
start := time.Now()
parallelTasks := config.GetParallelTasksWithAutoDetection()
logger.Infof("Scan started with %d parallel tasks", parallelTasks)
wg := sizedwaitgroup.New(parallelTasks)

s.Status.Progress = 0
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
calculateMD5 := config.IsCalculateMD5()

@@ -201,10 +208,11 @@
galleries = append(galleries, path)
}

wg.Add(1)
task := ScanTask{FilePath: path, UseFileMetadata: input.UseFileMetadata, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5}
instance.Paths.Generated.EnsureTmpDir()

wg.Add()
task := ScanTask{FilePath: path, UseFileMetadata: input.UseFileMetadata, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5, GeneratePreview: input.ScanGeneratePreviews, GenerateImagePreview: input.ScanGenerateImagePreviews, GenerateSprite: input.ScanGenerateSprites}
go task.Start(&wg)
wg.Wait()

return nil
})

@@ -224,9 +232,14 @@
return
}

logger.Info("Finished scan")
wg.Wait()
instance.Paths.Generated.EmptyTmpDir()

elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))

for _, path := range galleries {
wg.Add(1)
wg.Add()
task := ScanTask{FilePath: path, UseFileMetadata: false}
go task.associateGallery(&wg)
wg.Wait()
@@ -357,8 +370,10 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
return
}

delta := utils.Btoi(input.Sprites) + utils.Btoi(input.Previews) + utils.Btoi(input.Markers) + utils.Btoi(input.Transcodes)
var wg sync.WaitGroup
parallelTasks := config.GetParallelTasksWithAutoDetection()

logger.Infof("Generate started with %d parallel tasks", parallelTasks)
wg := sizedwaitgroup.New(parallelTasks)

s.Status.Progress = 0
lenScenes := len(scenes)

@@ -397,6 +412,10 @@
}
setGeneratePreviewOptionsInput(generatePreviewOptions)

// Start measuring how long the scan has taken. (consider moving this up)
start := time.Now()
instance.Paths.Generated.EnsureTmpDir()

for i, scene := range scenes {
s.Status.setProgress(i, total)
if s.Status.stopping {

@@ -409,15 +428,9 @@
continue
}

wg.Add(delta)

// Clear the tmp directory for each scene
if input.Sprites || input.Previews || input.Markers {
instance.Paths.Generated.EmptyTmpDir()
}

if input.Sprites {
task := GenerateSpriteTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
wg.Add()
go task.Start(&wg)
}

@@ -429,21 +442,24 @@
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
wg.Add()
go task.Start(&wg)
}

if input.Markers {
wg.Add()
task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg)
}

if input.Transcodes {
wg.Add()
task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg)
}
}

wg.Wait()
}

for i, marker := range markers {
s.Status.setProgress(lenScenes+i, total)

@@ -457,13 +473,16 @@
continue
}

wg.Add(1)
wg.Add()
task := GenerateMarkersTask{Marker: marker, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg)
wg.Wait()
}

logger.Infof("Generate finished")
wg.Wait()

instance.Paths.Generated.EmptyTmpDir()
elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Generate finished (%s)", elapsed))
}()
}
@@ -3,7 +3,8 @@ package manager

import (
"path/filepath"
"strconv"
"sync"

"github.com/remeh/sizedwaitgroup"

"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"

@@ -18,7 +19,7 @@ type GenerateMarkersTask struct {
fileNamingAlgorithm models.HashAlgorithm
}

func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) {
func (t *GenerateMarkersTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
defer wg.Done()

if t.Scene != nil {
@@ -1,7 +1,7 @@
package manager

import (
"sync"
"github.com/remeh/sizedwaitgroup"

"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"

@@ -19,11 +19,13 @@ type GeneratePreviewTask struct {
fileNamingAlgorithm models.HashAlgorithm
}

func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
func (t *GeneratePreviewTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
defer wg.Done()

videoFilename := t.videoFilename()
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
imageFilename := t.imageFilename()

if !t.Overwrite && !t.required() {
return
}

@@ -35,7 +37,8 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
}

const generateVideo = true
generator, err := NewPreviewGenerator(*videoFile, videoFilename, imageFilename, instance.Paths.Generated.Screenshots, generateVideo, t.ImagePreview, t.Options.PreviewPreset.String())
generator, err := NewPreviewGenerator(*videoFile, videoChecksum, videoFilename, imageFilename, instance.Paths.Generated.Screenshots, generateVideo, t.ImagePreview, t.Options.PreviewPreset.String())

if err != nil {
logger.Errorf("error creating preview generator: %s", err.Error())
return
@@ -1,7 +1,7 @@
package manager

import (
"sync"
"github.com/remeh/sizedwaitgroup"

"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"

@@ -15,7 +15,7 @@ type GenerateSpriteTask struct {
fileNamingAlgorithm models.HashAlgorithm
}

func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
func (t *GenerateSpriteTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
defer wg.Done()

if !t.Overwrite && !t.required() {

@@ -31,7 +31,8 @@ func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash)
vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash)
generator, err := NewSpriteGenerator(*videoFile, imagePath, vttPath, 9, 9)
generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, 9, 9)

if err != nil {
logger.Errorf("error creating sprite generator: %s", err.Error())
return
@@ -9,10 +9,10 @@ import (
"path/filepath"
"strconv"
"strings"
"sync"
"time"

"github.com/jmoiron/sqlx"
"github.com/remeh/sizedwaitgroup"

"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/ffmpeg"

@@ -28,15 +28,57 @@ type ScanTask struct {
UseFileMetadata bool
calculateMD5 bool
fileNamingAlgorithm models.HashAlgorithm

GenerateSprite bool
GeneratePreview bool
GenerateImagePreview bool
zipGallery *models.Gallery
}

func (t *ScanTask) Start(wg *sync.WaitGroup) {
func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
if isGallery(t.FilePath) {
t.scanGallery()
} else if isVideo(t.FilePath) {
t.scanScene()
scene := t.scanScene()

if scene != nil {
iwg := sizedwaitgroup.New(2)

if t.GenerateSprite {
iwg.Add()
taskSprite := GenerateSpriteTask{Scene: *scene, Overwrite: false, fileNamingAlgorithm: t.fileNamingAlgorithm}
go taskSprite.Start(&iwg)
}

if t.GeneratePreview {
iwg.Add()

var previewSegmentDuration = config.GetPreviewSegmentDuration()
var previewSegments = config.GetPreviewSegments()
var previewExcludeStart = config.GetPreviewExcludeStart()
var previewExcludeEnd = config.GetPreviewExcludeEnd()
var previewPresent = config.GetPreviewPreset()

// NOTE: the reuse of this model like this is painful.
previewOptions := models.GeneratePreviewOptionsInput{
PreviewSegments: &previewSegments,
PreviewSegmentDuration: &previewSegmentDuration,
PreviewExcludeStart: &previewExcludeStart,
PreviewExcludeEnd: &previewExcludeEnd,
PreviewPreset: &previewPresent,
}

taskPreview := GeneratePreviewTask{
Scene: *scene,
ImagePreview: t.GenerateImagePreview,
Options: previewOptions,
Overwrite: false,
fileNamingAlgorithm: t.fileNamingAlgorithm,
}
go taskPreview.Start(&iwg)
}

iwg.Wait()
}
} else if isImage(t.FilePath) {
t.scanImage()
}
@@ -237,7 +279,7 @@ func (t *ScanTask) isFileModified(fileModTime time.Time, modTime models.NullSQLi
}

// associates a gallery to a scene with the same basename
func (t *ScanTask) associateGallery(wg *sync.WaitGroup) {
func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
qb := models.NewGalleryQueryBuilder()
gallery, _ := qb.FindByPath(t.FilePath)
if gallery == nil {

@@ -291,14 +333,14 @@
wg.Done()
}

func (t *ScanTask) scanScene() {
func (t *ScanTask) scanScene() *models.Scene {
qb := models.NewSceneQueryBuilder()
scene, _ := qb.FindByPath(t.FilePath)

fileModTime, err := t.getFileModTime()
if err != nil {
logger.Error(err.Error())
return
return nil
}

if scene != nil {

@@ -311,7 +353,7 @@
scene, err = qb.Find(scene.ID)
if err != nil {
logger.Error(err.Error())
return
return nil
}
}

@@ -322,7 +364,7 @@
scene, err = t.rescanScene(scene, fileModTime)
if err != nil {
logger.Error(err.Error())
return
return nil
}
}

@@ -335,7 +377,7 @@
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath)
if err != nil {
logger.Error(err.Error())
return
return nil
}
container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)
logger.Infof("Adding container %s to file %s", container, t.FilePath)

@@ -357,14 +399,14 @@
oshash, err := utils.OSHashFromFilePath(t.FilePath)
if err != nil {
logger.Error(err.Error())
return
return nil
}

// check if oshash clashes with existing scene
dupe, _ := qb.FindByOSHash(oshash)
if dupe != nil {
logger.Errorf("OSHash for file %s is the same as that of %s", t.FilePath, dupe.Path)
return
return nil
}

ctx := context.TODO()

@@ -373,7 +415,7 @@
if err != nil {
logger.Error(err.Error())
tx.Rollback()
return
return nil
} else if err := tx.Commit(); err != nil {
logger.Error(err.Error())
}
@@ -384,14 +426,14 @@
checksum, err := t.calculateChecksum()
if err != nil {
logger.Error(err.Error())
return
return nil
}

// check if checksum clashes with existing scene
dupe, _ := qb.FindByChecksum(checksum)
if dupe != nil {
logger.Errorf("MD5 for file %s is the same as that of %s", t.FilePath, dupe.Path)
return
return nil
}

ctx := context.TODO()

@@ -405,18 +447,18 @@
}
}

return
return nil
}

// Ignore directories.
if isDir, _ := utils.DirExists(t.FilePath); isDir {
return
return nil
}

videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath)
if err != nil {
logger.Error(err.Error())
return
return nil
}
container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)

@@ -431,14 +473,14 @@
oshash, err := utils.OSHashFromFilePath(t.FilePath)
if err != nil {
logger.Error(err.Error())
return
return nil
}

if t.fileNamingAlgorithm == models.HashAlgorithmMd5 || t.calculateMD5 {
checksum, err = t.calculateChecksum()
if err != nil {
logger.Error(err.Error())
return
return nil
}
}

@@ -460,6 +502,8 @@

t.makeScreenshots(videoFile, sceneHash)

var retScene *models.Scene

ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil)
if scene != nil {
@@ -503,15 +547,21 @@
newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true}
newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")}
}
_, err = qb.Create(newScene, tx)

retScene, err = qb.Create(newScene, tx)
}

if err != nil {
logger.Error(err.Error())
_ = tx.Rollback()
return nil

} else if err := tx.Commit(); err != nil {
logger.Error(err.Error())
return nil
}

return retScene
}

func (t *ScanTask) rescanScene(scene *models.Scene, fileModTime time.Time) (*models.Scene, error) {

@@ -630,9 +680,9 @@
subTask.zipGallery = zipGallery

// run the subtask and wait for it to complete
var wg sync.WaitGroup
wg.Add(1)
subTask.Start(&wg)
iwg := sizedwaitgroup.New(1)
iwg.Add()
subTask.Start(&iwg)
return nil
})
if err != nil {
@@ -1,12 +1,13 @@
package manager

import (
"github.com/remeh/sizedwaitgroup"

"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
"sync"
)

type GenerateTranscodeTask struct {

@@ -15,7 +16,7 @@ type GenerateTranscodeTask struct {
fileNamingAlgorithm models.HashAlgorithm
}

func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
func (t *GenerateTranscodeTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
defer wg.Done()

hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
@@ -7,6 +7,7 @@ import V020 from "./versions/v020.md";
import V021 from "./versions/v021.md";
import V030 from "./versions/v030.md";
import V040 from "./versions/v040.md";
import V050 from "./versions/v050.md";
import { MarkdownPage } from "../Shared/MarkdownPage";

const Changelog: React.FC = () => {

@@ -36,11 +37,20 @@
<>
<h1 className="mb-4">Changelog:</h1>
<Version
version={stashVersion || "v0.4.0"}
version={stashVersion || "v0.5.0"}
date={buildDate}
openState={openState}
setOpenState={setVersionOpenState}
defaultOpen
>
<MarkdownPage page={V050} />
</Version>
<Version
version="v0.4.0"
date="2020-11-24"
openState={openState}
setOpenState={setVersionOpenState}
defaultOpen
>
<MarkdownPage page={V040} />
</Version>
ui/v2.5/src/components/Changelog/versions/v050.md (new file)
@@ -0,0 +1,3 @@
### 🎨 Improvements
* Support optional preview and sprite generation during scanning.
* Support configurable number of threads for scanning and generation.
@@ -79,6 +79,7 @@ export const SettingsConfigurationPanel: React.FC = () => {
const [videoFileNamingAlgorithm, setVideoFileNamingAlgorithm] = useState<
GQL.HashAlgorithm | undefined
>(undefined);
const [parallelTasks, setParallelTasks] = useState<number>(0);
const [previewSegments, setPreviewSegments] = useState<number>(0);
const [previewSegmentDuration, setPreviewSegmentDuration] = useState<number>(
0

@@ -139,6 +140,7 @@
calculateMD5,
videoFileNamingAlgorithm:
(videoFileNamingAlgorithm as GQL.HashAlgorithm) ?? undefined,
parallelTasks,
previewSegments,
previewSegmentDuration,
previewExcludeStart,

@@ -182,6 +184,7 @@
setCachePath(conf.general.cachePath);
setVideoFileNamingAlgorithm(conf.general.videoFileNamingAlgorithm);
setCalculateMD5(conf.general.calculateMD5);
setParallelTasks(conf.general.parallelTasks);
setPreviewSegments(conf.general.previewSegments);
setPreviewSegmentDuration(conf.general.previewSegmentDuration);
setPreviewExcludeStart(conf.general.previewExcludeStart);
@@ -567,6 +570,31 @@

<hr />

<Form.Group>
<h4>Parallel Scan/Generation</h4>

<Form.Group id="parallel-tasks">
<h6>Number of parallel tasks for scan/generation</h6>
<Form.Control
className="col col-sm-6 text-input"
type="number"
value={parallelTasks}
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
setParallelTasks(
Number.parseInt(e.currentTarget.value || "0", 10)
)
}
/>
<Form.Text className="text-muted">
Set to 0 for auto-detection. Warning: running more tasks than is
required to achieve 100% CPU utilisation will decrease performance
and potentially cause other issues.
</Form.Text>
</Form.Group>
</Form.Group>

<hr />

<Form.Group>
<h4>Preview Generation</h4>
@@ -31,6 +31,16 @@ export const SettingsTasksPanel: React.FC = () => {
const [isImportDialogOpen, setIsImportDialogOpen] = useState<boolean>(false);
const [isScanDialogOpen, setIsScanDialogOpen] = useState<boolean>(false);
const [useFileMetadata, setUseFileMetadata] = useState<boolean>(false);
const [scanGeneratePreviews, setScanGeneratePreviews] = useState<boolean>(
false
);
const [scanGenerateSprites, setScanGenerateSprites] = useState<boolean>(
false
);
const [scanGenerateImagePreviews, setScanGenerateImagePreviews] = useState<
boolean
>(false);

const [status, setStatus] = useState<string>("");
const [progress, setProgress] = useState<number>(0);

@@ -166,8 +176,11 @@
async function onScan(paths?: string[]) {
try {
await mutateMetadataScan({
useFileMetadata,
paths,
useFileMetadata,
scanGeneratePreviews,
scanGenerateImagePreviews,
scanGenerateSprites,
});
Toast.success({ content: "Started scan" });
jobStatus.refetch();

@@ -304,6 +317,31 @@
label="Set name, date, details from metadata (if present)"
onChange={() => setUseFileMetadata(!useFileMetadata)}
/>
<Form.Check
id="scan-generate-previews"
checked={scanGeneratePreviews}
label="Generate previews during scan (video previews which play when hovering over a scene)"
onChange={() => setScanGeneratePreviews(!scanGeneratePreviews)}
/>
<div className="d-flex flex-row">
<div>↳</div>
<Form.Check
id="scan-generate-image-previews"
checked={scanGenerateImagePreviews}
disabled={!scanGeneratePreviews}
label="Generate image previews during scan (animated WebP previews, only required if Preview Type is set to Animated Image)"
onChange={() =>
setScanGenerateImagePreviews(!scanGenerateImagePreviews)
}
className="ml-2 flex-grow"
/>
</div>
<Form.Check
id="scan-generate-sprites"
checked={scanGenerateSprites}
label="Generate sprites during scan (for the scene scrubber)"
onChange={() => setScanGenerateSprites(!scanGenerateSprites)}
/>
</Form.Group>
<Form.Group>
<Button
@@ -60,6 +60,21 @@ These instructions are for existing users whose systems will be defaulted to use
2. In Settings -> Configuration page, untick `Calculate MD5` and select `oshash` as file naming hash. Save the configuration.
3. In Settings -> Tasks page, click on the `Rename generated files` migration button.

## Parallel Scan/Generation

#### Number of parallel tasks for scan/generation

This setting controls how many sub-tasks will be run in parallel during scanning and generation tasks. (See Tasks)

Auto-detection can be enabled by setting this to zero. This will calculate the number of parallel tasks as cpu_cores/4 + 1.

This setting can be used to increase/decrease overall CPU utilisation in two scenarios:
1) High-performance CPUs with 4 or more cores.
2) Media files stored on a remote/cloud filesystem.

Note: If this is set too high it will decrease overall performance and may cause failures (out of memory).
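To make the auto-detection rule concrete, here is a minimal sketch of the documented formula; it mirrors `GetParallelTasksWithAutoDetection` from the config changes in this commit, and the helper name and worked figures are illustrative only:

```go
package config

import "runtime"

// autoDetectedParallelTasks (illustrative) applies the documented rule:
// a value of 0 means "auto-detect" as cpu_cores/4 + 1, otherwise the
// configured value is used as-is.
func autoDetectedParallelTasks(configured int) int {
	if configured <= 0 {
		// e.g. 4 cores -> 2 tasks, 8 cores -> 3 tasks, 16 cores -> 5 tasks
		return (runtime.NumCPU() / 4) + 1
	}
	return configured
}
```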
## Scraping

### User Agent string
vendor/github.com/remeh/sizedwaitgroup/LICENSE (new vendored file)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2018 Rémy Mathieu

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/remeh/sizedwaitgroup/README.md (new vendored file)
@@ -0,0 +1,54 @@
# SizedWaitGroup

[GoDoc](https://godoc.org/github.com/remeh/sizedwaitgroup)

`SizedWaitGroup` has the same role and API as `sync.WaitGroup` but it adds a limit of the amount of goroutines started concurrently.

`SizedWaitGroup` adds the feature of limiting the maximum number of concurrently started routines. It could for example be used to start multiples routines querying a database but without sending too much queries in order to not overload the given database.

# Example

```
package main

import (
	"fmt"
	"math/rand"
	"time"

	"github.com/remeh/sizedwaitgroup"
)

func main() {
	rand.Seed(time.Now().UnixNano())

	// Typical use-case:
	// 50 queries must be executed as quick as possible
	// but without overloading the database, so only
	// 8 routines should be started concurrently.
	swg := sizedwaitgroup.New(8)
	for i := 0; i < 50; i++ {
		swg.Add()
		go func(i int) {
			defer swg.Done()
			query(i)
		}(i)
	}

	swg.Wait()
}

func query(i int) {
	fmt.Println(i)
	ms := i + 500 + rand.Intn(500)
	time.Sleep(time.Duration(ms) * time.Millisecond)
}
```

# License

MIT

# Copyright

Rémy Mathieu © 2016
vendor/github.com/remeh/sizedwaitgroup/sizedwaitgroup.go (new vendored file)
@@ -0,0 +1,84 @@
// Based upon sync.WaitGroup, SizedWaitGroup allows to start multiple
// routines and to wait for their end using the simple API.

// SizedWaitGroup adds the feature of limiting the maximum number of
// concurrently started routines. It could for example be used to start
// multiples routines querying a database but without sending too much
// queries in order to not overload the given database.
//
// Rémy Mathieu © 2016
package sizedwaitgroup

import (
	"context"
	"math"
	"sync"
)

// SizedWaitGroup has the same role and close to the
// same API as the Golang sync.WaitGroup but adds a limit of
// the amount of goroutines started concurrently.
type SizedWaitGroup struct {
	Size int

	current chan struct{}
	wg      sync.WaitGroup
}

// New creates a SizedWaitGroup.
// The limit parameter is the maximum amount of
// goroutines which can be started concurrently.
func New(limit int) SizedWaitGroup {
	size := math.MaxInt32 // 2^32 - 1
	if limit > 0 {
		size = limit
	}
	return SizedWaitGroup{
		Size: size,

		current: make(chan struct{}, size),
		wg:      sync.WaitGroup{},
	}
}

// Add increments the internal WaitGroup counter.
// It can be blocking if the limit of spawned goroutines
// has been reached. It will stop blocking when Done is
// been called.
//
// See sync.WaitGroup documentation for more information.
func (s *SizedWaitGroup) Add() {
	s.AddWithContext(context.Background())
}

// AddWithContext increments the internal WaitGroup counter.
// It can be blocking if the limit of spawned goroutines
// has been reached. It will stop blocking when Done is
// been called, or when the context is canceled. Returns nil on
// success or an error if the context is canceled before the lock
// is acquired.
//
// See sync.WaitGroup documentation for more information.
func (s *SizedWaitGroup) AddWithContext(ctx context.Context) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	case s.current <- struct{}{}:
		break
	}
	s.wg.Add(1)
	return nil
}

// Done decrements the SizedWaitGroup counter.
// See sync.WaitGroup documentation for more information.
func (s *SizedWaitGroup) Done() {
	<-s.current
	s.wg.Done()
}

// Wait blocks until the SizedWaitGroup counter is zero.
// See sync.WaitGroup documentation for more information.
func (s *SizedWaitGroup) Wait() {
	s.wg.Wait()
}
vendor/modules.txt (vendored)
@@ -230,6 +230,8 @@ github.com/natefinch/pie
github.com/pelletier/go-toml
# github.com/pkg/errors v0.9.1
github.com/pkg/errors
# github.com/remeh/sizedwaitgroup v1.0.0
github.com/remeh/sizedwaitgroup
# github.com/pmezard/go-difflib v1.0.0
github.com/pmezard/go-difflib/difflib
# github.com/rogpeppe/go-internal v1.3.0