Mirror of https://github.com/stashapp/stash.git (synced 2025-12-17 12:24:38 +03:00)
Fixups + enable the commentFormatting linter (#1866)
* Add a space after // comments
For consistency, gocritic's commentFormatting check requires a space after the
// marker in each comment. This commit adds that space in every spot in the
code where it was missing; a small sketch of the accepted and rejected forms follows below.
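To illustrate what the check enforces, here is a minimal, hypothetical Go snippet (names invented, not taken from this commit):

    package example

    //bad: no space after the comment marker, so gocritic's commentFormatting check flags this
    var skipped int

    // good: a single space after "//" satisfies the check
    var processed int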
* Rewrite documentation on functions
Use the Go idiom of commenting:
* First sentence declares the purpose.
* First word is the name being declared
This style is preferred because grep can then find the names the user is
interested in. Consider e.g.,
go doc -all pkg/ffmpeg | grep -i transcode
where a match tells you the name of the function you are looking for.
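To make the idiom concrete, here is a hypothetical, simplified doc comment in that style (the name and signature are invented for illustration; they are not the real pkg/ffmpeg API):

    package example

    // TranscodeAudio copies the video stream as-is and transcodes the audio.
    // Because the first word is the declared name, the grep pipeline above
    // surfaces the function name together with its one-line purpose.
    func TranscodeAudio(path string) error {
        // body omitted
        return nil
    }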
* Remove old code comment-blocks
There are some commented-out blocks of old code in the code base, by now two
to three years old. It is unlikely they will be needed again any time soon,
and Git keeps the history of deleted code anyway.
Opt for deletion.
* Reorder imports
Split stdlib imports from non-stdlib imports in the files we touch.
* Use a range over an iteration variable
This is more idiomatic Go, and the code needed its comments fixed anyway; a small before/after sketch follows below.
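A minimal runnable before/after sketch (file names are made up for illustration):

    package main

    import "fmt"

    func main() {
        files := []string{"a.mp4", "b.mp4"}

        // Before: index-based loop; every access has to spell out files[i].
        for i := 0; i < len(files); i++ {
            fmt.Println(files[i])
        }

        // After: range yields each element directly as f.
        for _, f := range files {
            fmt.Println(f)
        }
    }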
* Use time.After rather than rolling our own
The idiom here is common enough that the stdlib provides a function for it.
Prefer the stdlib function over our own variant; a minimal sketch of the replacement follows below.
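A minimal runnable sketch of the swap, with a toy worker loop standing in for the real counting loop (the loop, ticker, and channel names here are illustrative):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const timeout = 100 * time.Millisecond

        // Hand-rolled variant (what we used to do): sleep in a goroutine, then signal.
        //   chTimeout := make(chan struct{})
        //   go func() {
        //       time.Sleep(timeout)
        //       chTimeout <- struct{}{}
        //   }()

        // stdlib variant: time.After returns a channel that fires once after the duration.
        chTimeout := time.After(timeout)

        ticker := time.NewTicker(10 * time.Millisecond)
        defer ticker.Stop()
        for {
            select {
            case <-chTimeout:
                fmt.Println("timed out")
                return
            case <-ticker.C:
                fmt.Println("working")
            }
        }
    }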
* Enable the commentFormatting linter
@@ -38,8 +38,6 @@ linters:
 linters-settings:
   gocritic:
     disabled-checks:
-      # Way too many errors to fix regarding comment formatting for now
-      - commentFormatting
       - appendAssign

   gofmt:
@@ -16,7 +16,7 @@ import (
     "github.com/stashapp/stash/pkg/logger"
 )

-//we use the github REST V3 API as no login is required
+// we use the github REST V3 API as no login is required
 const apiReleases string = "https://api.github.com/repos/stashapp/stash/releases"
 const apiTags string = "https://api.github.com/repos/stashapp/stash/tags"
 const apiAcceptHeader string = "application/vnd.github.v3+json"
@@ -164,7 +164,7 @@ func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) {
     }, nil
 }

-//Gets latest version (git shorthash commit for now)
+// Latestversion returns the latest git shorthash commit.
 func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion, error) {
     ver, url, err := GetLatestVersion(ctx, true)
     if err == nil {
@@ -67,9 +67,9 @@ func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) {
     _, _ = e.runTranscode(probeResult, args)
 }

-//transcode the video, remove the audio
-//in some videos where the audio codec is not supported by ffmpeg
-//ffmpeg fails if you try to transcode the audio
+// TranscodeVideo transcodes the video, and removes the audio.
+// In some videos where the audio codec is not supported by ffmpeg,
+// ffmpeg fails if you try to transcode the audio
 func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions) {
     scale := calculateTranscodeScale(probeResult, options.MaxTranscodeSize)
     args := []string{
@@ -87,7 +87,7 @@ func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions
     _, _ = e.runTranscode(probeResult, args)
 }

-//copy the video stream as is, transcode audio
+// TranscodeAudio will copy the video stream as is, and transcode audio.
 func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions) {
     args := []string{
         "-i", probeResult.Path,
@@ -99,7 +99,7 @@ func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions
     _, _ = e.runTranscode(probeResult, args)
 }

-//copy the video stream as is, drop audio
+// CopyVideo will copy the video stream as is, and drop the audio stream.
 func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) {
     args := []string{
         "-i", probeResult.Path,
@@ -72,8 +72,8 @@ var validAudioForMkv = []AudioCodec{Aac, Mp3, Vorbis, Opus}
 var validAudioForWebm = []AudioCodec{Vorbis, Opus}
 var validAudioForMp4 = []AudioCodec{Aac, Mp3}

-//maps user readable container strings to ffprobe's format_name
-//on some formats ffprobe can't differentiate
+// ContainerToFfprobe maps user readable container strings to ffprobe's format_name.
+// On some formats ffprobe can't differentiate
 var ContainerToFfprobe = map[Container]string{
     Mp4: Mp4Ffmpeg,
     M4v: M4vFfmpeg,
@@ -155,7 +155,8 @@ func IsValidForContainer(format Container, validContainers []Container) bool {
     return false
 }

-//extend stream validation check to take into account container
+// IsValidCombo checks if a codec/container combination is valid.
+// Returns true on validity, false otherwise
 func IsValidCombo(codecName string, format Container, supportedVideoCodecs []string) bool {
     supportMKV := IsValidCodec(Mkv, supportedVideoCodecs)
     supportHEVC := IsValidCodec(Hevc, supportedVideoCodecs)
@@ -227,10 +228,6 @@ type FFProbe string
 // Execute exec command and bind result to struct.
 func (f *FFProbe) NewVideoFile(videoPath string, stripExt bool) (*VideoFile, error) {
     args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", videoPath}
-    //// Extremely slow on windows for some reason
-    //if runtime.GOOS != "windows" {
-    //    args = append(args, "-count_frames")
-    //}
     out, err := exec.Command(string(*f), args...).Output()

     if err != nil {
@@ -256,9 +253,6 @@ func parse(filePath string, probeJSON *FFProbeJSON, stripExt bool) (*VideoFile,
     if result.JSON.Error.Code != 0 {
         return nil, fmt.Errorf("ffprobe error code %d: %s", result.JSON.Error.Code, result.JSON.Error.String)
     }
-    //} else if (ffprobeResult.stderr.includes("could not find codec parameters")) {
-    //    throw new Error(`FFProbe [${filePath}] -> Could not find codec parameters`);
-    //} // TODO nil_or_unsupported.(video_stream) && nil_or_unsupported.(audio_stream)

     result.Path = filePath
     result.Title = probeJSON.Format.Tags.Title
@@ -2,8 +2,9 @@ package ffmpeg

 import (
     "bytes"
-    "github.com/stashapp/stash/pkg/logger"
     "os"
+
+    "github.com/stashapp/stash/pkg/logger"
 )

 // detect file format from magic file number
@@ -37,11 +38,12 @@ func containsMatroskaSignature(buf, subType []byte) bool {
     return buf[index-3] == 0x42 && buf[index-2] == 0x82
 }

-//returns container as string ("" on error or no match)
-//implements only mkv or webm as ffprobe can't distinguish between them
-//and not all browsers support mkv
-func MagicContainer(file_path string) Container {
-    file, err := os.Open(file_path)
+// MagicContainer returns the container type of a file path.
+// Returns the zero-value on errors or no-match. Implements mkv or
+// webm only, as ffprobe can't distinguish between them and not all
+// browsers support mkv
+func MagicContainer(filePath string) Container {
+    file, err := os.Open(filePath)
     if err != nil {
         logger.Errorf("[magicfile] %v", err)
         return ""
@@ -50,7 +50,7 @@ const (

 const (
     studioName = "studioName"
-    //galleryChecksum = "galleryChecksum"
+    // galleryChecksum = "galleryChecksum"
 )

 var (
@@ -295,7 +295,3 @@ func Fatal(args ...interface{}) {
 func Fatalf(format string, args ...interface{}) {
     logger.Fatalf(format, args...)
 }
-
-//func WithRequest(req *http.Request) *logrus.Entry {
-//    return logger.WithFields(RequestFields(req))
-//}
@@ -9,7 +9,7 @@ import (
     "strings"

     "sync"
-    //"github.com/sasha-s/go-deadlock" // if you have deadlock issues
+    // "github.com/sasha-s/go-deadlock" // if you have deadlock issues

     "golang.org/x/crypto/bcrypt"
@@ -190,7 +190,7 @@ type Instance struct {
     certFile string
     keyFile  string
     sync.RWMutex
-    //deadlock.RWMutex // for deadlock testing/issues
+    // deadlock.RWMutex // for deadlock testing/issues
 }

 var instance *Instance
@@ -10,7 +10,6 @@ func TestConcurrentConfigAccess(t *testing.T) {
     i := GetInstance()

     const workers = 8
-    //const loops = 1000
     const loops = 200
     var wg sync.WaitGroup
     for k := 0; k < workers; k++ {
@@ -22,14 +22,13 @@ func excludeFiles(files []string, patterns []string) ([]string, int) {
         return files, 0
     }

-    for i := 0; i < len(files); i++ {
-        if matchFileSimple(files[i], fileRegexps) {
-            logger.Infof("File matched pattern. Excluding:\"%s\"", files[i])
+    for _, f := range files {
+        if matchFileSimple(f, fileRegexps) {
+            logger.Infof("File matched pattern. Excluding:\"%s\"", f)
             exclCount++
         } else {
-            //if pattern doesn't match add file to list
-            results = append(results, files[i])
+            // if pattern doesn't match add file to list
+            results = append(results, f)
         }
     }
     logger.Infof("Excluded %d file(s) from scan", exclCount)
@@ -2,8 +2,9 @@ package manager

 import (
     "fmt"
-    "github.com/stashapp/stash/pkg/logger"
     "testing"
+
+    "github.com/stashapp/stash/pkg/logger"
 )

 var excludeTestFilenames = []string{
@@ -31,16 +32,16 @@ var excludeTests = []struct {
     testPattern []string
     expected    int
 }{
-    {[]string{"sample\\.mp4$", "trash", "\\.[\\d]{3}\\.webm$"}, 6}, //generic
-    {[]string{"no_match\\.mp4"}, 0}, //no match
-    {[]string{"^/stash/videos/exclude/", "/videos/xcl/"}, 3}, //linux
-    {[]string{"/\\.[[:word:]]+/"}, 1}, //linux hidden dirs (handbrake unraid issue?)
-    {[]string{"c:\\\\stash\\\\videos\\\\exclude"}, 1}, //windows
-    {[]string{"\\/[/invalid"}, 0}, //invalid pattern
-    {[]string{"\\/[/invalid", "sample\\.[[:alnum:]]+$"}, 3}, //invalid pattern but continue
-    {[]string{"^\\\\\\\\network"}, 4}, //windows net share
-    {[]string{"\\\\private\\\\"}, 1}, //windows net share
-    {[]string{"\\\\private\\\\", "sample\\.mp4"}, 3}, //windows net share
+    {[]string{"sample\\.mp4$", "trash", "\\.[\\d]{3}\\.webm$"}, 6}, // generic
+    {[]string{"no_match\\.mp4"}, 0}, // no match
+    {[]string{"^/stash/videos/exclude/", "/videos/xcl/"}, 3}, // linux
+    {[]string{"/\\.[[:word:]]+/"}, 1}, // linux hidden dirs (handbrake unraid issue?)
+    {[]string{"c:\\\\stash\\\\videos\\\\exclude"}, 1}, // windows
+    {[]string{"\\/[/invalid"}, 0}, // invalid pattern
+    {[]string{"\\/[/invalid", "sample\\.[[:alnum:]]+$"}, 3}, // invalid pattern but continue
+    {[]string{"^\\\\\\\\network"}, 4}, // windows net share
+    {[]string{"\\\\private\\\\"}, 1}, // windows net share
+    {[]string{"\\\\private\\\\", "sample\\.mp4"}, 3}, // windows net share
 }

 func TestExcludeFiles(t *testing.T) {
@@ -3,13 +3,14 @@ package manager
 import (
     "database/sql"
     "errors"
-    "github.com/stashapp/stash/pkg/studio"
     "path/filepath"
     "regexp"
     "strconv"
     "strings"
     "time"

+    "github.com/stashapp/stash/pkg/studio"
+
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/tag"
 )
@@ -81,8 +82,6 @@ func initParserFields() {
     ret["title"] = newParserField("title", ".*", true)
     ret["ext"] = newParserField("ext", ".*$", false)

-    //I = new ParserField("i", undefined, "Matches any ignored word", false);
-
     ret["d"] = newParserField("d", `(?:\.|-|_)`, false)
     ret["rating"] = newParserField("rating", `\d`, true)
     ret["performer"] = newParserField("performer", ".*", true)
@@ -514,14 +514,8 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
     var totals totalsGenerate
     const timeout = 90 * time.Second

-    // create a control channel through which to signal the counting loop when the timeout is reached
-    chTimeout := make(chan struct{})
-
-    //run the timeout function in a separate thread
-    go func() {
-        time.Sleep(timeout)
-        chTimeout <- struct{}{}
-    }()
+    // Set a deadline.
+    chTimeout := time.After(timeout)

     fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
     overwrite := false
@@ -592,7 +586,7 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
             }
         }
     }
-    //check for timeout
+    // check for timeout
     select {
     case <-chTimeout:
         return nil
@@ -69,7 +69,7 @@ func (t *GenerateTranscodeTask) Start() {
         }
     } else {
         if audioCodec == ffmpeg.MissingUnsupported {
-            //ffmpeg fails if it trys to transcode an unsupported audio codec
+            // ffmpeg fails if it trys to transcode an unsupported audio codec
             encoder.TranscodeVideo(*videoFile, options)
         } else {
             encoder.Transcode(*videoFile, options)
@@ -432,7 +432,7 @@ func listKeys(i interface{}, addPrefix bool) string {
     var query []string
     v := reflect.ValueOf(i)
     for i := 0; i < v.NumField(); i++ {
-        //get key for struct tag
+        // Get key for struct tag
         rawKey := v.Type().Field(i).Tag.Get("db")
         key := strings.Split(rawKey, ",")[0]
         if key == "id" {
@@ -450,7 +450,7 @@ func updateSet(i interface{}, partial bool) string {
     var query []string
     v := reflect.ValueOf(i)
     for i := 0; i < v.NumField(); i++ {
-        //get key for struct tag
+        // Get key for struct tag
        rawKey := v.Type().Field(i).Tag.Get("db")
        key := strings.Split(rawKey, ",")[0]
        if key == "id" {
@@ -106,13 +106,6 @@ func GetDataFromBase64String(encodedString string) ([]byte, error) {
 // GetBase64StringFromData returns the given byte slice as a base64 encoded string
 func GetBase64StringFromData(data []byte) string {
     return base64.StdEncoding.EncodeToString(data)
-
-    // Really slow
-    //result = regexp.MustCompile(`(.{60})`).ReplaceAllString(result, "$1\n")
-    //if result[len(result)-1:] != "\n" {
-    //    result += "\n"
-    //}
-    //return result
 }

 func ServeImage(image []byte, w http.ResponseWriter, r *http.Request) error {