From a9e2a590b29689406aae57ae84fc39d1a8b56b26 Mon Sep 17 00:00:00 2001 From: SmallCoccinelle <89733524+SmallCoccinelle@users.noreply.github.com> Date: Thu, 23 Sep 2021 09:15:50 +0200 Subject: [PATCH] Lint checks phase 2 (#1747) * Log 3 unchecked errors Rather than ignore errors, log them at the WARNING log level. The server has been functioning without these checks, so assume they do not warrant the ERROR level. * Log errors in concurrency test If we can't initialize the configuration, treat the test as a failure. * Undo the errcheck on configurations for now. * Handle unchecked errors in pkg/manager * Resolve unchecked errors * Handle DLNA/DMS unchecked errors * Handle error checking in concurrency test Generalize config initialization, so we can initialize a configuration without writing it to disk. Use this in the test case, since otherwise the test fails when it cannot write to disk. * Handle the remaining unchecked errors * Heed gosimple in update test * Use one-line if-initializer statements While here, fix a wrong variable capture error. * testing.T doesn't support %w Use %v instead, which is supported. * Remove unused query builder functions The Int/String criterion handler functions are now generalized. Thus, there's no need to keep these functions around anymore. * Mark filterBuilder.addRecursiveWith nolint The function may be useful in the future, and no alternative refactor looks cleaner. Keep the function around, but tell the linter to ignore it. * Remove utils.Btoi There are no users of this utility function. * Return error on scan failure If we fail to scan the row when looking for the unique checksum index, then report the error upwards. * Fix comments on exported functions * Fix typos * Fix startup error --- pkg/api/resolver_mutation_metadata.go | 5 +- pkg/database/custom_migrations.go | 5 +- pkg/database/database.go | 9 ++- pkg/database/transaction.go | 9 ++- pkg/dlna/dms.go | 14 +++- pkg/ffmpeg/encoder.go | 4 +- pkg/ffmpeg/stream.go | 4 +- pkg/image/image.go | 5 +- pkg/manager/config/config.go | 20 ++++-- pkg/manager/config/config_concurrency_test.go | 4 +- pkg/manager/manager.go | 48 +++++++++---- pkg/manager/task_export.go | 4 +- pkg/manager/task_generate_markers.go | 4 +- pkg/manager/task_stash_box_tag.go | 5 +- pkg/models/transaction.go | 22 ++++-- pkg/scraper/script.go | 4 +- pkg/scraper/xpath.go | 4 +- pkg/sqlite/filter.go | 1 + pkg/sqlite/query.go | 68 ------------------- pkg/tag/update_test.go | 10 +-- pkg/utils/boolean.go | 8 --- pkg/utils/oshash_internal_test.go | 2 +- 22 files changed, 138 insertions(+), 121 deletions(-) diff --git a/pkg/api/resolver_mutation_metadata.go b/pkg/api/resolver_mutation_metadata.go index b43e1b224..298238dd7 100644 --- a/pkg/api/resolver_mutation_metadata.go +++ b/pkg/api/resolver_mutation_metadata.go @@ -2,6 +2,7 @@ package api import ( "context" + "fmt" "io/ioutil" "path/filepath" "strconv" @@ -105,7 +106,9 @@ func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.Back mgr := manager.GetInstance() var backupPath string if download { - utils.EnsureDir(mgr.Paths.Generated.Downloads) + if err := utils.EnsureDir(mgr.Paths.Generated.Downloads); err != nil { + return nil, fmt.Errorf("could not create backup directory %v: %w", mgr.Paths.Generated.Downloads, err) + } f, err := ioutil.TempFile(mgr.Paths.Generated.Downloads, "backup*.sqlite") if err != nil { return nil, err diff --git a/pkg/database/custom_migrations.go b/pkg/database/custom_migrations.go index d3c234548..9e343a515 100644 --- a/pkg/database/custom_migrations.go +++ 
b/pkg/database/custom_migrations.go @@ -2,6 +2,7 @@ package database import ( "database/sql" + "fmt" "strings" "github.com/jmoiron/sqlx" @@ -26,7 +27,9 @@ func createImagesChecksumIndex() error { if err == nil { var found bool - row.Scan(&found) + if err := row.Scan(&found); err != nil && err != sql.ErrNoRows { + return fmt.Errorf("error while scanning for index: %w", err) + } if found { return nil } diff --git a/pkg/database/database.go b/pkg/database/database.go index 4b9d7ff79..f84207b65 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -151,7 +151,10 @@ func Reset(databasePath string) error { } } - Initialize(databasePath) + if err := Initialize(databasePath); err != nil { + return fmt.Errorf("[reset DB] unable to initialize: %w", err) + } + return nil } @@ -256,7 +259,9 @@ func RunMigrations() error { } // re-initialise the database - Initialize(dbPath) + if err = Initialize(dbPath); err != nil { + logger.Warnf("Error re-initializing the database: %v", err) + } // run a vacuum on the database logger.Info("Performing vacuum on database") diff --git a/pkg/database/transaction.go b/pkg/database/transaction.go index c3ef9a85f..32c1ab171 100644 --- a/pkg/database/transaction.go +++ b/pkg/database/transaction.go @@ -4,6 +4,7 @@ import ( "context" "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" ) // WithTxn executes the provided function within a transaction. It rolls back @@ -17,11 +18,15 @@ func WithTxn(fn func(tx *sqlx.Tx) error) error { defer func() { if p := recover(); p != nil { // a panic occurred, rollback and repanic - tx.Rollback() + if err := tx.Rollback(); err != nil { + logger.Warnf("failure when performing transaction rollback: %v", err) + } panic(p) } else if err != nil { // something went wrong, rollback - tx.Rollback() + if err := tx.Rollback(); err != nil { + logger.Warnf("failure when performing transaction rollback: %v", err) + } } else { // all good, commit err = tx.Commit() diff --git a/pkg/dlna/dms.go b/pkg/dlna/dms.go index 599cc7820..eb7d85531 100644 --- a/pkg/dlna/dms.go +++ b/pkg/dlna/dms.go @@ -416,7 +416,7 @@ func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) { } var scene *models.Scene - me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error { + err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error { idInt, err := strconv.Atoi(sceneId) if err != nil { return nil @@ -424,6 +424,9 @@ func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) { scene, _ = r.Scene().Find(idInt) return nil }) + if err != nil { + logger.Warnf("failed to execute read transaction while trying to serve an icon: %v", err) + } if scene == nil { return @@ -552,7 +555,7 @@ func (me *Server) initMux(mux *http.ServeMux) { mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) { sceneId := r.URL.Query().Get("scene") var scene *models.Scene - me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error { + err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error { sceneIdInt, err := strconv.Atoi(sceneId) if err != nil { return nil @@ -560,6 +563,9 @@ func (me *Server) initMux(mux *http.ServeMux) { scene, _ = r.Scene().Find(sceneIdInt) return nil }) + if err != nil { + logger.Warnf("failed to execute read transaction for scene id (%v): %v", sceneId, err) + } if scene == nil { return @@ -571,7 +577,9 @@ func (me *Server) initMux(mux *http.ServeMux) { w.Header().Set("content-type", 
`text/xml; charset="utf-8"`) w.Header().Set("content-length", fmt.Sprint(len(me.rootDescXML))) w.Header().Set("server", serverField) - w.Write(me.rootDescXML) + if k, err := w.Write(me.rootDescXML); err != nil { + logger.Warnf("could not write rootDescXML (wrote %v bytes of %v): %v", k, len(me.rootDescXML), err) + } }) handleSCPDs(mux) mux.HandleFunc(serviceControlURL, me.serviceControlHandler) diff --git a/pkg/ffmpeg/encoder.go b/pkg/ffmpeg/encoder.go index 9d96dadf3..4b13244f2 100644 --- a/pkg/ffmpeg/encoder.go +++ b/pkg/ffmpeg/encoder.go @@ -63,7 +63,9 @@ func KillRunningEncoders(path string) { for _, process := range processes { // assume it worked, don't check for error logger.Infof("Killing encoder process for file: %s", path) - process.Kill() + if err := process.Kill(); err != nil { + logger.Warnf("failed to kill process %v: %v", process.Pid, err) + } // wait for the process to die before returning // don't wait more than a few seconds diff --git a/pkg/ffmpeg/stream.go b/pkg/ffmpeg/stream.go index 276f8c847..a0303df50 100644 --- a/pkg/ffmpeg/stream.go +++ b/pkg/ffmpeg/stream.go @@ -32,7 +32,9 @@ func (s *Stream) Serve(w http.ResponseWriter, r *http.Request) { notify := r.Context().Done() go func() { <-notify - s.Process.Kill() + if err := s.Process.Kill(); err != nil { + logger.Warnf("unable to kill os process %v: %v", s.Process.Pid, err) + } }() _, err := io.Copy(w, s.Stdout) diff --git a/pkg/image/image.go b/pkg/image/image.go index fc9cbe22f..5867c178c 100644 --- a/pkg/image/image.go +++ b/pkg/image/image.go @@ -13,6 +13,7 @@ import ( "strings" "time" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" _ "golang.org/x/image/webp" @@ -252,7 +253,9 @@ func Serve(w http.ResponseWriter, r *http.Request, path string) { return } - w.Write(data) + if k, err := w.Write(data); err != nil { + logger.Warnf("failure while serving image (wrote %v bytes out of %v): %v", k, len(data), err) + } } } diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go index 0629f8d87..ab45976f0 100644 --- a/pkg/manager/config/config.go +++ b/pkg/manager/config/config.go @@ -962,8 +962,7 @@ func (i *Instance) SetChecksumDefaultValues(defaultAlgorithm models.HashAlgorith viper.SetDefault(CalculateMD5, usingMD5) } -func (i *Instance) setDefaultValues() error { - +func (i *Instance) setDefaultValues(write bool) error { // read data before write lock scope defaultDatabaseFilePath := i.GetDefaultDatabaseFilePath() defaultScrapersPath := i.GetDefaultScrapersPath() @@ -989,11 +988,24 @@ func (i *Instance) setDefaultValues() error { // Set default scrapers and plugins paths viper.SetDefault(ScrapersPath, defaultScrapersPath) viper.SetDefault(PluginsPath, defaultPluginsPath) - return viper.WriteConfig() + if write { + return viper.WriteConfig() + } + + return nil } // SetInitialConfig fills in missing required config fields func (i *Instance) SetInitialConfig() error { + return i.setInitialConfig(true) +} + +// SetInitialMemoryConfig fills in missing required config fields without writing the configuration +func (i *Instance) SetInitialMemoryConfig() error { + return i.setInitialConfig(false) +} + +func (i *Instance) setInitialConfig(write bool) error { // generate some api keys const apiKeyLength = 32 @@ -1007,7 +1019,7 @@ func (i *Instance) SetInitialConfig() error { i.Set(SessionStoreKey, sessionStoreKey) } - return i.setDefaultValues() + return i.setDefaultValues(write) } func (i *Instance) FinalizeSetup() { diff --git 
a/pkg/manager/config/config_concurrency_test.go b/pkg/manager/config/config_concurrency_test.go index fbfadd81c..cc6b56d67 100644 --- a/pkg/manager/config/config_concurrency_test.go +++ b/pkg/manager/config/config_concurrency_test.go @@ -17,7 +17,9 @@ func TestConcurrentConfigAccess(t *testing.T) { wg.Add(1) go func(wk int) { for l := 0; l < loops; l++ { - i.SetInitialConfig() + if err := i.SetInitialMemoryConfig(); err != nil { + t.Errorf("Failure setting initial configuration in worker %v iteration %v: %v", wk, l, err) + } i.HasCredentials() i.GetCPUProfilePath() diff --git a/pkg/manager/manager.go b/pkg/manager/manager.go index 6581ec3fe..64160ed94 100644 --- a/pkg/manager/manager.go +++ b/pkg/manager/manager.go @@ -110,11 +110,15 @@ func Initialize() *singleton { logger.Warnf("config file %snot found. Assuming new system...", cfgFile) } - initFFMPEG() + if err = initFFMPEG(); err != nil { + logger.Warnf("could not initialize FFMPEG subsystem: %v", err) + } // if DLNA is enabled, start it now if instance.Config.GetDLNADefaultEnabled() { - instance.DLNAService.Start(nil) + if err := instance.DLNAService.Start(nil); err != nil { + logger.Warnf("could not start DLNA service: %v", err) + } } }) @@ -134,7 +138,9 @@ func initProfiling(cpuProfilePath string) { logger.Infof("profiling to %s", cpuProfilePath) // StopCPUProfile is defer called in main - pprof.StartCPUProfile(f) + if err = pprof.StartCPUProfile(f); err != nil { + logger.Warnf("could not start CPU profiling: %v", err) + } } func initFFMPEG() error { @@ -182,7 +188,9 @@ func initLog() { // configuration has been set. Should only be called if the configuration // is valid. func (s *singleton) PostInit() error { - s.Config.SetInitialConfig() + if err := s.Config.SetInitialConfig(); err != nil { + logger.Warnf("could not set initial configuration: %v", err) + } s.Paths = paths.NewPaths(s.Config.GetGeneratedPath()) s.RefreshConfig() @@ -201,8 +209,12 @@ func (s *singleton) PostInit() error { const deleteTimeout = 1 * time.Second utils.Timeout(func() { - utils.EmptyDir(instance.Paths.Generated.Downloads) - utils.EmptyDir(instance.Paths.Generated.Tmp) + if err := utils.EmptyDir(instance.Paths.Generated.Downloads); err != nil { + logger.Warnf("could not empty Downloads directory: %v", err) + } + if err := utils.EmptyDir(instance.Paths.Generated.Tmp); err != nil { + logger.Warnf("could not empty Tmp directory: %v", err) + } }, deleteTimeout, func(done chan struct{}) { logger.Info("Please wait. 
Deleting temporary files...") // print <-done // and wait for deletion @@ -236,11 +248,21 @@ func (s *singleton) RefreshConfig() { s.Paths = paths.NewPaths(s.Config.GetGeneratedPath()) config := s.Config if config.Validate() == nil { - utils.EnsureDir(s.Paths.Generated.Screenshots) - utils.EnsureDir(s.Paths.Generated.Vtt) - utils.EnsureDir(s.Paths.Generated.Markers) - utils.EnsureDir(s.Paths.Generated.Transcodes) - utils.EnsureDir(s.Paths.Generated.Downloads) + if err := utils.EnsureDir(s.Paths.Generated.Screenshots); err != nil { + logger.Warnf("could not create directory for Screenshots: %v", err) + } + if err := utils.EnsureDir(s.Paths.Generated.Vtt); err != nil { + logger.Warnf("could not create directory for VTT: %v", err) + } + if err := utils.EnsureDir(s.Paths.Generated.Markers); err != nil { + logger.Warnf("could not create directory for Markers: %v", err) + } + if err := utils.EnsureDir(s.Paths.Generated.Transcodes); err != nil { + logger.Warnf("could not create directory for Transcodes: %v", err) + } + if err := utils.EnsureDir(s.Paths.Generated.Downloads); err != nil { + logger.Warnf("could not create directory for Downloads: %v", err) + } } } @@ -304,7 +326,9 @@ func (s *singleton) Setup(input models.SetupInput) error { s.Config.FinalizeSetup() - initFFMPEG() + if err := initFFMPEG(); err != nil { + return fmt.Errorf("error initializing FFMPEG subsystem: %v", err) + } return nil } diff --git a/pkg/manager/task_export.go b/pkg/manager/task_export.go index 58c1ca834..b062aa269 100644 --- a/pkg/manager/task_export.go +++ b/pkg/manager/task_export.go @@ -174,7 +174,9 @@ func (t *ExportTask) Start(wg *sync.WaitGroup) { func (t *ExportTask) generateDownload() error { // zip the files and register a download link - utils.EnsureDir(instance.Paths.Generated.Downloads) + if err := utils.EnsureDir(instance.Paths.Generated.Downloads); err != nil { + return err + } z, err := ioutil.TempFile(instance.Paths.Generated.Downloads, "export*.zip") if err != nil { return err diff --git a/pkg/manager/task_generate_markers.go b/pkg/manager/task_generate_markers.go index 153fb3517..a89a5e538 100644 --- a/pkg/manager/task_generate_markers.go +++ b/pkg/manager/task_generate_markers.go @@ -78,7 +78,9 @@ func (t *GenerateMarkersTask) generateSceneMarkers() { // Make the folder for the scenes markers markersFolder := filepath.Join(instance.Paths.Generated.Markers, sceneHash) - utils.EnsureDir(markersFolder) + if err := utils.EnsureDir(markersFolder); err != nil { + logger.Warnf("could not create the markers folder (%v): %v", markersFolder, err) + } for i, sceneMarker := range sceneMarkers { index := i + 1 diff --git a/pkg/manager/task_stash_box_tag.go b/pkg/manager/task_stash_box_tag.go index dd15e7808..7a7e7b8f7 100644 --- a/pkg/manager/task_stash_box_tag.go +++ b/pkg/manager/task_stash_box_tag.go @@ -153,7 +153,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() { partial.URL = &value } - t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { + txnErr := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { _, err := r.Performer().Update(partial) if !t.refresh { @@ -188,6 +188,9 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() { } return err }) + if txnErr != nil { + logger.Warnf("failure to execute partial update of performer: %v", err) + } } else if t.name != nil && performer.Name != nil { currentTime := time.Now() newPerformer := models.Performer{ diff --git a/pkg/models/transaction.go b/pkg/models/transaction.go index 525de5a99..c30f4cb9a 100644 
--- a/pkg/models/transaction.go +++ b/pkg/models/transaction.go @@ -1,6 +1,10 @@ package models -import "context" +import ( + "context" + + "github.com/stashapp/stash/pkg/logger" +) type Transaction interface { Begin() error @@ -30,11 +34,15 @@ func WithTxn(txn Transaction, fn func(r Repository) error) error { defer func() { if p := recover(); p != nil { // a panic occurred, rollback and repanic - txn.Rollback() + if err := txn.Rollback(); err != nil { + logger.Warnf("error while trying to roll back transaction: %v", err) + } panic(p) } else if err != nil { // something went wrong, rollback - txn.Rollback() + if err := txn.Rollback(); err != nil { + logger.Warnf("error while trying to roll back transaction: %v", err) + } } else { // all good, commit err = txn.Commit() @@ -54,11 +62,15 @@ func WithROTxn(txn ReadTransaction, fn func(r ReaderRepository) error) error { defer func() { if p := recover(); p != nil { // a panic occurred, rollback and repanic - txn.Rollback() + if err := txn.Rollback(); err != nil { + logger.Warnf("error while trying to roll back RO transaction: %v", err) + } panic(p) } else if err != nil { // something went wrong, rollback - txn.Rollback() + if err := txn.Rollback(); err != nil { + logger.Warnf("error while trying to roll back RO transaction: %v", err) + } } else { // all good, commit err = txn.Commit() diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index ad86517aa..a53266e0e 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -48,7 +48,9 @@ func (s *scriptScraper) runScraperScript(inString string, out interface{}) error go func() { defer stdin.Close() - io.WriteString(stdin, inString) + if n, err := io.WriteString(stdin, inString); err != nil { + logger.Warnf("failure to write full input to script (wrote %v bytes out of %v): %v", n, len(inString), err) + } }() stderr, err := cmd.StderrPipe() diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 270b9d381..0cd955788 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -235,7 +235,9 @@ func (s *xpathScraper) loadURL(url string) (*html.Node, error) { if err == nil && s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { var b bytes.Buffer - html.Render(&b, ret) + if err := html.Render(&b, ret); err != nil { + logger.Warnf("could not render HTML: %v", err) + } logger.Infof("loadURL (%s) response: \n%s", url, b.String()) } diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go index d29647cf9..40cf98857 100644 --- a/pkg/sqlite/filter.go +++ b/pkg/sqlite/filter.go @@ -193,6 +193,7 @@ func (f *filterBuilder) addWith(sql string, args ...interface{}) { } // addRecursiveWith adds a with clause and arguments to the filter, and sets it to recursive +//nolint:unused func (f *filterBuilder) addRecursiveWith(sql string, args ...interface{}) { if sql == "" { return diff --git a/pkg/sqlite/query.go b/pkg/sqlite/query.go index 158e7f971..59b200641 100644 --- a/pkg/sqlite/query.go +++ b/pkg/sqlite/query.go @@ -1,11 +1,7 @@ package sqlite import ( - "fmt" - "regexp" "strings" - - "github.com/stashapp/stash/pkg/models" ) type queryBuilder struct { @@ -139,67 +135,3 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) { qb.addJoins(f.getAllJoins()...) } - -func (qb *queryBuilder) handleIntCriterionInput(c *models.IntCriterionInput, column string) { - if c != nil { - clause, args := getIntCriterionWhereClause(column, *c) - qb.addWhere(clause) - qb.addArg(args...) 
- } -} - -func (qb *queryBuilder) handleStringCriterionInput(c *models.StringCriterionInput, column string) { - if c != nil { - if modifier := c.Modifier; c.Modifier.IsValid() { - switch modifier { - case models.CriterionModifierIncludes: - clause, thisArgs := getSearchBinding([]string{column}, c.Value, false) - qb.addWhere(clause) - qb.addArg(thisArgs...) - case models.CriterionModifierExcludes: - clause, thisArgs := getSearchBinding([]string{column}, c.Value, true) - qb.addWhere(clause) - qb.addArg(thisArgs...) - case models.CriterionModifierEquals: - qb.addWhere(column + " LIKE ?") - qb.addArg(c.Value) - case models.CriterionModifierNotEquals: - qb.addWhere(column + " NOT LIKE ?") - qb.addArg(c.Value) - case models.CriterionModifierMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - qb.err = err - return - } - qb.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column)) - qb.addArg(c.Value) - case models.CriterionModifierNotMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - qb.err = err - return - } - qb.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column)) - qb.addArg(c.Value) - case models.CriterionModifierIsNull: - qb.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')") - case models.CriterionModifierNotNull: - qb.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") - default: - clause, count := getSimpleCriterionClause(modifier, "?") - qb.addWhere(column + " " + clause) - if count == 1 { - qb.addArg(c.Value) - } - } - } - } -} - -func (qb *queryBuilder) handleCountCriterion(countFilter *models.IntCriterionInput, primaryTable, joinTable, primaryFK string) { - if countFilter != nil { - clause, args := getCountCriterionClause(primaryTable, joinTable, primaryFK, *countFilter) - - qb.addWhere(clause) - qb.addArg(args...) - } -} diff --git a/pkg/tag/update_test.go b/pkg/tag/update_test.go index 5a9cddf9d..d3a7f226d 100644 --- a/pkg/tag/update_test.go +++ b/pkg/tag/update_test.go @@ -2,11 +2,12 @@ package tag import ( "fmt" + "testing" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" - "testing" ) var testUniqueHierarchyTags = map[int]*models.Tag{ @@ -211,8 +212,7 @@ func testEnsureUniqueHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryPa mockTagReader := &mocks.TagReaderWriter{} var parentIDs, childIDs []int - var find map[int]*models.Tag - find = make(map[int]*models.Tag) + find := make(map[int]*models.Tag) if tc.parents != nil { parentIDs = make([]int, 0) for _, parent := range tc.parents { @@ -262,7 +262,7 @@ func testEnsureUniqueHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryPa } return nil }, func(tagID int, excludeIDs []int) error { - for id, _ := range tc.onFindAllAncestors { + for id := range tc.onFindAllAncestors { if id == tagID { return nil } @@ -278,7 +278,7 @@ func testEnsureUniqueHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryPa } return nil }, func(tagID int, excludeIDs []int) error { - for id, _ := range tc.onFindAllDescendants { + for id := range tc.onFindAllDescendants { if id == tagID { return nil } diff --git a/pkg/utils/boolean.go b/pkg/utils/boolean.go index a5f23733b..80fa77ec7 100644 --- a/pkg/utils/boolean.go +++ b/pkg/utils/boolean.go @@ -1,13 +1,5 @@ package utils -// Btoi transforms a boolean to an int. 
1 for true, false otherwise -func Btoi(b bool) int { - if b { - return 1 - } - return 0 -} - // IsTrue returns true if the bool pointer is not nil and true. func IsTrue(b *bool) bool { return b != nil && *b diff --git a/pkg/utils/oshash_internal_test.go b/pkg/utils/oshash_internal_test.go index 0263bab8a..b4d5b9d35 100644 --- a/pkg/utils/oshash_internal_test.go +++ b/pkg/utils/oshash_internal_test.go @@ -12,7 +12,7 @@ func TestOshashEmpty(t *testing.T) { want := "0000000000000000" got, err := oshash(size, head, tail) if err != nil { - t.Errorf("TestOshashEmpty: Error from oshash: %w", err) + t.Errorf("TestOshashEmpty: Error from oshash: %v", err) } if got != want { t.Errorf("TestOshashEmpty: oshash(0, 0, 0) = %q; want %q", got, want)
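A note for reviewers reading this patch outside the repository: the change applies one pattern throughout — replace a bare call whose error was discarded with a one-line if-initializer that either logs the failure at the WARNING level when the caller can safely continue, or wraps it with %w and returns it when it cannot. The sketch below restates that pattern as a small standalone Go program. It is illustrative only and not part of the patch; it uses the standard library's log and os packages plus hypothetical helper names (ensureDir, refreshDirs, backupDir) rather than the stash utils and logger packages.

package main

import (
	"fmt"
	"log"
	"os"
)

// ensureDir is a stand-in for utils.EnsureDir: it creates the directory if it is missing.
func ensureDir(path string) error {
	return os.MkdirAll(path, 0755)
}

// refreshDirs mirrors the non-fatal case (as in RefreshConfig): the failure is logged
// at the warning level and execution continues.
func refreshDirs(generatedPath string) {
	if err := ensureDir(generatedPath); err != nil {
		log.Printf("WARNING: could not create directory %v: %v", generatedPath, err)
	}
}

// backupDir mirrors the fatal case (as in BackupDatabase): the error is wrapped with %w
// and returned to the caller.
func backupDir(downloadsPath string) error {
	if err := ensureDir(downloadsPath); err != nil {
		return fmt.Errorf("could not create backup directory %v: %w", downloadsPath, err)
	}
	return nil
}

func main() {
	refreshDirs("generated/screenshots")
	if err := backupDir("generated/downloads"); err != nil {
		log.Printf("backup failed: %v", err)
	}
}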