mirror of
https://github.com/stashapp/stash.git
synced 2025-12-18 04:44:37 +03:00
Setup and migration UI refactor (#1190)

* Make config instance-based
* Remove config dependency in paths
* Refactor config init
* Allow startup without database
* Get system status at UI initialise
* Add setup wizard
* Cache and Metadata optional. Database mandatory
* Handle metadata not set during full import/export
* Add links
* Remove config check middleware
* Stash not mandatory
* Panic on missing mandatory config fields
* Redirect setup to main page if setup not required
* Add migration UI
* Remove unused stuff
* Move UI initialisation into App
* Don't create metadata paths on RefreshConfig
* Add folder selector for generated in setup
* Env variable to set and create config file. Make docker images use a fixed config file.
* Set config file during setup
This commit is contained in:
@@ -14,21 +14,19 @@ import (
|
||||
)
|
||||
|
||||
// GlobalConfig provides the global scraper options to the scraper
// subsystem. It is satisfied by the application configuration and keeps
// the scraper package decoupled from the concrete config implementation.
type GlobalConfig interface {
	// GetScraperUserAgent returns the User-Agent header value used when
	// scraping over HTTP.
	GetScraperUserAgent() string

	// GetScrapersPath returns the directory that scraper yml
	// configurations are loaded from.
	GetScrapersPath() string

	// GetScraperCDPPath returns the path (local file or remote address)
	// to a Chrome CDP instance.
	GetScraperCDPPath() string

	// GetScraperCertCheck returns whether TLS certificates should be
	// verified while scraping.
	GetScraperCertCheck() bool
}
|
||||
|
||||
func (c GlobalConfig) isCDPPathHTTP() bool {
|
||||
return strings.HasPrefix(c.CDPPath, "http://") || strings.HasPrefix(c.CDPPath, "https://")
|
||||
func isCDPPathHTTP(c GlobalConfig) bool {
|
||||
return strings.HasPrefix(c.GetScraperCDPPath(), "http://") || strings.HasPrefix(c.GetScraperCDPPath(), "https://")
|
||||
}
|
||||
|
||||
func (c GlobalConfig) isCDPPathWS() bool {
|
||||
return strings.HasPrefix(c.CDPPath, "ws://")
|
||||
func isCDPPathWS(c GlobalConfig) bool {
|
||||
return strings.HasPrefix(c.GetScraperCDPPath(), "ws://")
|
||||
}
|
||||
|
||||
// Cache stores scraper details.
|
||||
@@ -45,7 +43,7 @@ type Cache struct {
|
||||
// Scraper configurations are loaded from yml files in the provided scrapers
|
||||
// directory and any subdirectories.
|
||||
func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) {
|
||||
scrapers, err := loadScrapers(globalConfig.Path)
|
||||
scrapers, err := loadScrapers(globalConfig.GetScrapersPath())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -93,7 +91,7 @@ func loadScrapers(path string) ([]config, error) {
|
||||
// In the event of an error during loading, the cache will be left empty.
|
||||
func (c *Cache) ReloadScrapers() error {
|
||||
c.scrapers = nil
|
||||
scrapers, err := loadScrapers(c.globalConfig.Path)
|
||||
scrapers, err := loadScrapers(c.globalConfig.GetScrapersPath())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -102,6 +100,7 @@ func (c *Cache) ReloadScrapers() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO - don't think this is needed
|
||||
// UpdateConfig updates the global config for the cache. If the scraper path
|
||||
// has changed, ReloadScrapers will need to be called separately.
|
||||
func (c *Cache) UpdateConfig(globalConfig GlobalConfig) {
|
||||
|
||||
Reference in New Issue
Block a user