Multiple scene URLs (#3852)

* Add URLs scene relationship
* Update unit tests
* Update scene edit and details pages
* Update scrapers to use urls
* Post-process scenes during query scrape
* Update UI for URLs
* Change urls label
WithoutPants
2023-07-12 11:51:52 +10:00
committed by GitHub
parent 76a4bfa49a
commit 67d4f9729a
50 changed files with 978 additions and 205 deletions
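The core of the change is the back-and-forth normalisation between the legacy single URL field and the new URLs list, applied when scraped scenes are post-processed (see the postScrapeScene hunk below). The following is a minimal, self-contained sketch of that behaviour; scrapedScene and normaliseURLs are illustrative stand-ins, not names from this commit.

package main

import "fmt"

// scrapedScene is a cut-down stand-in for the real scraped-scene type; only
// the fields relevant to this change are included.
type scrapedScene struct {
	URL  *string  // legacy single-URL field, kept for older scrapers
	URLs []string // new multi-URL field
}

// normaliseURLs keeps the two fields consistent: a scraper that only sets URL
// gets it copied into URLs, and one that only sets URLs gets the first entry
// copied back into URL.
func normaliseURLs(s *scrapedScene) {
	if s.URL == nil && len(s.URLs) > 0 {
		s.URL = &s.URLs[0]
	}
	if s.URL != nil && len(s.URLs) == 0 {
		s.URLs = []string{*s.URL}
	}
}

func main() {
	legacy := "https://example.com/scene/1"
	old := &scrapedScene{URL: &legacy}
	normaliseURLs(old)
	fmt.Println(old.URLs) // [https://example.com/scene/1]

	multi := &scrapedScene{URLs: []string{"https://example.com/a", "https://example.com/b"}}
	normaliseURLs(multi)
	fmt.Println(*multi.URL) // https://example.com/a
}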

View File

@@ -52,6 +52,11 @@ func isCDPPathWS(c GlobalConfig) bool {
 	return strings.HasPrefix(c.GetScraperCDPPath(), "ws://")
 }

+type SceneFinder interface {
+	scene.IDFinder
+	models.URLLoader
+}
+
 type PerformerFinder interface {
 	match.PerformerAutoTagQueryer
 	match.PerformerFinder
@@ -73,7 +78,7 @@ type GalleryFinder interface {
 }

 type Repository struct {
-	SceneFinder     scene.IDFinder
+	SceneFinder     SceneFinder
 	GalleryFinder   GalleryFinder
 	TagFinder       TagFinder
 	PerformerFinder PerformerFinder
@@ -240,7 +245,19 @@ func (c Cache) ScrapeName(ctx context.Context, id, query string, ty ScrapeConten
 		return nil, fmt.Errorf("%w: cannot use scraper %s to scrape by name", ErrNotSupported, id)
 	}

-	return ns.viaName(ctx, c.client, query, ty)
+	content, err := ns.viaName(ctx, c.client, query, ty)
+	if err != nil {
+		return nil, fmt.Errorf("error while name scraping with scraper %s: %w", id, err)
+	}
+
+	for i, cc := range content {
+		content[i], err = c.postScrape(ctx, cc)
+		if err != nil {
+			return nil, fmt.Errorf("error while post-scraping with scraper %s: %w", id, err)
+		}
+	}
+
+	return content, nil
 }

 // ScrapeFragment uses the given fragment input to scrape
@@ -361,7 +378,7 @@ func (c Cache) getScene(ctx context.Context, sceneID int) (*models.Scene, error)
 			return fmt.Errorf("scene with id %d not found", sceneID)
 		}

-		return nil
+		return ret.LoadURLs(ctx, c.repository.SceneFinder)
 	}); err != nil {
 		return nil, err
 	}

View File

@@ -106,6 +106,14 @@ func (c Cache) postScrapeScenePerformer(ctx context.Context, p models.ScrapedPer
 }

 func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene) (ScrapedContent, error) {
+	// set the URL/URLs field
+	if scene.URL == nil && len(scene.URLs) > 0 {
+		scene.URL = &scene.URLs[0]
+	}
+	if scene.URL != nil && len(scene.URLs) == 0 {
+		scene.URLs = []string{*scene.URL}
+	}
+
 	if err := txn.WithReadTxn(ctx, c.txnManager, func(ctx context.Context) error {
 		pqb := c.repository.PerformerFinder
 		mqb := c.repository.MovieFinder

View File

@@ -20,8 +20,8 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
 	if scene.Title != "" {
 		ret["title"] = scene.Title
 	}
-	if scene.URL != "" {
-		ret["url"] = scene.URL
+	if len(scene.URLs.List()) > 0 {
+		ret["url"] = scene.URLs.List()[0]
 	}
 	return ret
 }
@@ -37,7 +37,11 @@ func queryURLParametersFromScrapedScene(scene ScrapedSceneInput) queryURLParamet
 	setField("title", scene.Title)
 	setField("code", scene.Code)
-	setField("url", scene.URL)
+	if len(scene.URLs) > 0 {
+		setField("url", &scene.URLs[0])
+	} else {
+		setField("url", scene.URL)
+	}
 	setField("date", scene.Date)
 	setField("details", scene.Details)
 	setField("director", scene.Director)

View File

@@ -5,12 +5,13 @@ import (
 )

 type ScrapedScene struct {
-	Title    *string `json:"title"`
-	Code     *string `json:"code"`
-	Details  *string `json:"details"`
-	Director *string `json:"director"`
-	URL      *string `json:"url"`
-	Date     *string `json:"date"`
+	Title    *string  `json:"title"`
+	Code     *string  `json:"code"`
+	Details  *string  `json:"details"`
+	Director *string  `json:"director"`
+	URL      *string  `json:"url"`
+	URLs     []string `json:"urls"`
+	Date     *string  `json:"date"`
 	// This should be a base64 encoded data URL
 	Image *string `json:"image"`
 	File *models.SceneFileType `json:"file"`
@@ -26,11 +27,12 @@ type ScrapedScene struct {
 func (ScrapedScene) IsScrapedContent() {}

 type ScrapedSceneInput struct {
-	Title        *string `json:"title"`
-	Code         *string `json:"code"`
-	Details      *string `json:"details"`
-	Director     *string `json:"director"`
-	URL          *string `json:"url"`
-	Date         *string `json:"date"`
-	RemoteSiteID *string `json:"remote_site_id"`
+	Title        *string  `json:"title"`
+	Code         *string  `json:"code"`
+	Details      *string  `json:"details"`
+	Director     *string  `json:"director"`
+	URL          *string  `json:"url"`
+	URLs         []string `json:"urls"`
+	Date         *string  `json:"date"`
+	RemoteSiteID *string  `json:"remote_site_id"`
 }

View File

@@ -328,7 +328,7 @@ func sceneToUpdateInput(scene *models.Scene) models.SceneUpdateInput {
 		ID:      strconv.Itoa(scene.ID),
 		Title:   &title,
 		Details: &scene.Details,
-		URL:     &scene.URL,
+		Urls:    scene.URLs.List(),
 		Date:    dateToStringPtr(scene.Date),
 	}
 }

View File

@@ -684,6 +684,7 @@ func getFingerprints(scene *graphql.SceneFragment) []*models.StashBoxFingerprint
 func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.SceneFragment) (*scraper.ScrapedScene, error) {
 	stashID := s.ID
+
 	ss := &scraper.ScrapedScene{
 		Title:        s.Title,
 		Code:         s.Code,
@@ -698,6 +699,14 @@ func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.Scen
 		// stash_id
 	}

+	for _, u := range s.Urls {
+		ss.URLs = append(ss.URLs, u.URL)
+	}
+
+	if len(ss.URLs) > 0 {
+		ss.URL = &ss.URLs[0]
+	}
+
 	if len(s.Images) > 0 {
 		// TODO - #454 code sorts images by aspect ratio according to a wanted
 		// orientation. I'm just grabbing the first for now
@@ -823,8 +832,9 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
 	if scene.Director != "" {
 		draft.Director = &scene.Director
 	}
-	if scene.URL != "" && len(strings.TrimSpace(scene.URL)) > 0 {
-		url := strings.TrimSpace(scene.URL)
+	// TODO - draft does not accept multiple URLs. Use single URL for now.
+	if len(scene.URLs.List()) > 0 {
+		url := strings.TrimSpace(scene.URLs.List()[0])
 		draft.URL = &url
 	}
 	if scene.Date != nil {