Add gallery scraping (#862)

Author: SpedNSFW
Date: 2020-10-21 09:24:32 +11:00
Committed by: GitHub
Parent: 872bb70f6e
Commit: 147d0067f5
19 changed files with 1296 additions and 1 deletions


@@ -118,3 +118,22 @@ fragment ScrapedSceneData on ScrapedScene {
...ScrapedSceneMovieData
}
}
fragment ScrapedGalleryData on ScrapedGallery {
title
details
url
date
studio {
...ScrapedSceneStudioData
}
tags {
...ScrapedSceneTagData
}
performers {
...ScrapedScenePerformerData
}
}


@@ -20,6 +20,17 @@ query ListSceneScrapers {
}
}
query ListGalleryScrapers {
listGalleryScrapers {
id
name
gallery {
urls
supported_scrapes
}
}
}
query ListMovieScrapers {
listMovieScrapers {
id
@@ -61,6 +72,18 @@ query ScrapeSceneURL($url: String!) {
}
}
query ScrapeGallery($scraper_id: ID!, $gallery: GalleryUpdateInput!) {
scrapeGallery(scraper_id: $scraper_id, gallery: $gallery) {
...ScrapedGalleryData
}
}
query ScrapeGalleryURL($url: String!) {
scrapeGalleryURL(url: $url) {
...ScrapedGalleryData
}
}
query ScrapeMovieURL($url: String!) {
scrapeMovieURL(url: $url) {
...ScrapedMovieData


@@ -64,6 +64,7 @@ type Query {
"""List available scrapers""" """List available scrapers"""
listPerformerScrapers: [Scraper!]! listPerformerScrapers: [Scraper!]!
listSceneScrapers: [Scraper!]! listSceneScrapers: [Scraper!]!
listGalleryScrapers: [Scraper!]!
listMovieScrapers: [Scraper!]!
"""Scrape a list of performers based on name"""
@@ -76,6 +77,10 @@ type Query {
scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene
"""Scrapes a complete performer record based on a URL"""
scrapeSceneURL(url: String!): ScrapedScene
"""Scrapes a complete gallery record based on an existing gallery"""
scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery
"""Scrapes a complete gallery record based on a URL"""
scrapeGalleryURL(url: String!): ScrapedGallery
"""Scrapes a complete movie record based on a URL""" """Scrapes a complete movie record based on a URL"""
scrapeMovieURL(url: String!): ScrapedMovie scrapeMovieURL(url: String!): ScrapedMovie


@@ -20,6 +20,8 @@ type Scraper {
performer: ScraperSpec
"""Details for scene scraper"""
scene: ScraperSpec
"""Details for gallery scraper"""
gallery: ScraperSpec
"""Details for movie scraper""" """Details for movie scraper"""
movie: ScraperSpec movie: ScraperSpec
} }
@@ -88,6 +90,17 @@ type ScrapedScene {
movies: [ScrapedSceneMovie!]
}
type ScrapedGallery {
title: String
details: String
url: String
date: String
studio: ScrapedSceneStudio
tags: [ScrapedSceneTag!]
performers: [ScrapedScenePerformer!]
}
input StashBoxQueryInput {
"""Index of the configured stash-box instance to use"""
stash_box_index: Int!


@@ -44,6 +44,10 @@ func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scrape
return manager.GetInstance().ScraperCache.ListSceneScrapers(), nil
}
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListGalleryScrapers(), nil
}
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListMovieScrapers(), nil
}
@@ -72,6 +76,14 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models
return manager.GetInstance().ScraperCache.ScrapeSceneURL(url)
}
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, gallery)
}
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
return manager.GetInstance().ScraperCache.ScrapeGalleryURL(url)
}
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
return manager.GetInstance().ScraperCache.ScrapeMovieURL(url)
}


@@ -89,6 +89,18 @@ type ScrapedSceneStash struct {
Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
}
type ScrapedGalleryStash struct {
ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
Date *string `graphql:"date" json:"date"`
File *SceneFileType `graphql:"file" json:"file"`
Studio *ScrapedSceneStudio `graphql:"studio" json:"studio"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
}
type ScrapedScenePerformer struct {
// Set if performer matched
ID *string `graphql:"id" json:"id"`


@@ -40,6 +40,9 @@ type scraper interface {
scrapeSceneByFragment(scene models.SceneUpdateInput) (*models.ScrapedScene, error)
scrapeSceneByURL(url string) (*models.ScrapedScene, error)
scrapeGalleryByFragment(scene models.GalleryUpdateInput) (*models.ScrapedGallery, error)
scrapeGalleryByURL(url string) (*models.ScrapedGallery, error)
scrapeMovieByURL(url string) (*models.ScrapedMovie, error)
}
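Every scraper backend (script, JSON, XPath, stash) now has to provide the two gallery methods added to this interface. A minimal, self-contained sketch of that obligation, using stand-in types rather than the real models package:

package main

import "fmt"

// Stand-ins for models.GalleryUpdateInput and models.ScrapedGallery.
type GalleryUpdateInput struct{ ID string }
type ScrapedGallery struct{ Title *string }

// The two methods added to the scraper interface above.
type galleryScraper interface {
	scrapeGalleryByFragment(gallery GalleryUpdateInput) (*ScrapedGallery, error)
	scrapeGalleryByURL(url string) (*ScrapedGallery, error)
}

// A backend that does not support a given mode still satisfies the
// interface by returning an error from that method.
type noGallerySupport struct{}

func (noGallerySupport) scrapeGalleryByFragment(GalleryUpdateInput) (*ScrapedGallery, error) {
	return nil, fmt.Errorf("gallery fragment scraping not supported")
}

func (noGallerySupport) scrapeGalleryByURL(string) (*ScrapedGallery, error) {
	return nil, fmt.Errorf("gallery URL scraping not supported")
}

func main() {
	var s galleryScraper = noGallerySupport{}
	_, err := s.scrapeGalleryByURL("https://example.com/gallery/1")
	fmt.Println(err)
}

The stash backend takes exactly this approach for URL scraping, returning a "not supported" error further down in this diff.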


@@ -32,9 +32,15 @@ type config struct {
// Configuration for querying scenes by a Scene fragment
SceneByFragment *scraperTypeConfig `yaml:"sceneByFragment"`
// Configuration for querying a gallery by a Gallery fragment
GalleryByFragment *scraperTypeConfig `yaml:"galleryByFragment"`
// Configuration for querying a scene by a URL
SceneByURL []*scrapeByURLConfig `yaml:"sceneByURL"`
// Configuration for querying a gallery by a URL
GalleryByURL []*scrapeByURLConfig `yaml:"galleryByURL"`
// Configuration for querying a movie by a URL
MovieByURL []*scrapeByURLConfig `yaml:"movieByURL"`
@@ -234,6 +240,21 @@ func (c config) toScraper() *models.Scraper {
ret.Scene = &scene
}
gallery := models.ScraperSpec{}
if c.GalleryByFragment != nil {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment)
}
if len(c.GalleryByURL) > 0 {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.GalleryByURL {
gallery.Urls = append(gallery.Urls, v.URL...)
}
}
if len(gallery.SupportedScrapes) > 0 {
ret.Gallery = &gallery
}
movie := models.ScraperSpec{}
if len(c.MovieByURL) > 0 {
movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL)
@@ -308,6 +329,10 @@ func (c config) supportsScenes() bool {
return c.SceneByFragment != nil || len(c.SceneByURL) > 0
}
func (c config) supportsGalleries() bool {
return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0
}
func (c config) matchesSceneURL(url string) bool {
for _, scraper := range c.SceneByURL {
if scraper.matchesURL(url) {
@@ -318,6 +343,15 @@ func (c config) matchesSceneURL(url string) bool {
return false
}
func (c config) matchesGalleryURL(url string) bool {
for _, scraper := range c.GalleryByURL {
if scraper.matchesURL(url) {
return true
}
}
return false
}
func (c config) supportsMovies() bool {
return len(c.MovieByURL) > 0
}
@@ -359,6 +393,33 @@ func (c config) ScrapeSceneURL(url string, globalConfig GlobalConfig) (*models.S
return nil, nil
}
func (c config) ScrapeGallery(gallery models.GalleryUpdateInput, globalConfig GlobalConfig) (*models.ScrapedGallery, error) {
if c.GalleryByFragment != nil {
s := getScraper(*c.GalleryByFragment, c, globalConfig)
return s.scrapeGalleryByFragment(gallery)
}
return nil, nil
}
func (c config) ScrapeGalleryURL(url string, globalConfig GlobalConfig) (*models.ScrapedGallery, error) {
for _, scraper := range c.GalleryByURL {
if scraper.matchesURL(url) {
s := getScraper(scraper.scraperTypeConfig, c, globalConfig)
ret, err := s.scrapeGalleryByURL(url)
if err != nil {
return nil, err
}
if ret != nil {
return ret, nil
}
}
}
return nil, nil
}
func (c config) ScrapeMovieURL(url string, globalConfig GlobalConfig) (*models.ScrapedMovie, error) {
for _, scraper := range c.MovieByURL {
if scraper.matchesURL(url) {
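The new gallery spec in toScraper follows the same pattern as scenes and movies: fragment support comes from a galleryByFragment entry, URL support from any galleryByURL entries, and a scraper with neither advertises no gallery support at all. A small, self-contained sketch of that mapping, with stand-in types and constant values rather than the real models package:

package main

import "fmt"

// Stand-ins for models.ScrapeType and models.ScraperSpec.
type ScrapeType string

const (
	ScrapeTypeFragment ScrapeType = "FRAGMENT"
	ScrapeTypeURL      ScrapeType = "URL"
)

type ScraperSpec struct {
	SupportedScrapes []ScrapeType
	Urls             []string
}

// gallerySpec mirrors the gallery branch of toScraper above.
func gallerySpec(hasFragmentConfig bool, urlPatterns []string) *ScraperSpec {
	spec := ScraperSpec{}
	if hasFragmentConfig {
		spec.SupportedScrapes = append(spec.SupportedScrapes, ScrapeTypeFragment)
	}
	if len(urlPatterns) > 0 {
		spec.SupportedScrapes = append(spec.SupportedScrapes, ScrapeTypeURL)
		spec.Urls = append(spec.Urls, urlPatterns...)
	}
	if len(spec.SupportedScrapes) == 0 {
		return nil // no gallery support advertised
	}
	return &spec
}

func main() {
	// A scraper that only declares galleryByURL entries.
	fmt.Printf("%+v\n", gallerySpec(false, []string{"example.com/galleries"}))
}

The Urls list is what the UI later checks (see urlScrapable in GalleryEditPanel further down) to decide whether to offer the scrape button for a given gallery URL.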


@@ -88,6 +88,16 @@ func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error)
return scraper.scrapeScene(q)
}
func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
doc, scraper, err := s.scrapeURL(url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
doc, scraper, err := s.scrapeURL(url)
if err != nil {
@@ -156,6 +166,34 @@ func (s *jsonScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mod
return scraper.scrapeScene(q)
}
func (s *jsonScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
storedGallery, err := galleryFromUpdateFragment(gallery)
if err != nil {
return nil, err
}
if storedGallery == nil {
return nil, errors.New("no scene found")
}
url := constructGalleryURL(s.scraper.QueryURL, storedGallery)
scraper := s.getJsonScraper()
if scraper == nil {
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
return &jsonQuery{
doc: doc,


@@ -155,6 +155,60 @@ func (s *mappedSceneScraperConfig) UnmarshalYAML(unmarshal func(interface{}) err
return nil
}
type mappedGalleryScraperConfig struct {
mappedConfig
Tags mappedConfig `yaml:"Tags"`
Performers mappedConfig `yaml:"Performers"`
Studio mappedConfig `yaml:"Studio"`
}
type _mappedGalleryScraperConfig mappedGalleryScraperConfig
func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
// HACK - unmarshal to map first, then remove known scene sub-fields, then
// remarshal to yaml and pass that down to the base map
parentMap := make(map[string]interface{})
if err := unmarshal(parentMap); err != nil {
return err
}
// move the known sub-fields to a separate map
thisMap := make(map[string]interface{})
thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags]
thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers]
thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio]
delete(parentMap, mappedScraperConfigSceneTags)
delete(parentMap, mappedScraperConfigScenePerformers)
delete(parentMap, mappedScraperConfigSceneStudio)
// re-unmarshal the sub-fields
yml, err := yaml.Marshal(thisMap)
if err != nil {
return err
}
// needs to be a different type to prevent infinite recursion
c := _mappedGalleryScraperConfig{}
if err := yaml.Unmarshal(yml, &c); err != nil {
return err
}
*s = mappedGalleryScraperConfig(c)
yml, err = yaml.Marshal(parentMap)
if err != nil {
return err
}
if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil {
return err
}
return nil
}
type mappedPerformerScraperConfig struct {
mappedConfig
}
@@ -540,6 +594,7 @@ type mappedScrapers map[string]*mappedScraper
type mappedScraper struct {
Common commonMappedConfig `yaml:"common"`
Scene *mappedSceneScraperConfig `yaml:"scene"`
Gallery *mappedGalleryScraperConfig `yaml:"gallery"`
Performer *mappedPerformerScraperConfig `yaml:"performer"`
Movie *mappedMovieScraperConfig `yaml:"movie"`
}
@@ -687,6 +742,62 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
return &ret, nil
}
func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, error) {
var ret models.ScrapedGallery
galleryScraperConfig := s.Gallery
galleryMap := galleryScraperConfig.mappedConfig
if galleryMap == nil {
return nil, nil
}
galleryPerformersMap := galleryScraperConfig.Performers
galleryTagsMap := galleryScraperConfig.Tags
galleryStudioMap := galleryScraperConfig.Studio
logger.Debug(`Processing gallery:`)
results := galleryMap.process(q, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
// now apply the performers and tags
if galleryPerformersMap != nil {
logger.Debug(`Processing gallery performers:`)
performerResults := galleryPerformersMap.process(q, s.Common)
for _, p := range performerResults {
performer := &models.ScrapedScenePerformer{}
p.apply(performer)
ret.Performers = append(ret.Performers, performer)
}
}
if galleryTagsMap != nil {
logger.Debug(`Processing gallery tags:`)
tagResults := galleryTagsMap.process(q, s.Common)
for _, p := range tagResults {
tag := &models.ScrapedSceneTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
}
if galleryStudioMap != nil {
logger.Debug(`Processing gallery studio:`)
studioResults := galleryStudioMap.process(q, s.Common)
if len(studioResults) > 0 {
studio := &models.ScrapedSceneStudio{}
studioResults[0].apply(studio)
ret.Studio = studio
}
}
}
return &ret, nil
}
func (s mappedScraper) scrapeMovie(q mappedQuery) (*models.ScrapedMovie, error) {
var ret models.ScrapedMovie


@@ -132,6 +132,20 @@ func (c Cache) ListSceneScrapers() []*models.Scraper {
return ret
}
// ListGalleryScrapers returns a list of scrapers that are capable of
// scraping galleries.
func (c Cache) ListGalleryScrapers() []*models.Scraper {
var ret []*models.Scraper
for _, s := range c.scrapers {
// filter on type
if s.supportsGalleries() {
ret = append(ret, s.toScraper())
}
}
return ret
}
// ListMovieScrapers returns a list of scrapers that are capable of
// scraping scenes.
func (c Cache) ListMovieScrapers() []*models.Scraper {
@@ -251,6 +265,31 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error {
return nil
}
func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
for _, p := range ret.Performers {
err := models.MatchScrapedScenePerformer(p)
if err != nil {
return err
}
}
for _, t := range ret.Tags {
err := models.MatchScrapedSceneTag(t)
if err != nil {
return err
}
}
if ret.Studio != nil {
err := models.MatchScrapedSceneStudio(ret.Studio)
if err != nil {
return err
}
}
return nil
}
// ScrapeScene uses the scraper with the provided ID to scrape a scene.
func (c Cache) ScrapeScene(scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
// find scraper with the provided id
@@ -299,6 +338,53 @@ func (c Cache) ScrapeSceneURL(url string) (*models.ScrapedScene, error) {
return nil, nil
}
// ScrapeGallery uses the scraper with the provided ID to scrape a gallery.
func (c Cache) ScrapeGallery(scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
s := c.findScraper(scraperID)
if s != nil {
ret, err := s.ScrapeGallery(gallery, c.globalConfig)
if err != nil {
return nil, err
}
if ret != nil {
err = c.postScrapeGallery(ret)
if err != nil {
return nil, err
}
}
return ret, nil
}
return nil, errors.New("Scraped with ID " + scraperID + " not found")
}
// ScrapeGalleryURL uses the first scraper it finds that matches the URL
// provided to scrape a gallery. If no scrapers are found that match
// the URL, then nil is returned.
func (c Cache) ScrapeGalleryURL(url string) (*models.ScrapedGallery, error) {
for _, s := range c.scrapers {
if s.matchesGalleryURL(url) {
ret, err := s.ScrapeGalleryURL(url, c.globalConfig)
if err != nil {
return nil, err
}
if ret != nil {
err = c.postScrapeGallery(ret)
if err != nil {
return nil, err
}
}
return ret, nil
}
}
return nil, nil
}
func matchMovieStudio(s *models.ScrapedMovieStudio) error {
qb := models.NewStudioQueryBuilder()


@@ -137,6 +137,20 @@ func (s *scriptScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*m
return &ret, err
}
func (s *scriptScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
inString, err := json.Marshal(gallery)
if err != nil {
return nil, err
}
var ret models.ScrapedGallery
err = s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
inString := `{"url": "` + url + `"}`
@@ -147,6 +161,16 @@ func (s *scriptScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, erro
return &ret, err
}
func (s *scriptScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
inString := `{"url": "` + url + `"}`
var ret models.ScrapedGallery
err := s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
inString := `{"url": "` + url + `"}`
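The script scraper keeps the same convention for galleries as for scenes: the gallery is marshalled to JSON (or a {"url": "..."} object is built for URL scrapes), handed to runScraperScript, and the script's JSON output is decoded into models.ScrapedGallery. A self-contained sketch of that decoding step, with stand-in structs whose field names follow the ScrapedGallery GraphQL type (the generated Go model may differ in detail):

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins shaped like the ScrapedGallery GraphQL type above.
type scrapedTag struct {
	Name string `json:"name"`
}

type scrapedGallery struct {
	Title   *string       `json:"title"`
	Details *string       `json:"details"`
	URL     *string       `json:"url"`
	Date    *string       `json:"date"`
	Tags    []*scrapedTag `json:"tags"`
}

func main() {
	// What a scraper script might print to stdout for a URL scrape.
	out := []byte(`{"title":"Sample Gallery","date":"2020-10-21","tags":[{"name":"example"}]}`)

	var ret scrapedGallery
	if err := json.Unmarshal(out, &ret); err != nil {
		fmt.Println("scraper output was not valid JSON:", err)
		return
	}
	fmt.Println(*ret.Title, *ret.Date, ret.Tags[0].Name)
}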


@@ -184,6 +184,68 @@ func (s *stashScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
return &ret, nil
}
func (s *stashScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
// query by checksum
// assumes that the gallery exists in the database
qb := models.NewGalleryQueryBuilder()
id, err := strconv.Atoi(gallery.ID)
if err != nil {
return nil, err
}
storedGallery, err := qb.Find(id, nil)
if err != nil {
return nil, err
}
var q struct {
FindGallery *models.ScrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"`
}
type GalleryHashInput struct {
Checksum *string `graphql:"checksum" json:"checksum"`
}
input := GalleryHashInput{
Checksum: &storedGallery.Checksum,
}
vars := map[string]interface{}{
"c": &input,
}
client := s.getStashClient()
err = client.Query(context.Background(), &q, vars)
if err != nil {
return nil, err
}
if q.FindGallery != nil {
// the ids of the studio, performers and tags must be nilled
if q.FindGallery.Studio != nil {
q.FindGallery.Studio.ID = nil
}
for _, p := range q.FindGallery.Performers {
p.ID = nil
}
for _, t := range q.FindGallery.Tags {
t.ID = nil
}
}
// need to copy back to a scraped gallery
ret := models.ScrapedGallery{}
err = copier.Copy(&ret, q.FindGallery)
if err != nil {
return nil, err
}
return &ret, nil
}
func (s *stashScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
return nil, errors.New("scrapePerformerByURL not supported for stash scraper")
}
@@ -192,6 +254,10 @@ func (s *stashScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error
return nil, errors.New("scrapeSceneByURL not supported for stash scraper") return nil, errors.New("scrapeSceneByURL not supported for stash scraper")
} }
func (s *stashScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByURL not supported for stash scraper")
}
func (s *stashScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
return nil, errors.New("scrapeMovieByURL not supported for stash scraper")
}
@@ -206,3 +272,13 @@ func sceneFromUpdateFragment(scene models.SceneUpdateInput) (*models.Scene, erro
// TODO - should we modify it with the input?
return qb.Find(id)
}
func galleryFromUpdateFragment(gallery models.GalleryUpdateInput) (*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
id, err := strconv.Atoi(gallery.ID)
if err != nil {
return nil, err
}
return qb.Find(id, nil)
}


@@ -146,15 +146,34 @@ type SceneFragment struct {
Performers []*PerformerAppearanceFragment "json:\"performers\" graphql:\"performers\""
Fingerprints []*FingerprintFragment "json:\"fingerprints\" graphql:\"fingerprints\""
}
type GalleryFragment struct {
ID string "json:\"id\" graphql:\"id\""
Title *string "json:\"title\" graphql:\"title\""
Details *string "json:\"details\" graphql:\"details\""
Duration *int "json:\"duration\" graphql:\"duration\""
Date *string "json:\"date\" graphql:\"date\""
Urls []*URLFragment "json:\"urls\" graphql:\"urls\""
Images []*ImageFragment "json:\"images\" graphql:\"images\""
Studio *StudioFragment "json:\"studio\" graphql:\"studio\""
Tags []*TagFragment "json:\"tags\" graphql:\"tags\""
Performers []*PerformerAppearanceFragment "json:\"performers\" graphql:\"performers\""
Fingerprints []*FingerprintFragment "json:\"fingerprints\" graphql:\"fingerprints\""
}
type FindSceneByFingerprint struct {
FindSceneByFingerprint []*SceneFragment "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\""
}
type FindScenesByFingerprints struct {
FindScenesByFingerprints []*SceneFragment "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\""
}
type FindGalleriesByFingerprints struct {
FindGalleriesByFingerprints []*GalleryFragment `json:"findGalleriesByFingerprints" graphql:"findGalleriesByFingerprints"`
}
type SearchScene struct {
SearchScene []*SceneFragment "json:\"searchScene\" graphql:\"searchScene\""
}
type SearchGallery struct {
SearchGallery []*GalleryFragment `json:"searchGallery" graphql:"searchGallery"`
}
type SubmitFingerprintPayload struct {
SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\""
}
@@ -527,6 +546,133 @@ fragment BodyModificationFragment on BodyModification {
}
`
func (c *Client) FindGalleriesByFingerprints(ctx context.Context, fingerprints []string, httpRequestOptions ...client.HTTPRequestOption) (*FindGalleriesByFingerprints, error) {
vars := map[string]interface{}{
"fingerprints": fingerprints,
}
var res FindGalleriesByFingerprints
if err := c.Client.Post(ctx, FindScenesByFingerprintsQuery, &res, vars, httpRequestOptions...); err != nil {
return nil, err
}
return &res, nil
}
const SearchGalleryQuery = `query SearchGallery ($term: String!) {
searchGallery(term: $term) {
... GalleryFragment
}
}
fragment FuzzyDateFragment on FuzzyDate {
date
accuracy
}
fragment MeasurementsFragment on Measurements {
band_size
cup_size
waist
hip
}
fragment FingerprintFragment on Fingerprint {
algorithm
hash
duration
}
fragment GalleryFragment on Gallery {
id
title
details
duration
date
urls {
... URLFragment
}
images {
... ImageFragment
}
studio {
... StudioFragment
}
tags {
... TagFragment
}
performers {
... PerformerAppearanceFragment
}
fingerprints {
... FingerprintFragment
}
}
fragment TagFragment on Tag {
name
id
}
fragment PerformerAppearanceFragment on PerformerAppearance {
as
performer {
... PerformerFragment
}
}
fragment PerformerFragment on Performer {
id
name
disambiguation
aliases
gender
urls {
... URLFragment
}
images {
... ImageFragment
}
birthdate {
... FuzzyDateFragment
}
ethnicity
country
eye_color
hair_color
height
measurements {
... MeasurementsFragment
}
breast_type
career_start_year
career_end_year
tattoos {
... BodyModificationFragment
}
piercings {
... BodyModificationFragment
}
}
fragment URLFragment on URL {
url
type
}
fragment ImageFragment on Image {
id
url
width
height
}
fragment StudioFragment on Studio {
name
id
urls {
... URLFragment
}
images {
... ImageFragment
}
}
fragment BodyModificationFragment on BodyModification {
location
description
}
`
func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchScene, error) {
vars := map[string]interface{}{
"term": term,
@@ -540,6 +686,19 @@ func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOption
return &res, nil
}
func (c *Client) SearchGallery(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchGallery, error) {
vars := map[string]interface{}{
"term": term,
}
var res SearchGallery
if err := c.Client.Post(ctx, SearchGalleryQuery, &res, vars, httpRequestOptions...); err != nil {
return nil, err
}
return &res, nil
}
const SubmitFingerprintQuery = `mutation SubmitFingerprint ($input: FingerprintSubmission!) {
submitFingerprint(input: $input)
}


@@ -38,6 +38,17 @@ func constructSceneURL(url string, scene *models.Scene) string {
return ret
}
func constructGalleryURL(url string, gallery *models.Gallery) string {
// support checksum, title and filename
ret := strings.Replace(url, "{checksum}", gallery.Checksum, -1)
if gallery.Path.Valid {
ret = strings.Replace(url, "{filename}", filepath.Base(gallery.Path.String), -1)
}
ret = strings.Replace(url, "{title}", gallery.Title.String, -1)
return ret
}
func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Reader, error) {
driverOptions := scraperConfig.DriverOptions
if driverOptions != nil && driverOptions.UseCDP {
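constructGalleryURL above mirrors constructSceneURL: it expands {checksum}, {filename} and {title} placeholders in a scraper's queryURL using the stored gallery. A usage sketch with a stand-in Gallery type whose fields match the ones referenced above (Checksum as a plain string, Path and Title as nullable strings):

package main

import (
	"database/sql"
	"fmt"
	"path/filepath"
	"strings"
)

// Stand-in for models.Gallery, limited to the fields used above.
type Gallery struct {
	Checksum string
	Path     sql.NullString
	Title    sql.NullString
}

func constructGalleryURL(url string, gallery *Gallery) string {
	// supports {checksum}, {filename} and {title} placeholders
	ret := strings.Replace(url, "{checksum}", gallery.Checksum, -1)
	if gallery.Path.Valid {
		ret = strings.Replace(ret, "{filename}", filepath.Base(gallery.Path.String), -1)
	}
	ret = strings.Replace(ret, "{title}", gallery.Title.String, -1)
	return ret
}

func main() {
	g := &Gallery{
		Checksum: "abc123",
		Path:     sql.NullString{String: "/data/galleries/holiday.zip", Valid: true},
	}
	// Prints: https://example.org/gallery/abc123/holiday.zip
	fmt.Println(constructGalleryURL("https://example.org/gallery/{checksum}/{filename}", g))
}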


@@ -69,6 +69,16 @@ func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error
return scraper.scrapeScene(q)
}
func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
doc, scraper, err := s.scrapeURL(url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *xpathScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
doc, scraper, err := s.scrapeURL(url)
if err != nil {
@@ -137,6 +147,35 @@ func (s *xpathScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
return scraper.scrapeScene(q)
}
func (s *xpathScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
storedGallery, err := galleryFromUpdateFragment(gallery)
if err != nil {
return nil, err
}
if storedGallery == nil {
return nil, errors.New("no scene found")
}
// construct the URL
url := constructGalleryURL(s.scraper.QueryURL, storedGallery)
scraper := s.getXpathScraper()
if scraper == nil {
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *xpathScraper) loadURL(url string) (*html.Node, error) {
r, err := loadURL(url, s.config, s.globalConfig)
if err != nil {


@@ -2,16 +2,23 @@ import React, { useEffect, useState } from "react";
import { useHistory } from "react-router-dom";
import { Button, Form, Col, Row } from "react-bootstrap";
import * as GQL from "src/core/generated-graphql";
import { useGalleryCreate, useGalleryUpdate } from "src/core/StashService";
import {
queryScrapeGalleryURL,
useGalleryCreate,
useGalleryUpdate,
useListGalleryScrapers,
} from "src/core/StashService";
import {
PerformerSelect,
TagSelect,
StudioSelect,
Icon,
LoadingIndicator,
} from "src/components/Shared";
import { useToast } from "src/hooks";
import { FormUtils, EditableTextUtils } from "src/utils";
import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars";
import { GalleryScrapeDialog } from "./GalleryScrapeDialog";
interface IProps {
isVisible: boolean;
@@ -42,6 +49,13 @@ export const GalleryEditPanel: React.FC<
const [performerIds, setPerformerIds] = useState<string[]>();
const [tagIds, setTagIds] = useState<string[]>();
const Scrapers = useListGalleryScrapers();
const [
scrapedGallery,
setScrapedGallery,
] = useState<GQL.ScrapedGallery | null>();
// Network state
const [isLoading, setIsLoading] = useState(true);
@@ -148,10 +162,121 @@ export const GalleryEditPanel: React.FC<
setIsLoading(false);
}
function onScrapeDialogClosed(gallery?: GQL.ScrapedGalleryDataFragment) {
if (gallery) {
updateGalleryFromScrapedGallery(gallery);
}
setScrapedGallery(undefined);
}
function maybeRenderScrapeDialog() {
if (!scrapedGallery) {
return;
}
const currentGallery = getGalleryInput();
return (
<GalleryScrapeDialog
gallery={currentGallery}
scraped={scrapedGallery}
onClose={(gallery) => {
onScrapeDialogClosed(gallery);
}}
/>
);
}
function urlScrapable(scrapedUrl: string): boolean {
return (Scrapers?.data?.listGalleryScrapers ?? []).some((s) =>
(s?.gallery?.urls ?? []).some((u) => scrapedUrl.includes(u))
);
}
function updateGalleryFromScrapedGallery(
gallery: GQL.ScrapedGalleryDataFragment
) {
if (gallery.title) {
setTitle(gallery.title);
}
if (gallery.details) {
setDetails(gallery.details);
}
if (gallery.date) {
setDate(gallery.date);
}
if (gallery.url) {
setUrl(gallery.url);
}
if (gallery.studio && gallery.studio.stored_id) {
setStudioId(gallery.studio.stored_id);
}
if (gallery.performers && gallery.performers.length > 0) {
const idPerfs = gallery.performers.filter((p) => {
return p.stored_id !== undefined && p.stored_id !== null;
});
if (idPerfs.length > 0) {
const newIds = idPerfs.map((p) => p.stored_id);
setPerformerIds(newIds as string[]);
}
}
if (gallery?.tags?.length) {
const idTags = gallery.tags.filter((p) => {
return p.stored_id !== undefined && p.stored_id !== null;
});
if (idTags.length > 0) {
const newIds = idTags.map((p) => p.stored_id);
setTagIds(newIds as string[]);
}
}
}
async function onScrapeGalleryURL() {
if (!url) {
return;
}
setIsLoading(true);
try {
const result = await queryScrapeGalleryURL(url);
if (!result || !result.data || !result.data.scrapeGalleryURL) {
return;
}
setScrapedGallery(result.data.scrapeGalleryURL);
} catch (e) {
Toast.error(e);
} finally {
setIsLoading(false);
}
}
function maybeRenderScrapeButton() {
if (!url || !urlScrapable(url)) {
return undefined;
}
return (
<Button
className="minimal scrape-url-button"
onClick={onScrapeGalleryURL}
title="Scrape"
>
<Icon className="fa-fw" icon="file-download" />
</Button>
);
}
if (isLoading) return <LoadingIndicator />;
return (
<div id="gallery-edit-details">
{maybeRenderScrapeDialog()}
<div className="form-container row px-3 pt-3"> <div className="form-container row px-3 pt-3">
<div className="col edit-buttons mb-3 pl-0"> <div className="col edit-buttons mb-3 pl-0">
<Button className="edit-button" variant="primary" onClick={onSave}> <Button className="edit-button" variant="primary" onClick={onSave}>
@@ -177,6 +302,9 @@ export const GalleryEditPanel: React.FC<
<Form.Group controlId="url" as={Row}> <Form.Group controlId="url" as={Row}>
<Col xs={3} className="pr-0 url-label"> <Col xs={3} className="pr-0 url-label">
<Form.Label className="col-form-label">URL</Form.Label> <Form.Label className="col-form-label">URL</Form.Label>
<div className="float-right scrape-button-container">
{maybeRenderScrapeButton()}
</div>
</Col> </Col>
<Col xs={9}> <Col xs={9}>
{EditableTextUtils.renderInputGroup({ {EditableTextUtils.renderInputGroup({


@@ -0,0 +1,451 @@
import React, { useState } from "react";
import { StudioSelect, PerformerSelect } from "src/components/Shared";
import * as GQL from "src/core/generated-graphql";
import { TagSelect } from "src/components/Shared/Select";
import {
ScrapeDialog,
ScrapeDialogRow,
ScrapeResult,
ScrapedInputGroupRow,
ScrapedTextAreaRow,
} from "src/components/Shared/ScrapeDialog";
import _ from "lodash";
import {
useStudioCreate,
usePerformerCreate,
useTagCreate,
} from "src/core/StashService";
import { useToast } from "src/hooks";
function renderScrapedStudio(
result: ScrapeResult<string>,
isNew?: boolean,
onChange?: (value: string) => void
) {
const resultValue = isNew ? result.newValue : result.originalValue;
const value = resultValue ? [resultValue] : [];
return (
<StudioSelect
className="form-control react-select"
isDisabled={!isNew}
onSelect={(items) => {
if (onChange) {
onChange(items[0]?.id);
}
}}
ids={value}
/>
);
}
function renderScrapedStudioRow(
result: ScrapeResult<string>,
onChange: (value: ScrapeResult<string>) => void,
newStudio?: GQL.ScrapedSceneStudio,
onCreateNew?: (value: GQL.ScrapedSceneStudio) => void
) {
return (
<ScrapeDialogRow
title="Studio"
result={result}
renderOriginalField={() => renderScrapedStudio(result)}
renderNewField={() =>
renderScrapedStudio(result, true, (value) =>
onChange(result.cloneWithValue(value))
)
}
onChange={onChange}
newValues={newStudio ? [newStudio] : undefined}
onCreateNew={onCreateNew}
/>
);
}
function renderScrapedPerformers(
result: ScrapeResult<string[]>,
isNew?: boolean,
onChange?: (value: string[]) => void
) {
const resultValue = isNew ? result.newValue : result.originalValue;
const value = resultValue ?? [];
return (
<PerformerSelect
isMulti
className="form-control react-select"
isDisabled={!isNew}
onSelect={(items) => {
if (onChange) {
onChange(items.map((i) => i.id));
}
}}
ids={value}
/>
);
}
function renderScrapedPerformersRow(
result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void,
newPerformers: GQL.ScrapedScenePerformer[],
onCreateNew?: (value: GQL.ScrapedScenePerformer) => void
) {
return (
<ScrapeDialogRow
title="Performers"
result={result}
renderOriginalField={() => renderScrapedPerformers(result)}
renderNewField={() =>
renderScrapedPerformers(result, true, (value) =>
onChange(result.cloneWithValue(value))
)
}
onChange={onChange}
newValues={newPerformers}
onCreateNew={onCreateNew}
/>
);
}
function renderScrapedTags(
result: ScrapeResult<string[]>,
isNew?: boolean,
onChange?: (value: string[]) => void
) {
const resultValue = isNew ? result.newValue : result.originalValue;
const value = resultValue ?? [];
return (
<TagSelect
isMulti
className="form-control react-select"
isDisabled={!isNew}
onSelect={(items) => {
if (onChange) {
onChange(items.map((i) => i.id));
}
}}
ids={value}
/>
);
}
function renderScrapedTagsRow(
result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void,
newTags: GQL.ScrapedSceneTag[],
onCreateNew?: (value: GQL.ScrapedSceneTag) => void
) {
return (
<ScrapeDialogRow
title="Tags"
result={result}
renderOriginalField={() => renderScrapedTags(result)}
renderNewField={() =>
renderScrapedTags(result, true, (value) =>
onChange(result.cloneWithValue(value))
)
}
newValues={newTags}
onChange={onChange}
onCreateNew={onCreateNew}
/>
);
}
interface IGalleryScrapeDialogProps {
gallery: Partial<GQL.GalleryUpdateInput>;
scraped: GQL.ScrapedGallery;
onClose: (scrapedGallery?: GQL.ScrapedGallery) => void;
}
interface IHasStoredID {
stored_id?: string | null;
}
export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
props: IGalleryScrapeDialogProps
) => {
const [title, setTitle] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.gallery.title, props.scraped.title)
);
const [url, setURL] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.gallery.url, props.scraped.url)
);
const [date, setDate] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.gallery.date, props.scraped.date)
);
const [studio, setStudio] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(
props.gallery.studio_id,
props.scraped.studio?.stored_id
)
);
const [newStudio, setNewStudio] = useState<
GQL.ScrapedSceneStudio | undefined
>(
props.scraped.studio && !props.scraped.studio.stored_id
? props.scraped.studio
: undefined
);
function mapStoredIdObjects(
scrapedObjects?: IHasStoredID[]
): string[] | undefined {
if (!scrapedObjects) {
return undefined;
}
const ret = scrapedObjects
.map((p) => p.stored_id)
.filter((p) => {
return p !== undefined && p !== null;
}) as string[];
if (ret.length === 0) {
return undefined;
}
// sort by id numerically
ret.sort((a, b) => {
return parseInt(a, 10) - parseInt(b, 10);
});
return ret;
}
function sortIdList(idList?: string[] | null) {
if (!idList) {
return;
}
const ret = _.clone(idList);
// sort by id numerically
ret.sort((a, b) => {
return parseInt(a, 10) - parseInt(b, 10);
});
return ret;
}
const [performers, setPerformers] = useState<ScrapeResult<string[]>>(
new ScrapeResult<string[]>(
sortIdList(props.gallery.performer_ids),
mapStoredIdObjects(props.scraped.performers ?? undefined)
)
);
const [newPerformers, setNewPerformers] = useState<
GQL.ScrapedScenePerformer[]
>(props.scraped.performers?.filter((t) => !t.stored_id) ?? []);
const [tags, setTags] = useState<ScrapeResult<string[]>>(
new ScrapeResult<string[]>(
sortIdList(props.gallery.tag_ids),
mapStoredIdObjects(props.scraped.tags ?? undefined)
)
);
const [newTags, setNewTags] = useState<GQL.ScrapedSceneTag[]>(
props.scraped.tags?.filter((t) => !t.stored_id) ?? []
);
const [details, setDetails] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.gallery.details, props.scraped.details)
);
const [createStudio] = useStudioCreate({ name: "" });
const [createPerformer] = usePerformerCreate();
const [createTag] = useTagCreate({ name: "" });
const Toast = useToast();
// don't show the dialog if nothing was scraped
if (
[title, url, date, studio, performers, tags, details].every(
(r) => !r.scraped
)
) {
props.onClose();
return <></>;
}
async function createNewStudio(toCreate: GQL.ScrapedSceneStudio) {
try {
const result = await createStudio({
variables: {
name: toCreate.name,
url: toCreate.url,
},
});
// set the new studio as the value
setStudio(studio.cloneWithValue(result.data!.studioCreate!.id));
setNewStudio(undefined);
Toast.success({
content: (
<span>
Created studio: <b>{toCreate.name}</b>
</span>
),
});
} catch (e) {
Toast.error(e);
}
}
async function createNewPerformer(toCreate: GQL.ScrapedScenePerformer) {
let performerInput: GQL.PerformerCreateInput = {};
try {
performerInput = Object.assign(performerInput, toCreate);
const result = await createPerformer({
variables: performerInput,
});
// add the new performer to the new performers value
const performerClone = performers.cloneWithValue(performers.newValue);
if (!performerClone.newValue) {
performerClone.newValue = [];
}
performerClone.newValue.push(result.data!.performerCreate!.id);
setPerformers(performerClone);
// remove the performer from the list
const newPerformersClone = newPerformers.concat();
const pIndex = newPerformersClone.indexOf(toCreate);
newPerformersClone.splice(pIndex, 1);
setNewPerformers(newPerformersClone);
Toast.success({
content: (
<span>
Created performer: <b>{toCreate.name}</b>
</span>
),
});
} catch (e) {
Toast.error(e);
}
}
async function createNewTag(toCreate: GQL.ScrapedSceneTag) {
let tagInput: GQL.TagCreateInput = { name: "" };
try {
tagInput = Object.assign(tagInput, toCreate);
const result = await createTag({
variables: tagInput,
});
// add the new tag to the new tags value
const tagClone = tags.cloneWithValue(tags.newValue);
if (!tagClone.newValue) {
tagClone.newValue = [];
}
tagClone.newValue.push(result.data!.tagCreate!.id);
setTags(tagClone);
// remove the tag from the list
const newTagsClone = newTags.concat();
const pIndex = newTagsClone.indexOf(toCreate);
newTagsClone.splice(pIndex, 1);
setNewTags(newTagsClone);
Toast.success({
content: (
<span>
Created tag: <b>{toCreate.name}</b>
</span>
),
});
} catch (e) {
Toast.error(e);
}
}
function makeNewScrapedItem(): GQL.ScrapedGalleryDataFragment {
const newStudioValue = studio.getNewValue();
return {
title: title.getNewValue(),
url: url.getNewValue(),
date: date.getNewValue(),
studio: newStudioValue
? {
stored_id: newStudioValue,
name: "",
}
: undefined,
performers: performers.getNewValue()?.map((p) => {
return {
stored_id: p,
name: "",
};
}),
tags: tags.getNewValue()?.map((m) => {
return {
stored_id: m,
name: "",
};
}),
details: details.getNewValue(),
};
}
function renderScrapeRows() {
return (
<>
<ScrapedInputGroupRow
title="Title"
result={title}
onChange={(value) => setTitle(value)}
/>
<ScrapedInputGroupRow
title="URL"
result={url}
onChange={(value) => setURL(value)}
/>
<ScrapedInputGroupRow
title="Date"
placeholder="YYYY-MM-DD"
result={date}
onChange={(value) => setDate(value)}
/>
{renderScrapedStudioRow(
studio,
(value) => setStudio(value),
newStudio,
createNewStudio
)}
{renderScrapedPerformersRow(
performers,
(value) => setPerformers(value),
newPerformers,
createNewPerformer
)}
{renderScrapedTagsRow(
tags,
(value) => setTags(value),
newTags,
createNewTag
)}
<ScrapedTextAreaRow
title="Details"
result={details}
onChange={(value) => setDetails(value)}
/>
</>
);
}
return (
<ScrapeDialog
title="Gallery Scrape Results"
renderScrapeRows={renderScrapeRows}
onClose={(apply) => {
props.onClose(apply ? makeNewScrapedItem() : undefined);
}}
/>
);
};


@@ -218,6 +218,8 @@ export const useScrapePerformer = (
export const useListSceneScrapers = () => GQL.useListSceneScrapersQuery();
export const useListGalleryScrapers = () => GQL.useListGalleryScrapersQuery();
export const useListMovieScrapers = () => GQL.useListMovieScrapersQuery();
export const useScrapeFreeonesPerformers = (q: string) =>
@@ -673,6 +675,15 @@ export const queryScrapeSceneURL = (url: string) =>
fetchPolicy: "network-only", fetchPolicy: "network-only",
}); });
export const queryScrapeGalleryURL = (url: string) =>
client.query<GQL.ScrapeGalleryUrlQuery>({
query: GQL.ScrapeGalleryUrlDocument,
variables: {
url,
},
fetchPolicy: "network-only",
});
export const queryScrapeMovieURL = (url: string) =>
client.query<GQL.ScrapeMovieUrlQuery>({
query: GQL.ScrapeMovieUrlDocument,
@@ -706,6 +717,19 @@ export const queryStashBoxScene = (stashBoxIndex: number, sceneID: string) =>
},
});
export const queryScrapeGallery = (
scraperId: string,
gallery: GQL.GalleryUpdateInput
) =>
client.query<GQL.ScrapeGalleryQuery>({
query: GQL.ScrapeGalleryDocument,
variables: {
scraper_id: scraperId,
gallery,
},
fetchPolicy: "network-only",
});
export const mutateReloadScrapers = () =>
client.mutate<GQL.ReloadScrapersMutation>({
mutation: GQL.ReloadScrapersDocument,