diff --git a/graphql/documents/queries/scrapers/scrapers.graphql b/graphql/documents/queries/scrapers/scrapers.graphql
new file mode 100644
index 000000000..9db8273e9
--- /dev/null
+++ b/graphql/documents/queries/scrapers/scrapers.graphql
@@ -0,0 +1,67 @@
+query ListScrapers($scraper_type: ScraperType!) {
+  listScrapers(scraper_type: $scraper_type) {
+    id
+    name
+    type
+    urls
+    supported_scrapes
+  }
+}
+
+query ScrapePerformerList($scraper_id: ID!, $query: String!) {
+  scrapePerformerList(scraper_id: $scraper_id, query: $query) {
+    name
+    url
+    birthdate
+    ethnicity
+    country
+    eye_color
+    height
+    measurements
+    fake_tits
+    career_length
+    tattoos
+    piercings
+    aliases
+  }
+}
+
+query ScrapePerformer($scraper_id: ID!, $scraped_performer: ScrapedPerformerInput!) {
+  scrapePerformer(scraper_id: $scraper_id, scraped_performer: $scraped_performer) {
+    name
+    url
+    twitter
+    instagram
+    birthdate
+    ethnicity
+    country
+    eye_color
+    height
+    measurements
+    fake_tits
+    career_length
+    tattoos
+    piercings
+    aliases
+  }
+}
+
+query ScrapePerformerURL($url: String!) {
+  scrapePerformerURL(url: $url) {
+    name
+    url
+    twitter
+    instagram
+    birthdate
+    ethnicity
+    country
+    eye_color
+    height
+    measurements
+    fake_tits
+    career_length
+    tattoos
+    piercings
+    aliases
+  }
+}
\ No newline at end of file
diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql
index 60f02cd6c..690658130 100644
--- a/graphql/schema/schema.graphql
+++ b/graphql/schema/schema.graphql
@@ -45,6 +45,15 @@ type Query {
 
   # Scrapers
 
+  """List available scrapers"""
+  listScrapers(scraper_type: ScraperType!): [Scraper!]!
+  """Scrape a list of performers based on name"""
+  scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]!
+  """Scrapes a complete performer record based on a scrapePerformerList result"""
+  scrapePerformer(scraper_id: ID!, scraped_performer: ScrapedPerformerInput!): ScrapedPerformer
+  """Scrapes a complete performer record based on a URL"""
+  scrapePerformerURL(url: String!): ScrapedPerformer
+
   """Scrape a performer using Freeones"""
   scrapeFreeones(performer_name: String!): ScrapedPerformer
   """Scrape a list of performers from a query"""
diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql
index 4df0d3f37..a16f3df23 100644
--- a/graphql/schema/types/scraped-performer.graphql
+++ b/graphql/schema/types/scraped-performer.graphql
@@ -15,4 +15,22 @@ type ScrapedPerformer {
   tattoos: String
   piercings: String
   aliases: String
+}
+
+input ScrapedPerformerInput {
+  name: String
+  url: String
+  twitter: String
+  instagram: String
+  birthdate: String
+  ethnicity: String
+  country: String
+  eye_color: String
+  height: String
+  measurements: String
+  fake_tits: String
+  career_length: String
+  tattoos: String
+  piercings: String
+  aliases: String
 }
\ No newline at end of file
diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql
new file mode 100644
index 000000000..c0c83c4cd
--- /dev/null
+++ b/graphql/schema/types/scraper.graphql
@@ -0,0 +1,16 @@
+enum ScraperType {
+  PERFORMER
+}
+
+enum ScrapeType {
+  QUERY
+  URL
+}
+
+type Scraper {
+  id: ID!
+  name: String!
+  type: ScraperType!
+  urls: [String!]
+  supported_scrapes: [ScrapeType!]!
+}
\ No newline at end of file
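For reference, the intended client flow for the new operations is: list the available scrapers, run a name query against one, then hydrate the chosen candidate into a complete record. A minimal sketch (the scraper id `builtin_freeones` comes from the built-in scraper below; the performer name is illustrative):

```graphql
# 1. Discover performer scrapers and what they support
query { listScrapers(scraper_type: PERFORMER) { id name supported_scrapes } }

# 2. Query candidates by name against a chosen scraper
query { scrapePerformerList(scraper_id: "builtin_freeones", query: "Jane Example") { name } }

# 3. Hydrate a chosen candidate into a complete record
query {
  scrapePerformer(scraper_id: "builtin_freeones", scraped_performer: { name: "Jane Example" }) {
    name birthdate measurements aliases
  }
}
```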
diff --git a/pkg/api/resolver.go b/pkg/api/resolver.go
index 7e8f3d15f..40fcf1e9b 100644
--- a/pkg/api/resolver.go
+++ b/pkg/api/resolver.go
@@ -8,7 +8,6 @@ import (
 
 	"github.com/99designs/gqlgen/graphql"
 	"github.com/stashapp/stash/pkg/models"
-	"github.com/stashapp/stash/pkg/scraper"
 )
 
 type Resolver struct{}
@@ -161,14 +160,6 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
 	return result, nil
 }
 
-func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name string) (*models.ScrapedPerformer, error) {
-	return scraper.GetPerformer(performer_name)
-}
-
-func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
-	return scraper.GetPerformerNames(query)
-}
-
 // wasFieldIncluded returns true if the given field was included in the request.
 // Slices are unmarshalled to empty slices even if the field was omitted. This
 // method determines if it was omitted altogether.
diff --git a/pkg/api/resolver_query_scraper.go b/pkg/api/resolver_query_scraper.go
new file mode 100644
index 000000000..30d07548e
--- /dev/null
+++ b/pkg/api/resolver_query_scraper.go
@@ -0,0 +1,53 @@
+package api
+
+import (
+	"context"
+
+	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/scraper"
+)
+
+// deprecated
+func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name string) (*models.ScrapedPerformer, error) {
+	scrapedPerformer := models.ScrapedPerformerInput{
+		Name: &performer_name,
+	}
+	return scraper.GetFreeonesScraper().ScrapePerformer(scrapedPerformer)
+}
+
+// deprecated
+func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
+	scrapedPerformers, err := scraper.GetFreeonesScraper().ScrapePerformerNames(query)
+
+	if err != nil {
+		return nil, err
+	}
+
+	var ret []string
+	for _, v := range scrapedPerformers {
+		name := v.Name
+		ret = append(ret, *name)
+	}
+
+	return ret, nil
+}
+
+func (r *queryResolver) ListScrapers(ctx context.Context, scraperType models.ScraperType) ([]*models.Scraper, error) {
+	return scraper.ListScrapers(scraperType)
+}
+
+func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
+	if query == "" {
+		return nil, nil
+	}
+
+	return scraper.ScrapePerformerList(scraperID, query)
+}
+
+func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
+	return scraper.ScrapePerformer(scraperID, scrapedPerformer)
+}
+
+func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
+	return scraper.ScrapePerformerURL(url)
+}
diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go
index 2fb67dd9b..3d64de6fd 100644
--- a/pkg/manager/config/config.go
+++ b/pkg/manager/config/config.go
@@ -22,6 +22,8 @@ const Password = "password"
 
 const Database = "database"
 
+const ScrapersPath = "scrapers_path"
+
 const MaxTranscodeSize = "max_transcode_size"
 const MaxStreamingTranscodeSize = "max_streaming_transcode_size"
 
@@ -73,6 +75,20 @@ func GetDatabasePath() string {
 	return viper.GetString(Database)
 }
 
+func GetDefaultScrapersPath() string {
+	// default to the same directory as the config file
+	configFileUsed := viper.ConfigFileUsed()
+	configDir := filepath.Dir(configFileUsed)
+
+	fn := filepath.Join(configDir, "scrapers")
+
+	return fn
+}
+
+func GetScrapersPath() string {
+	return viper.GetString(ScrapersPath)
+}
+
 func GetHost() string {
 	return viper.GetString(Host)
 }
diff --git a/pkg/manager/manager.go b/pkg/manager/manager.go
index 437f3d8da..85efe074c 100644
--- a/pkg/manager/manager.go
+++ b/pkg/manager/manager.go
@@ -71,6 +71,9 @@ func initConfig() {
 	// Set generated to the metadata path for backwards compat
 	viper.SetDefault(config.Generated, viper.GetString(config.Metadata))
 
+	// Set default scrapers path
+	viper.SetDefault(config.ScrapersPath, config.GetDefaultScrapersPath())
+
 	// Disabling config watching due to race condition issue
 	// See: https://github.com/spf13/viper/issues/174
 	// Changes to the config outside the system will require a restart
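With the default in place, `scrapers_path` only needs to be set when the scrapers directory lives somewhere other than next to the config file. A minimal sketch, assuming the usual YAML config file (the path shown is illustrative):

```yaml
# config.yml
scrapers_path: /home/user/.stash/scrapers
```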
diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go
index a161e00cc..7b6f53168 100644
--- a/pkg/scraper/freeones.go
+++ b/pkg/scraper/freeones.go
@@ -2,17 +2,38 @@ package scraper
 
 import (
 	"fmt"
-	"github.com/PuerkitoBio/goquery"
-	"github.com/stashapp/stash/pkg/logger"
-	"github.com/stashapp/stash/pkg/models"
 	"net/http"
 	"net/url"
 	"regexp"
 	"strings"
 	"time"
+
+	"github.com/PuerkitoBio/goquery"
+	"github.com/stashapp/stash/pkg/logger"
+	"github.com/stashapp/stash/pkg/models"
 )
 
-func GetPerformerNames(q string) ([]string, error) {
+const freeonesScraperID = "builtin_freeones"
+const freeonesName = "Freeones"
+
+var freeonesURLs = []string{
+	"freeones.com",
+}
+
+func GetFreeonesScraper() scraperConfig {
+	return scraperConfig{
+		ID:     freeonesScraperID,
+		Name:   freeonesName,
+		Type:   models.ScraperTypePerformer,
+		Method: ScraperMethodBuiltin,
+		URLs:   freeonesURLs,
+
+		scrapePerformerNamesFunc: GetPerformerNames,
+		scrapePerformerFunc:      GetPerformer,
+		scrapePerformerURLFunc:   GetPerformerURL,
+	}
+}
+
+func GetPerformerNames(c scraperConfig, q string) ([]*models.ScrapedPerformer, error) {
 	// Request the HTML page.
 	queryURL := "https://www.freeones.com/suggestions.php?q=" + url.PathEscape(q) + "&t=1"
 	res, err := http.Get(queryURL)
@@ -31,65 +52,42 @@ func GetPerformerNames(q string) ([]string, error) {
 	}
 
 	// Find the performers
-	var performerNames []string
+	var performers []*models.ScrapedPerformer
 	doc.Find(".suggestion").Each(func(i int, s *goquery.Selection) {
 		name := strings.Trim(s.Text(), " ")
-		performerNames = append(performerNames, name)
+		p := models.ScrapedPerformer{
+			Name: &name,
+		}
+		performers = append(performers, &p)
 	})
 
-	return performerNames, nil
+	return performers, nil
 }
 
-func GetPerformer(performerName string) (*models.ScrapedPerformer, error) {
-	queryURL := "https://www.freeones.com/search/?t=1&q=" + url.PathEscape(performerName) + "&view=thumbs"
-	res, err := http.Get(queryURL)
-	if err != nil {
-		return nil, err
-	}
-	defer res.Body.Close()
-	if res.StatusCode != 200 {
-		return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
+func GetPerformerURL(c scraperConfig, href string) (*models.ScrapedPerformer, error) {
+	// if we're already in the bio page, just scrape it
+	if regexp.MustCompile(`\/bio_.*\.php$`).MatchString(href) {
+		return getPerformerBio(c, href)
 	}
 
-	// Load the HTML document
-	doc, err := goquery.NewDocumentFromReader(res.Body)
-	if err != nil {
-		return nil, err
+	// otherwise try to get the bio page from the url
+	profileRE := regexp.MustCompile(`_links\/(.*?)\/$`)
+	if profileRE.MatchString(href) {
+		href = profileRE.ReplaceAllString(href, "_links/bio_$1.php")
+		return getPerformerBio(c, href)
 	}
 
-	performerLink := doc.Find("div.Block3 a").FilterFunction(func(i int, s *goquery.Selection) bool {
-		href, _ := s.Attr("href")
-		if href == "/html/j_links/Jenna_Leigh_c/" || href == "/html/a_links/Alexa_Grace_c/" {
-			return false
-		}
-		if strings.ToLower(s.Text()) == strings.ToLower(performerName) {
-			return true
-		}
-		alias := s.ParentsFiltered(".babeNameBlock").Find(".babeAlias").First();
-		if strings.Contains( strings.ToLower(alias.Text()), strings.ToLower(performerName) ) {
-			return true
-		}
-		return false
-	})
+	return nil, nil
+}
 
-	href, _ := performerLink.Attr("href")
-	href = strings.TrimSuffix(href, "/")
-	regex := regexp.MustCompile(`.+_links\/(.+)`)
-	matches := regex.FindStringSubmatch(href)
-	if len(matches) < 2 {
-		return nil, fmt.Errorf("No matches found in %s",href)
-	}
-
-	href = strings.Replace(href, matches[1], "bio_"+matches[1]+".php", -1)
-	href = "https://www.freeones.com" + href
-
+func getPerformerBio(c scraperConfig, href string) (*models.ScrapedPerformer, error) {
 	bioRes, err := http.Get(href)
 	if err != nil {
 		return nil, err
 	}
 	defer bioRes.Body.Close()
-	if res.StatusCode != 200 {
-		return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
+	if bioRes.StatusCode != 200 {
+		return nil, fmt.Errorf("status code error: %d %s", bioRes.StatusCode, bioRes.Status)
 	}
 
 	// Load the HTML document
@@ -175,6 +173,57 @@ func GetPerformer(performerName string) (*models.ScrapedPerformer, error) {
 	return &result, nil
 }
 
+func GetPerformer(c scraperConfig, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
+	if scrapedPerformer.Name == nil {
+		return nil, nil
+	}
+
+	performerName := *scrapedPerformer.Name
+	queryURL := "https://www.freeones.com/search/?t=1&q=" + url.PathEscape(performerName) + "&view=thumbs"
+	res, err := http.Get(queryURL)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close()
+	if res.StatusCode != 200 {
+		return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
+	}
+
+	// Load the HTML document
+	doc, err := goquery.NewDocumentFromReader(res.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	performerLink := doc.Find("div.Block3 a").FilterFunction(func(i int, s *goquery.Selection) bool {
+		href, _ := s.Attr("href")
+		if href == "/html/j_links/Jenna_Leigh_c/" || href == "/html/a_links/Alexa_Grace_c/" {
+			return false
+		}
+		if strings.ToLower(s.Text()) == strings.ToLower(performerName) {
+			return true
+		}
+		alias := s.ParentsFiltered(".babeNameBlock").Find(".babeAlias").First()
+		if strings.Contains(strings.ToLower(alias.Text()), strings.ToLower(performerName)) {
+			return true
+		}
+		return false
+	})
+
+	href, _ := performerLink.Attr("href")
+	href = strings.TrimSuffix(href, "/")
+	regex := regexp.MustCompile(`.+_links\/(.+)`)
+	matches := regex.FindStringSubmatch(href)
+	if len(matches) < 2 {
+		return nil, fmt.Errorf("No matches found in %s", href)
+	}
+
+	href = strings.Replace(href, matches[1], "bio_"+matches[1]+".php", -1)
+	href = "https://www.freeones.com" + href
+
+	return getPerformerBio(c, href)
+}
+
 func getIndexes(doc *goquery.Document) map[string]int {
 	var indexes = make(map[string]int)
 	doc.Find(".paramname").Each(func(i int, s *goquery.Selection) {
@@ -236,7 +285,7 @@ func paramValue(params *goquery.Selection, paramIndex int) string {
 		return content
 	}
 	node = node.NextSibling
-	if (node == nil) {
+	if node == nil {
 		return ""
 	}
 	return trim(node.FirstChild.Data)
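To make `GetPerformerURL`'s rewrite concrete: a profile URL is turned into its bio page by the `_links` regex. A runnable sketch using the same pattern (the URL is illustrative):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as GetPerformerURL: rewrite a profile URL to its bio page.
	profileRE := regexp.MustCompile(`_links\/(.*?)\/$`)
	href := "https://www.freeones.com/html/j_links/Jane_Example_c/"
	fmt.Println(profileRE.ReplaceAllString(href, "_links/bio_$1.php"))
	// Output: https://www.freeones.com/html/j_links/bio_Jane_Example_c.php
}
```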
diff --git a/pkg/scraper/scrapers.go b/pkg/scraper/scrapers.go
new file mode 100644
index 000000000..44e7b862e
--- /dev/null
+++ b/pkg/scraper/scrapers.go
@@ -0,0 +1,318 @@
+package scraper
+
+import (
+	"encoding/json"
+	"errors"
+	"io"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+
+	"github.com/stashapp/stash/pkg/logger"
+	"github.com/stashapp/stash/pkg/manager/config"
+	"github.com/stashapp/stash/pkg/models"
+)
+
+type ScraperMethod string
+
+const (
+	ScraperMethodScript  ScraperMethod = "SCRIPT"
+	ScraperMethodBuiltin ScraperMethod = "BUILTIN"
+)
+
+var AllScraperMethod = []ScraperMethod{
+	ScraperMethodScript,
+}
+
+func (e ScraperMethod) IsValid() bool {
+	switch e {
+	case ScraperMethodScript:
+		return true
+	}
+	return false
+}
+
+type scraperConfig struct {
+	ID                string             `json:"id"`
+	Name              string             `json:"name"`
+	Type              models.ScraperType `json:"type"`
+	Method            ScraperMethod      `json:"method"`
+	URLs              []string           `json:"urls"`
+	GetPerformerNames []string           `json:"get_performer_names"`
+	GetPerformer      []string           `json:"get_performer"`
+	GetPerformerURL   []string           `json:"get_performer_url"`
+
+	scrapePerformerNamesFunc func(c scraperConfig, name string) ([]*models.ScrapedPerformer, error)
+	scrapePerformerFunc      func(c scraperConfig, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
+	scrapePerformerURLFunc   func(c scraperConfig, url string) (*models.ScrapedPerformer, error)
+}
+
+func (c scraperConfig) toScraper() *models.Scraper {
+	ret := models.Scraper{
+		ID:   c.ID,
+		Name: c.Name,
+		Type: c.Type,
+		Urls: c.URLs,
+	}
+
+	// determine supported actions
+	if len(c.URLs) > 0 {
+		ret.SupportedScrapes = append(ret.SupportedScrapes, models.ScrapeTypeURL)
+	}
+
+	if c.scrapePerformerNamesFunc != nil && c.scrapePerformerFunc != nil {
+		ret.SupportedScrapes = append(ret.SupportedScrapes, models.ScrapeTypeQuery)
+	}
+
+	return &ret
+}
+
+func (c *scraperConfig) postDecode() {
+	if c.Method == ScraperMethodScript {
+		// only set scrape performer names/performer if the applicable field is set
+		if len(c.GetPerformer) > 0 && len(c.GetPerformerNames) > 0 {
+			c.scrapePerformerNamesFunc = scrapePerformerNamesScript
+			c.scrapePerformerFunc = scrapePerformerScript
+		}
+		c.scrapePerformerURLFunc = scrapePerformerURLScript
+	}
+}
+
+func (c scraperConfig) ScrapePerformerNames(name string) ([]*models.ScrapedPerformer, error) {
+	return c.scrapePerformerNamesFunc(c, name)
+}
+
+func (c scraperConfig) ScrapePerformer(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
+	return c.scrapePerformerFunc(c, scrapedPerformer)
+}
+
+func (c scraperConfig) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
+	return c.scrapePerformerURLFunc(c, url)
+}
+
+func runScraperScript(command []string, inString string, out interface{}) error {
+	cmd := exec.Command(command[0], command[1:]...)
+	cmd.Dir = config.GetScrapersPath()
+
+	stdin, err := cmd.StdinPipe()
+	if err != nil {
+		return err
+	}
+
+	go func() {
+		defer stdin.Close()
+
+		io.WriteString(stdin, inString)
+	}()
+
+	stderr, err := cmd.StderrPipe()
+	if err != nil {
+		logger.Error("Scraper stderr not available: " + err.Error())
+	}
+
+	stdout, err := cmd.StdoutPipe()
+	if nil != err {
+		logger.Error("Scraper stdout not available: " + err.Error())
+	}
+
+	if err = cmd.Start(); err != nil {
+		return errors.New("Error running scraper script")
+	}
+
+	// TODO - add a timeout here
+	decodeErr := json.NewDecoder(stdout).Decode(out)
+
+	stderrData, _ := ioutil.ReadAll(stderr)
+	stderrString := string(stderrData)
+
+	err = cmd.Wait()
+
+	if err != nil {
+		// error message should be in the stderr stream
+		logger.Errorf("scraper error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderrString)
+		return errors.New("Error running scraper script")
+	}
+
+	if decodeErr != nil {
+		logger.Errorf("error decoding performer from scraper data: %s", decodeErr.Error())
+		return errors.New("Error decoding performer from scraper script")
+	}
+
+	return nil
+}
+
+func scrapePerformerNamesScript(c scraperConfig, name string) ([]*models.ScrapedPerformer, error) {
+	inString := `{"name": "` + name + `"}`
+
+	var performers []models.ScrapedPerformer
+
+	err := runScraperScript(c.GetPerformerNames, inString, &performers)
+
+	// convert to pointers
+	var ret []*models.ScrapedPerformer
+	if err == nil {
+		for i := 0; i < len(performers); i++ {
+			ret = append(ret, &performers[i])
+		}
+	}
+
+	return ret, err
+}
+
+func scrapePerformerScript(c scraperConfig, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
+	inString, err := json.Marshal(scrapedPerformer)
+
+	if err != nil {
+		return nil, err
+	}
+
+	var ret models.ScrapedPerformer
+
+	err = runScraperScript(c.GetPerformer, string(inString), &ret)
+
+	return &ret, err
+}
+
+func scrapePerformerURLScript(c scraperConfig, url string) (*models.ScrapedPerformer, error) {
+	inString := `{"url": "` + url + `"}`
+
+	var ret models.ScrapedPerformer
+
+	err := runScraperScript(c.GetPerformerURL, string(inString), &ret)
+
+	return &ret, err
+}
+
+var scrapers []scraperConfig
+
+func loadScraper(path string) (*scraperConfig, error) {
+	var scraper scraperConfig
+	file, err := os.Open(path)
+	if err != nil {
+		return nil, err
+	}
+	defer file.Close()
+	jsonParser := json.NewDecoder(file)
+	err = jsonParser.Decode(&scraper)
+	if err != nil {
+		return nil, err
+	}
+
+	// set id to the filename
+	id := filepath.Base(path)
+	id = id[:strings.LastIndex(id, ".")]
+	scraper.ID = id
+	scraper.postDecode()
+
+	return &scraper, nil
+}
+
+func loadScrapers() ([]scraperConfig, error) {
+	if scrapers != nil {
+		return scrapers, nil
+	}
+
+	path := config.GetScrapersPath()
+	scrapers = make([]scraperConfig, 0)
+
+	logger.Debugf("Reading scraper configs from %s", path)
+	scraperFiles, err := filepath.Glob(filepath.Join(path, "*.json"))
+
+	if err != nil {
+		logger.Errorf("Error reading scraper configs: %s", err.Error())
+		return nil, err
+	}
+
+	// add built-in freeones scraper
+	scrapers = append(scrapers, GetFreeonesScraper())
+
+	for _, file := range scraperFiles {
+		scraper, err := loadScraper(file)
+		if err != nil {
+			logger.Errorf("Error loading scraper %s: %s", file, err.Error())
+		} else {
+			scrapers = append(scrapers, *scraper)
+		}
+	}
+
+	return scrapers, nil
+}
+
+func ListScrapers(scraperType models.ScraperType) ([]*models.Scraper, error) {
+	// read scraper config files from the directory and cache
+	scrapers, err := loadScrapers()
+
+	if err != nil {
+		return nil, err
+	}
+
+	var ret []*models.Scraper
+	for _, s := range scrapers {
+		// filter on type
+		if s.Type == scraperType {
+			ret = append(ret, s.toScraper())
+		}
+	}
+
+	return ret, nil
+}
+
+func findPerformerScraper(scraperID string) *scraperConfig {
+	// read scraper config files from the directory and cache
+	loadScrapers()
+
+	for _, s := range scrapers {
+		if s.ID == scraperID {
+			return &s
+		}
+	}
+
+	return nil
+}
+
+func findPerformerScraperURL(url string) *scraperConfig {
+	// read scraper config files from the directory and cache
+	loadScrapers()
+
+	for _, s := range scrapers {
+		for _, thisURL := range s.URLs {
+			if strings.Contains(url, thisURL) {
+				return &s
+			}
+		}
+	}
+
+	return nil
+}
+
+func ScrapePerformerList(scraperID string, query string) ([]*models.ScrapedPerformer, error) {
+	// find scraper with the provided id
+	s := findPerformerScraper(scraperID)
+	if s != nil {
+		return s.ScrapePerformerNames(query)
+	}
+
+	return nil, errors.New("Scraper with ID " + scraperID + " not found")
+}
+
+func ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
+	// find scraper with the provided id
+	s := findPerformerScraper(scraperID)
+	if s != nil {
+		return s.ScrapePerformer(scrapedPerformer)
+	}
+
+	return nil, errors.New("Scraper with ID " + scraperID + " not found")
+}
+
+func ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
+	// find scraper that matches the url given
+	s := findPerformerScraperURL(url)
+	if s != nil {
+		return s.ScrapePerformerURL(url)
+	}
+
+	return nil, nil
+}
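For reference, a user-defined script scraper is a JSON file dropped into the scrapers path; `loadScraper` replaces its `id` with the filename (minus extension). A sketch using the field names from `scraperConfig`'s json tags (the `example-scraper` binary and its subcommands are hypothetical); note that `postDecode` only enables name queries when both `get_performer_names` and `get_performer` are set:

```json
{
  "name": "Example",
  "type": "PERFORMER",
  "method": "SCRIPT",
  "urls": ["example.com"],
  "get_performer_names": ["./example-scraper", "names"],
  "get_performer": ["./example-scraper", "performer"],
  "get_performer_url": ["./example-scraper", "url"]
}
```

The script side of the contract implied by `runScraperScript`: read one JSON object from stdin, write the JSON result to stdout (an array of performers for `get_performer_names`, a single object otherwise), and report failures on stderr with a non-zero exit. A minimal sketch of the `names` case in Go, since the command array is handed straight to exec and any executable works:

```go
package main

import (
	"encoding/json"
	"os"
)

type performer struct {
	Name *string `json:"name,omitempty"`
}

func main() {
	// Input for get_performer_names is {"name": "..."}.
	var in struct {
		Name string `json:"name"`
	}
	if err := json.NewDecoder(os.Stdin).Decode(&in); err != nil {
		os.Stderr.WriteString(err.Error()) // surfaced via the stderr logging above
		os.Exit(1)
	}

	// Echo a single candidate back; a real scraper would search here.
	out := []performer{{Name: &in.Name}}
	json.NewEncoder(os.Stdout).Encode(out)
}
```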
diff --git a/ui/v2/src/components/Shared/DetailsEditNavbar.tsx b/ui/v2/src/components/Shared/DetailsEditNavbar.tsx
index 2bedc0d48..8be580483 100644
--- a/ui/v2/src/components/Shared/DetailsEditNavbar.tsx
+++ b/ui/v2/src/components/Shared/DetailsEditNavbar.tsx
@@ -25,7 +25,8 @@ interface IProps {
   onImageChange: (event: React.FormEvent<HTMLInputElement>) => void;
 
   // TODO: only for performers. make generic
-  onDisplayFreeOnesDialog?: () => void;
+  scrapers?: GQL.ListScrapersListScrapers[];
+  onDisplayScraperDialog?: (scraper: GQL.ListScrapersListScrapers) => void;
 }
 
 export const DetailsEditNavbar: FunctionComponent<IProps> = (props: IProps) => {
@@ -57,15 +58,21 @@ export const DetailsEditNavbar: FunctionComponent<IProps> = (props: IProps) => {
     return ;
   }
 
+  function renderScraperMenuItem(scraper : GQL.ListScrapersListScrapers) {
+    return (
+      <MenuItem
+        text={scraper.name}
+        onClick={() => { if (props.onDisplayScraperDialog) { props.onDisplayScraperDialog(scraper); }}}
+      />
+    );
+  }
+
   function renderScraperMenu() {
     if (!props.performer) { return; }
     if (!props.isEditing) { return; }
     const scraperMenu = (
       <Menu>
-        <MenuItem
-          text="Free Ones"
-          onClick={() => { if (props.onDisplayFreeOnesDialog) { props.onDisplayFreeOnesDialog(); }}}
-        />
+        {props.scrapers ? props.scrapers.map((s) => renderScraperMenuItem(s)) : undefined}
       </Menu>
     );
     return (
diff --git a/ui/v2/src/components/performers/PerformerDetails/Performer.tsx b/ui/v2/src/components/performers/PerformerDetails/Performer.tsx
index 3ed069cbd..26f035c6c 100644
--- a/ui/v2/src/components/performers/PerformerDetails/Performer.tsx
+++ b/ui/v2/src/components/performers/PerformerDetails/Performer.tsx
@@ -13,7 +13,7 @@ import { StashService } from "../../../core/StashService";
 import { IBaseProps } from "../../../models";
 import { ErrorUtils } from "../../../utils/errors";
 import { TableUtils } from "../../../utils/table";
-import { FreeOnesPerformerSuggest } from "../../select/FreeOnesPerformerSuggest";
+import { ScrapePerformerSuggest } from "../../select/ScrapePerformerSuggest";
 import { DetailsEditNavbar } from "../../Shared/DetailsEditNavbar";
 
 interface IPerformerProps extends IBaseProps {}
@@ -23,8 +23,8 @@ export const Performer: FunctionComponent<IPerformerProps> = (props: IPerformerP
   // Editing state
   const [isEditing, setIsEditing] = useState(isNew);
-  const [isDisplayingScraperDialog, setIsDisplayingScraperDialog] = useState<"freeones" | undefined>(undefined);
-  const [scrapePerformerName, setScrapePerformerName] = useState("");
+  const [isDisplayingScraperDialog, setIsDisplayingScraperDialog] = useState<GQL.ListScrapersListScrapers | undefined>(undefined);
+  const [scrapePerformerDetails, setScrapePerformerDetails] = useState<GQL.ScrapePerformerListScrapePerformerList | undefined>(undefined);
 
   // Editing performer state
   const [image, setImage] = useState<string | undefined>(undefined);
@@ -52,6 +52,9 @@ export const Performer: FunctionComponent<IPerformerProps> = (props: IPerformerP
   // Network state
   const [isLoading, setIsLoading] = useState(false);
 
+  const Scrapers = StashService.useListScrapers(GQL.ScraperType.Performer);
+  const [queryableScrapers, setQueryableScrapers] = useState<GQL.ListScrapersListScrapers[]>([]);
+
   const { data, error, loading } = StashService.useFindPerformer(props.match.params.id);
   const updatePerformer = StashService.usePerformerUpdate(getPerformerInput() as GQL.PerformerUpdateInput);
   const createPerformer = StashService.usePerformerCreate(getPerformerInput() as GQL.PerformerCreateInput);
@@ -93,10 +96,23 @@ export const Performer: FunctionComponent<IPerformerProps> = (props: IPerformerP
     }
   }, [performer]);
 
-  if (!isNew && !isEditing) {
-    if (!data || !data.findPerformer || isLoading) { return <Spinner size={Spinner.SIZE_LARGE} />; }
-    if (!!error) { return <>error...</>; }
+  useEffect(() => {
+    var newQueryableScrapers : GQL.ListScrapersListScrapers[] = [];
+
+    if (!!Scrapers.data && Scrapers.data.listScrapers) {
+      newQueryableScrapers = Scrapers.data.listScrapers.filter((s) => {
+        return s.supported_scrapes.includes(GQL.ScrapeType.Query);
+      });
+    }
+
+    setQueryableScrapers(newQueryableScrapers);
+
+  }, [Scrapers.data])
+
+  if ((!isNew && !isEditing && (!data || !data.findPerformer)) || isLoading) {
+    return <Spinner size={Spinner.SIZE_LARGE} />;
   }
+  if (!!error) { return <>error...</>; }
 
   function getPerformerInput() {
     const performerInput: Partial<GQL.PerformerCreateInput | GQL.PerformerUpdateInput> = {
@@ -166,23 +182,46 @@ export const Performer: FunctionComponent<IPerformerProps> = (props: IPerformerP
     reader.readAsDataURL(file);
   }
 
-  function onDisplayFreeOnesDialog() {
-    setIsDisplayingScraperDialog("freeones");
+  function onDisplayFreeOnesDialog(scraper: GQL.ListScrapersListScrapers) {
+    setIsDisplayingScraperDialog(scraper);
   }
 
-  async function onScrapeFreeOnes() {
+  function getQueryScraperPerformerInput() {
+    if (!scrapePerformerDetails) {
+      return {};
+    }
+
+    let ret = _.clone(scrapePerformerDetails);
+    delete ret.__typename;
+    return ret as GQL.ScrapedPerformerInput;
+  }
+
+  async function onScrapePerformer() {
+    setIsDisplayingScraperDialog(undefined);
     setIsLoading(true);
     try {
-      if (!scrapePerformerName) { return; }
-      const result = await StashService.queryScrapeFreeones(scrapePerformerName);
-      if (!result.data || !result.data.scrapeFreeones) { return; }
-      updatePerformerEditState(result.data.scrapeFreeones);
+      if (!scrapePerformerDetails || !isDisplayingScraperDialog) { return; }
+      const result = await StashService.queryScrapePerformer(isDisplayingScraperDialog.id, getQueryScraperPerformerInput());
+      if (!result.data || !result.data.scrapePerformer) { return; }
+      updatePerformerEditState(result.data.scrapePerformer);
+    } catch (e) {
+      ErrorUtils.handle(e);
+    }
+    setIsLoading(false);
+  }
+
+  async function onScrapePerformerURL() {
+    if (!url) { return; }
+    setIsLoading(true);
+    try {
+      const result = await StashService.queryScrapePerformerURL(url);
+      if (!result.data || !result.data.scrapePerformerURL) { return; }
+      updatePerformerEditState(result.data.scrapePerformerURL);
     } catch (e) {
       ErrorUtils.handle(e);
     } finally {
-      setIsDisplayingScraperDialog(undefined);
+      setIsLoading(false);
     }
-    setIsLoading(false);
   }
 
   function renderEthnicity() {
@@ -203,21 +242,61 @@ export const Performer: FunctionComponent<IPerformerProps> = (props: IPerformerP
       title="Scrape"
     >
       <div className="dialog-content">
-        <FreeOnesPerformerSuggest
-          onQueryChange={(query) => setScrapePerformerName(query)}
+        <ScrapePerformerSuggest
+          scraperId={isDisplayingScraperDialog ? isDisplayingScraperDialog.id : ""}
+          onSelectPerformer={(query) => setScrapePerformerDetails(query)}
         />
       </div>
-      <Button text="Scrape" onClick={onScrapeFreeOnes} />
+      <Button text="Scrape" onClick={onScrapePerformer} />
); } + function urlScrapable(url: string) : boolean { + return !!url && !!Scrapers.data && Scrapers.data.listScrapers && Scrapers.data.listScrapers.some((s) => { + return !!s.urls && s.urls.some((u) => { return url.includes(u); }); + }); + } + + function maybeRenderScrapeButton() { + if (!url || !urlScrapable(url)) { + return undefined; + } + return ( +