Mirror of https://github.com/stashapp/stash.git
Add scene metadata scraping functionality (#236)

* Add scene scraping functionality
* Adapt to changed scraper config
graphql/documents/data/scrapers.graphql (new file, +75)
@@ -0,0 +1,75 @@
+fragment ScrapedPerformerData on ScrapedPerformer {
+  name
+  url
+  birthdate
+  ethnicity
+  country
+  eye_color
+  height
+  measurements
+  fake_tits
+  career_length
+  tattoos
+  piercings
+  aliases
+}
+
+fragment ScrapedScenePerformerData on ScrapedScenePerformer {
+  id
+  name
+  url
+  twitter
+  instagram
+  birthdate
+  ethnicity
+  country
+  eye_color
+  height
+  measurements
+  fake_tits
+  career_length
+  tattoos
+  piercings
+  aliases
+}
+
+fragment ScrapedSceneStudioData on ScrapedSceneStudio {
+  id
+  name
+  url
+}
+
+fragment ScrapedSceneTagData on ScrapedSceneTag {
+  id
+  name
+}
+
+fragment ScrapedSceneData on ScrapedScene {
+  title
+  details
+  url
+  date
+
+  file {
+    size
+    duration
+    video_codec
+    audio_codec
+    width
+    height
+    framerate
+    bitrate
+  }
+
+  studio {
+    ...ScrapedSceneStudioData
+  }
+
+  tags {
+    ...ScrapedSceneTagData
+  }
+
+  performers {
+    ...ScrapedScenePerformerData
+  }
+}
@@ -9,71 +9,43 @@ query ListPerformerScrapers {
   }
 }
 
-# query ListSceneScrapers {
-#   listSceneScrapers {
-#     id
-#     name
-#     scene {
-#       urls
-#       supported_scrapes
-#     }
-#   }
-# }
+query ListSceneScrapers {
+  listSceneScrapers {
+    id
+    name
+    scene {
+      urls
+      supported_scrapes
+    }
+  }
+}
 
 query ScrapePerformerList($scraper_id: ID!, $query: String!) {
   scrapePerformerList(scraper_id: $scraper_id, query: $query) {
-    name
-    url
-    birthdate
-    ethnicity
-    country
-    eye_color
-    height
-    measurements
-    fake_tits
-    career_length
-    tattoos
-    piercings
-    aliases
+    ...ScrapedPerformerData
   }
 }
 
 query ScrapePerformer($scraper_id: ID!, $scraped_performer: ScrapedPerformerInput!) {
   scrapePerformer(scraper_id: $scraper_id, scraped_performer: $scraped_performer) {
-    name
-    url
-    twitter
-    instagram
-    birthdate
-    ethnicity
-    country
-    eye_color
-    height
-    measurements
-    fake_tits
-    career_length
-    tattoos
-    piercings
-    aliases
+    ...ScrapedPerformerData
  }
 }
 
 query ScrapePerformerURL($url: String!) {
   scrapePerformerURL(url: $url) {
-    name
-    url
-    twitter
-    instagram
-    birthdate
-    ethnicity
-    country
-    eye_color
-    height
-    measurements
-    fake_tits
-    career_length
-    tattoos
-    piercings
-    aliases
+    ...ScrapedPerformerData
   }
 }
+
+query ScrapeScene($scraper_id: ID!, $scene: SceneUpdateInput!) {
+  scrapeScene(scraper_id: $scraper_id, scene: $scene) {
+    ...ScrapedSceneData
+  }
+}
+
+query ScrapeSceneURL($url: String!) {
+  scrapeSceneURL(url: $url) {
+    ...ScrapedSceneData
+  }
+}
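Both the performer and the new scene operations go through the server's regular GraphQL endpoint, so the scene queries can be exercised outside the UI as well. A minimal Go sketch of calling the new scrapeSceneURL query; the localhost address, port 9999, and the /graphql path are assumptions about a stock local setup, not something this commit defines:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// GraphQL request body for the new scrapeSceneURL query.
	body, err := json.Marshal(map[string]interface{}{
		"query":     `query ($url: String!) { scrapeSceneURL(url: $url) { title date url } }`,
		"variables": map[string]string{"url": "https://example.com/scene/123"}, // hypothetical scene URL
	})
	if err != nil {
		panic(err)
	}

	// http://localhost:9999/graphql is an assumption about a default local server.
	resp, err := http.Post("http://localhost:9999/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out)
}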
@@ -47,13 +47,17 @@ type Query {
 
   """List available scrapers"""
   listPerformerScrapers: [Scraper!]!
-  #listSceneScrapers: [Scraper!]!
+  listSceneScrapers: [Scraper!]!
   """Scrape a list of performers based on name"""
   scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]!
   """Scrapes a complete performer record based on a scrapePerformerList result"""
   scrapePerformer(scraper_id: ID!, scraped_performer: ScrapedPerformerInput!): ScrapedPerformer
   """Scrapes a complete performer record based on a URL"""
   scrapePerformerURL(url: String!): ScrapedPerformer
+  """Scrapes a complete scene record based on an existing scene"""
+  scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene
+  """Scrapes a complete scene record based on a URL"""
+  scrapeSceneURL(url: String!): ScrapedScene
 
   """Scrape a performer using Freeones"""
   scrapeFreeones(performer_name: String!): ScrapedPerformer
@@ -1,7 +1,10 @@
 enum ScrapeType {
+  """From text query"""
   NAME
+  """From existing object"""
   FRAGMENT
+  """From URL"""
   URL
 }
 
 type ScraperSpec {
@@ -15,7 +18,53 @@ type Scraper {
   name: String!
   """Details for performer scraper"""
   performer: ScraperSpec
-  # TODO
-  # """Details for scene scraper"""
-  # scene: ScraperSpec
+  """Details for scene scraper"""
+  scene: ScraperSpec
 }
+
+type ScrapedScenePerformer {
+  """Set if performer matched"""
+  id: ID
+  name: String!
+  url: String
+  twitter: String
+  instagram: String
+  birthdate: String
+  ethnicity: String
+  country: String
+  eye_color: String
+  height: String
+  measurements: String
+  fake_tits: String
+  career_length: String
+  tattoos: String
+  piercings: String
+  aliases: String
+}
+
+type ScrapedSceneStudio {
+  """Set if studio matched"""
+  id: ID
+  name: String!
+  url: String
+}
+
+type ScrapedSceneTag {
+  """Set if tag matched"""
+  id: ID
+  name: String!
+}
+
+type ScrapedScene {
+  title: String
+  details: String
+  url: String
+  date: String
+
+  file: SceneFileType # Resolver
+
+  studio: ScrapedSceneStudio
+  tags: [ScrapedSceneTag!]
+  performers: [ScrapedScenePerformer!]
+}
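The diff doesn't show the corresponding Go models, but the resolver code further down (e.g. p.ID = &id in matchPerformer) implies the usual mapping: nullable schema fields become pointer fields. A sketch of that assumed shape, not the commit's actual generated code:

package models

// Sketch only (an assumption, not this commit's generated code): nullable
// GraphQL fields map to pointers, so ID stays nil until a local match is found.
type ScrapedScenePerformer struct {
	ID        *string `json:"id"`
	Name      string  `json:"name"`
	URL       *string `json:"url"`
	Twitter   *string `json:"twitter"`
	Instagram *string `json:"instagram"`
	// ...the remaining optional String fields follow the same pattern.
}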
@@ -36,6 +36,10 @@ func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) {
 	return scraper.ListPerformerScrapers()
 }
 
+func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) {
+	return scraper.ListSceneScrapers()
+}
+
 func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
 	if query == "" {
 		return nil, nil
@@ -51,3 +55,11 @@ func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
 func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
 	return scraper.ScrapePerformerURL(url)
 }
+
+func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
+	return scraper.ScrapeScene(scraperID, scene)
+}
+
+func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
+	return scraper.ScrapeSceneURL(url)
+}
@@ -59,22 +59,13 @@ func (c *performerByFragmentConfig) resolveFn() {
 	}
 }
 
-type scrapePerformerByURLFunc func(c scraperTypeConfig, url string) (*models.ScrapedPerformer, error)
-
-type scraperByURLConfig struct {
+type scrapeByURLConfig struct {
 	scraperTypeConfig `yaml:",inline"`
 	URL               []string `yaml:"url,flow"`
-	performScrape     scrapePerformerByURLFunc
 }
 
-func (c *scraperByURLConfig) resolveFn() {
-	if c.Action == scraperActionScript {
-		c.performScrape = scrapePerformerURLScript
-	}
-}
-
-func (s scraperByURLConfig) matchesURL(url string) bool {
-	for _, thisURL := range s.URL {
+func (c scrapeByURLConfig) matchesURL(url string) bool {
+	for _, thisURL := range c.URL {
 		if strings.Contains(url, thisURL) {
 			return true
 		}
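The refactored matchesURL gives every by-URL scraper the same semantics: a candidate URL matches when any configured URL fragment occurs as a substring of it. A self-contained sketch of just that rule (names here are illustrative, not the package's actual helpers):

package main

import (
	"fmt"
	"strings"
)

// matchesAny mirrors scrapeByURLConfig.matchesURL: a candidate URL matches
// when any configured fragment appears anywhere inside it.
func matchesAny(configured []string, url string) bool {
	for _, fragment := range configured {
		if strings.Contains(url, fragment) {
			return true
		}
	}
	return false
}

func main() {
	configured := []string{"freeones.com"} // hypothetical scraper URL list
	fmt.Println(matchesAny(configured, "https://www.freeones.com/some/performer")) // true
	fmt.Println(matchesAny(configured, "https://example.com/scene/123"))           // false
}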
@@ -83,12 +74,53 @@ func (s scraperByURLConfig) matchesURL(url string) bool {
 	return false
 }
 
+type scrapePerformerByURLFunc func(c scraperTypeConfig, url string) (*models.ScrapedPerformer, error)
+
+type scrapePerformerByURLConfig struct {
+	scrapeByURLConfig `yaml:",inline"`
+	performScrape     scrapePerformerByURLFunc
+}
+
+func (c *scrapePerformerByURLConfig) resolveFn() {
+	if c.Action == scraperActionScript {
+		c.performScrape = scrapePerformerURLScript
+	}
+}
+
+type scrapeSceneFragmentFunc func(c scraperTypeConfig, scene models.SceneUpdateInput) (*models.ScrapedScene, error)
+
+type sceneByFragmentConfig struct {
+	scraperTypeConfig `yaml:",inline"`
+	performScrape     scrapeSceneFragmentFunc
+}
+
+func (c *sceneByFragmentConfig) resolveFn() {
+	if c.Action == scraperActionScript {
+		c.performScrape = scrapeSceneFragmentScript
+	}
+}
+
+type scrapeSceneByURLFunc func(c scraperTypeConfig, url string) (*models.ScrapedScene, error)
+
+type scrapeSceneByURLConfig struct {
+	scrapeByURLConfig `yaml:",inline"`
+	performScrape     scrapeSceneByURLFunc
+}
+
+func (c *scrapeSceneByURLConfig) resolveFn() {
+	if c.Action == scraperActionScript {
+		c.performScrape = scrapeSceneURLScript
+	}
+}
+
 type scraperConfig struct {
 	ID                  string
 	Name                string                     `yaml:"name"`
 	PerformerByName     *performerByNameConfig     `yaml:"performerByName"`
 	PerformerByFragment *performerByFragmentConfig `yaml:"performerByFragment"`
-	PerformerByURL      []*scraperByURLConfig      `yaml:"performerByURL"`
+	PerformerByURL      []*scrapePerformerByURLConfig `yaml:"performerByURL"`
+	SceneByFragment     *sceneByFragmentConfig        `yaml:"sceneByFragment"`
+	SceneByURL          []*scrapeSceneByURLConfig     `yaml:"sceneByURL"`
 }
 
 func loadScraperFromYAML(path string) (*scraperConfig, error) {
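The yaml tags above define what a scraper config file can now express: sceneByFragment and sceneByURL blocks alongside the existing performer ones, with scrapeByURLConfig inlined so the action/script/url keys sit at the same level. A hedged sketch of how such a file would unmarshal, using simplified mirror types; the concrete field values (action, script, url) are illustrative, not a real scraper:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Simplified mirrors of the config types above, just to show how the
// `yaml:",inline"` and `yaml:"sceneByURL"` tags shape a scraper file.
type scraperTypeConfig struct {
	Action string   `yaml:"action"`
	Script []string `yaml:"script,flow"`
}

type scrapeByURLConfig struct {
	scraperTypeConfig `yaml:",inline"`
	URL               []string `yaml:"url,flow"`
}

type scraperConfig struct {
	Name       string               `yaml:"name"`
	SceneByURL []*scrapeByURLConfig `yaml:"sceneByURL"`
}

// A hypothetical scraper YAML; the key names follow the tags above, but the
// action/script/url values are made up for illustration.
const doc = `
name: ExampleSite
sceneByURL:
  - action: script
    script: [python3, example_scraper.py]
    url: [example.com/scene]
`

func main() {
	var c scraperConfig
	if err := yaml.Unmarshal([]byte(doc), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.Name, c.SceneByURL[0].Action, c.SceneByURL[0].URL)
}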
@@ -127,6 +159,13 @@ func (c *scraperConfig) initialiseConfigs() {
 	for _, s := range c.PerformerByURL {
 		s.resolveFn()
 	}
+
+	if c.SceneByFragment != nil {
+		c.SceneByFragment.resolveFn()
+	}
+	for _, s := range c.SceneByURL {
+		s.resolveFn()
+	}
 }
 
 func (c scraperConfig) toScraper() *models.Scraper {
@@ -153,6 +192,21 @@ func (c scraperConfig) toScraper() *models.Scraper {
 		ret.Performer = &performer
 	}
 
+	scene := models.ScraperSpec{}
+	if c.SceneByFragment != nil {
+		scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
+	}
+	if len(c.SceneByURL) > 0 {
+		scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
+		for _, v := range c.SceneByURL {
+			scene.Urls = append(scene.Urls, v.URL...)
+		}
+	}
+
+	if len(scene.SupportedScrapes) > 0 {
+		ret.Scene = &scene
+	}
+
 	return &ret
 }
 
@@ -202,3 +256,42 @@ func (c scraperConfig) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
 
 	return nil, nil
 }
+
+func (c scraperConfig) supportsScenes() bool {
+	return c.SceneByFragment != nil || len(c.SceneByURL) > 0
+}
+
+func (c scraperConfig) matchesSceneURL(url string) bool {
+	for _, scraper := range c.SceneByURL {
+		if scraper.matchesURL(url) {
+			return true
+		}
+	}
+
+	return false
+}
+
+func (c scraperConfig) ScrapeScene(scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
+	if c.SceneByFragment != nil && c.SceneByFragment.performScrape != nil {
+		return c.SceneByFragment.performScrape(c.SceneByFragment.scraperTypeConfig, scene)
+	}
+
+	return nil, nil
+}
+
+func (c scraperConfig) ScrapeSceneURL(url string) (*models.ScrapedScene, error) {
+	for _, scraper := range c.SceneByURL {
+		if scraper.matchesURL(url) && scraper.performScrape != nil {
+			ret, err := scraper.performScrape(scraper.scraperTypeConfig, url)
+			if err != nil {
+				return nil, err
+			}
+
+			if ret != nil {
+				return ret, nil
+			}
+		}
+	}
+
+	return nil, nil
+}
@@ -30,10 +30,12 @@ func GetFreeonesScraper() scraperConfig {
 		PerformerByFragment: &performerByFragmentConfig{
 			performScrape: GetPerformer,
 		},
-		PerformerByURL: []*scraperByURLConfig{
-			&scraperByURLConfig{
+		PerformerByURL: []*scrapePerformerByURLConfig{
+			&scrapePerformerByURLConfig{
+				scrapeByURLConfig: scrapeByURLConfig{
+					URL: freeonesURLs,
+				},
 				performScrape: GetPerformerURL,
-				URL:           freeonesURLs,
 			},
 		},
 	}
@@ -3,6 +3,7 @@ package scraper
 import (
 	"errors"
 	"path/filepath"
+	"strconv"
 
 	"github.com/stashapp/stash/pkg/logger"
 	"github.com/stashapp/stash/pkg/manager/config"
@@ -61,7 +62,26 @@ func ListPerformerScrapers() ([]*models.Scraper, error) {
 	return ret, nil
 }
 
-func findPerformerScraper(scraperID string) *scraperConfig {
+func ListSceneScrapers() ([]*models.Scraper, error) {
+	// read scraper config files from the directory and cache
+	scrapers, err := loadScrapers()
+
+	if err != nil {
+		return nil, err
+	}
+
+	var ret []*models.Scraper
+	for _, s := range scrapers {
+		// filter on type
+		if s.supportsScenes() {
+			ret = append(ret, s.toScraper())
+		}
+	}
+
+	return ret, nil
+}
+
+func findScraper(scraperID string) *scraperConfig {
 	// read scraper config files from the directory and cache
 	loadScrapers()
 
@@ -76,7 +96,7 @@ func findPerformerScraper(scraperID string) *scraperConfig {
 
 func ScrapePerformerList(scraperID string, query string) ([]*models.ScrapedPerformer, error) {
 	// find scraper with the provided id
-	s := findPerformerScraper(scraperID)
+	s := findScraper(scraperID)
 	if s != nil {
 		return s.ScrapePerformerNames(query)
 	}
@@ -86,7 +106,7 @@ func ScrapePerformerList(scraperID string, query string) ([]*models.ScrapedPerformer, error) {
 
 func ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
 	// find scraper with the provided id
-	s := findPerformerScraper(scraperID)
+	s := findScraper(scraperID)
 	if s != nil {
 		return s.ScrapePerformer(scrapedPerformer)
 	}
@@ -103,3 +123,127 @@ func ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
 
 	return nil, nil
 }
+
+func matchPerformer(p *models.ScrapedScenePerformer) error {
+	qb := models.NewPerformerQueryBuilder()
+
+	performers, err := qb.FindByNames([]string{p.Name}, nil)
+
+	if err != nil {
+		return err
+	}
+
+	if len(performers) != 1 {
+		// ignore - cannot match
+		return nil
+	}
+
+	id := strconv.Itoa(performers[0].ID)
+	p.ID = &id
+	return nil
+}
+
+func matchStudio(s *models.ScrapedSceneStudio) error {
+	qb := models.NewStudioQueryBuilder()
+
+	studio, err := qb.FindByName(s.Name, nil)
+
+	if err != nil {
+		return err
+	}
+
+	if studio == nil {
+		// ignore - cannot match
+		return nil
+	}
+
+	id := strconv.Itoa(studio.ID)
+	s.ID = &id
+	return nil
+}
+
+func matchTag(s *models.ScrapedSceneTag) error {
+	qb := models.NewTagQueryBuilder()
+
+	tag, err := qb.FindByName(s.Name, nil)
+
+	if err != nil {
+		return err
+	}
+
+	if tag == nil {
+		// ignore - cannot match
+		return nil
+	}
+
+	id := strconv.Itoa(tag.ID)
+	s.ID = &id
+	return nil
+}
+
+func postScrapeScene(ret *models.ScrapedScene) error {
+	for _, p := range ret.Performers {
+		err := matchPerformer(p)
+		if err != nil {
+			return err
+		}
+	}
+
+	for _, t := range ret.Tags {
+		err := matchTag(t)
+		if err != nil {
+			return err
+		}
+	}
+
+	if ret.Studio != nil {
+		err := matchStudio(ret.Studio)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func ScrapeScene(scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
+	// find scraper with the provided id
+	s := findScraper(scraperID)
+	if s != nil {
+		ret, err := s.ScrapeScene(scene)
+
+		if err != nil {
+			return nil, err
+		}
+
+		err = postScrapeScene(ret)
+		if err != nil {
+			return nil, err
+		}
+
+		return ret, nil
+	}
+
+	return nil, errors.New("Scraper with ID " + scraperID + " not found")
+}
+
+func ScrapeSceneURL(url string) (*models.ScrapedScene, error) {
+	for _, s := range scrapers {
+		if s.matchesSceneURL(url) {
+			ret, err := s.ScrapeSceneURL(url)
+
+			if err != nil {
+				return nil, err
+			}
+
+			err = postScrapeScene(ret)
+			if err != nil {
+				return nil, err
+			}
+
+			return ret, nil
+		}
+	}
+
+	return nil, nil
+}
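matchPerformer, matchStudio, and matchTag all apply the same post-processing rule: resolve a scraped name against the local database and set the ID only when the match is unambiguous. A standalone sketch of that rule, with a plain map standing in for the real query builders:

package main

import (
	"fmt"
	"strconv"
)

// scrapedPerformer mirrors the relevant part of the scraped model: ID stays
// nil until a local record is matched.
type scrapedPerformer struct {
	ID   *string
	Name string
}

// matchByName resolves a scraped name to a stored ID only when exactly one
// row matches; an ambiguous or unknown name is left unmatched, not an error.
func matchByName(index map[string][]int, p *scrapedPerformer) {
	ids := index[p.Name]
	if len(ids) != 1 {
		return // ambiguous or unknown - leave unmatched
	}
	id := strconv.Itoa(ids[0])
	p.ID = &id
}

func main() {
	index := map[string][]int{"Alice": {3}, "Bob": {1, 7}} // hypothetical local data
	a := &scrapedPerformer{Name: "Alice"}
	b := &scrapedPerformer{Name: "Bob"}
	matchByName(index, a)
	matchByName(index, b)
	fmt.Println(a.ID != nil, b.ID != nil) // true false
}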
@@ -106,3 +106,27 @@ func scrapePerformerURLScript(c scraperTypeConfig, url string) (*models.ScrapedPerformer, error) {
 
 	return &ret, err
 }
+
+func scrapeSceneFragmentScript(c scraperTypeConfig, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
+	inString, err := json.Marshal(scene)
+
+	if err != nil {
+		return nil, err
+	}
+
+	var ret models.ScrapedScene
+
+	err = runScraperScript(c.Script, string(inString), &ret)
+
+	return &ret, err
+}
+
+func scrapeSceneURLScript(c scraperTypeConfig, url string) (*models.ScrapedScene, error) {
+	inString := `{"url": "` + url + `"}`
+
+	var ret models.ScrapedScene
+
+	err := runScraperScript(c.Script, string(inString), &ret)
+
+	return &ret, err
+}
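One thing to watch in scrapeSceneURLScript: the input document is built by string concatenation, so a URL containing a double quote or backslash would produce invalid JSON. A safer construction (a sketch, not part of this commit) marshals the payload instead:

package main

import (
	"encoding/json"
	"fmt"
)

// urlPayload builds the {"url": ...} input document via json.Marshal so any
// special characters in the URL are escaped correctly.
func urlPayload(url string) (string, error) {
	b, err := json.Marshal(map[string]string{"url": url})
	if err != nil {
		return "", err
	}
	return string(b), nil
}

func main() {
	s, _ := urlPayload(`https://example.com/scene?q="quoted"`)
	fmt.Println(s) // {"url":"https://example.com/scene?q=\"quoted\""}
}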
@@ -11,6 +11,9 @@ import {
   Collapse,
   Icon,
   FileInput,
+  Menu,
+  Popover,
+  MenuItem,
 } from "@blueprintjs/core";
 import _ from "lodash";
 import React, { FunctionComponent, useEffect, useState } from "react";
@@ -42,6 +45,9 @@ export const SceneEditPanel: FunctionComponent<IProps> = (props: IProps) => {
   const [tagIds, setTagIds] = useState<string[] | undefined>(undefined);
   const [coverImage, setCoverImage] = useState<string | undefined>(undefined);
 
+  const Scrapers = StashService.useListSceneScrapers();
+  const [queryableScrapers, setQueryableScrapers] = useState<GQL.ListSceneScrapersListSceneScrapers[]>([]);
+
   const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState<boolean>(false);
   const [deleteFile, setDeleteFile] = useState<boolean>(false);
   const [deleteGenerated, setDeleteGenerated] = useState<boolean>(true);
@@ -55,6 +61,19 @@ export const SceneEditPanel: FunctionComponent<IProps> = (props: IProps) => {
   const updateScene = StashService.useSceneUpdate(getSceneInput());
   const deleteScene = StashService.useSceneDestroy(getSceneDeleteInput());
 
+  useEffect(() => {
+    var newQueryableScrapers : GQL.ListSceneScrapersListSceneScrapers[] = [];
+
+    if (!!Scrapers.data && Scrapers.data.listSceneScrapers) {
+      newQueryableScrapers = Scrapers.data.listSceneScrapers.filter((s) => {
+        return s.scene && s.scene.supported_scrapes.includes(GQL.ScrapeType.Fragment);
+      });
+    }
+
+    setQueryableScrapers(newQueryableScrapers);
+  }, [Scrapers.data])
+
   function updateSceneEditState(state: Partial<GQL.SceneDataFragment>) {
     const perfIds = !!state.performers ? state.performers.map((performer) => performer.id) : undefined;
     const tIds = !!state.tags ? state.tags.map((tag) => tag.id) : undefined;
@@ -186,6 +205,118 @@ export const SceneEditPanel: FunctionComponent<IProps> = (props: IProps) => {
   function onCoverImageChange(event: React.FormEvent<HTMLInputElement>) {
     ImageUtils.onImageChange(event, onImageLoad);
   }
+
+  async function onScrapeClicked(scraper : GQL.ListSceneScrapersListSceneScrapers) {
+    setIsLoading(true);
+    try {
+      const result = await StashService.queryScrapeScene(scraper.id, getSceneInput());
+      if (!result.data || !result.data.scrapeScene) { return; }
+      updateSceneFromScrapedScene(result.data.scrapeScene);
+    } catch (e) {
+      ErrorUtils.handle(e);
+    } finally {
+      setIsLoading(false);
+    }
+  }
+
+  function renderScraperMenuItem(scraper : GQL.ListSceneScrapersListSceneScrapers) {
+    return (
+      <MenuItem
+        text={scraper.name}
+        onClick={() => { onScrapeClicked(scraper); }}
+      />
+    );
+  }
+
+  function renderScraperMenu() {
+    if (!queryableScrapers || queryableScrapers.length == 0) {
+      return;
+    }
+
+    const scraperMenu = (
+      <Menu>
+        {queryableScrapers ? queryableScrapers.map((s) => renderScraperMenuItem(s)) : undefined}
+      </Menu>
+    );
+    return (
+      <Popover content={scraperMenu} position="bottom">
+        <Button text="Scrape with..."/>
+      </Popover>
+    );
+  }
+
+  function urlScrapable(url: string) : boolean {
+    return !!url && !!Scrapers.data && Scrapers.data.listSceneScrapers && Scrapers.data.listSceneScrapers.some((s) => {
+      return !!s.scene && !!s.scene.urls && s.scene.urls.some((u) => { return url.includes(u); });
+    });
+  }
+
+  function updateSceneFromScrapedScene(scene : GQL.ScrapedSceneDataFragment) {
+    if (!title && scene.title) {
+      setTitle(scene.title);
+    }
+
+    if (!details && scene.details) {
+      setDetails(scene.details);
+    }
+
+    if (!date && scene.date) {
+      setDate(scene.date);
+    }
+
+    if (!studioId && scene.studio && scene.studio.id) {
+      setStudioId(scene.studio.id);
+    }
+
+    if ((!performerIds || performerIds.length == 0) && scene.performers && scene.performers.length > 0) {
+      let idPerfs = scene.performers.filter((p) => {
+        return p.id !== undefined && p.id !== null;
+      });
+
+      if (idPerfs.length > 0) {
+        let newIds = idPerfs.map((p) => p.id);
+        setPerformerIds(newIds as string[]);
+      }
+    }
+
+    if ((!tagIds || tagIds.length == 0) && scene.tags && scene.tags.length > 0) {
+      let idTags = scene.tags.filter((p) => {
+        return p.id !== undefined && p.id !== null;
+      });
+
+      if (idTags.length > 0) {
+        let newIds = idTags.map((p) => p.id);
+        setTagIds(newIds as string[]);
+      }
+    }
+  }
+
+  async function onScrapeSceneURL() {
+    if (!url) { return; }
+    setIsLoading(true);
+    try {
+      const result = await StashService.queryScrapeSceneURL(url);
+      if (!result.data || !result.data.scrapeSceneURL) { return; }
+      updateSceneFromScrapedScene(result.data.scrapeSceneURL);
+    } catch (e) {
+      ErrorUtils.handle(e);
+    } finally {
+      setIsLoading(false);
+    }
+  }
+
+  function maybeRenderScrapeButton() {
+    if (!url || !urlScrapable(url)) {
+      return undefined;
+    }
+    return (
+      <Button
+        minimal={true}
+        icon="import"
+        id="scrape-url-button"
+        onClick={() => onScrapeSceneURL()}/>
+    )
+  }
+
   return (
     <>
@@ -212,6 +343,7 @@ export const SceneEditPanel: FunctionComponent<IProps> = (props: IProps) => {
             onChange={(newValue: any) => setUrl(newValue.target.value)}
             value={url}
           />
+          {maybeRenderScrapeButton()}
         </FormGroup>
 
         <FormGroup label="Date" helperText="YYYY-MM-DD">
@@ -267,6 +399,7 @@ export const SceneEditPanel: FunctionComponent<IProps> = (props: IProps) => {
       </div>
       <Button className="edit-button" text="Save" intent="primary" onClick={() => onSave()}/>
       <Button className="edit-button" text="Delete" intent="danger" onClick={() => setIsDeleteAlertOpen(true)}/>
+      {renderScraperMenu()}
     </>
   );
 };
@@ -221,6 +221,10 @@ export class StashService {
     return GQL.useScrapePerformer({ variables: { scraper_id: scraperId, scraped_performer: scrapedPerformer }});
   }
 
+  public static useListSceneScrapers() {
+    return GQL.useListSceneScrapers();
+  }
+
   public static useScrapeFreeonesPerformers(q: string) { return GQL.useScrapeFreeonesPerformers({ variables: { q } }); }
   public static useMarkerStrings() { return GQL.useMarkerStrings(); }
   public static useAllTags() { return GQL.useAllTags(); }
@@ -425,6 +429,25 @@ export class StashService {
     });
   }
 
+  public static queryScrapeSceneURL(url: string) {
+    return StashService.client.query<GQL.ScrapeSceneUrlQuery>({
+      query: GQL.ScrapeSceneUrlDocument,
+      variables: {
+        url: url,
+      },
+    });
+  }
+
+  public static queryScrapeScene(scraperId: string, scene: GQL.SceneUpdateInput) {
+    return StashService.client.query<GQL.ScrapeSceneQuery>({
+      query: GQL.ScrapeSceneDocument,
+      variables: {
+        scraper_id: scraperId,
+        scene: scene,
+      },
+    });
+  }
+
   public static queryMetadataScan(input: GQL.ScanMetadataInput) {
     return StashService.client.query<GQL.MetadataScanQuery>({
       query: GQL.MetadataScanDocument,