Separate graphql API from rest of the system (#2503)
* Move graphql generated files to api
* Refactor identify options
* Remove models.StashBoxes
* Move ScraperSource to scraper package
* Rename field strategy enums
* Rename identify.TaskOptions to Options
@@ -3,11 +3,139 @@ package scraper
 import (
 	"context"
 	"errors"
+	"fmt"
+	"io"
 	"net/http"
+	"strconv"
 
 	"github.com/stashapp/stash/pkg/models"
 )
 
+type Source struct {
+	// Index of the configured stash-box instance to use. Should be unset if scraper_id is set
+	StashBoxIndex *int `json:"stash_box_index"`
+	// Stash-box endpoint
+	StashBoxEndpoint *string `json:"stash_box_endpoint"`
+	// Scraper ID to scrape with. Should be unset if stash_box_index is set
+	ScraperID *string `json:"scraper_id"`
+}
+
+// Scraped Content is the forming union over the different scrapers
+type ScrapedContent interface {
+	IsScrapedContent()
+}
+
+// Type of the content a scraper generates
+type ScrapeContentType string
+
+const (
+	ScrapeContentTypeGallery   ScrapeContentType = "GALLERY"
+	ScrapeContentTypeMovie     ScrapeContentType = "MOVIE"
+	ScrapeContentTypePerformer ScrapeContentType = "PERFORMER"
+	ScrapeContentTypeScene     ScrapeContentType = "SCENE"
+)
+
+var AllScrapeContentType = []ScrapeContentType{
+	ScrapeContentTypeGallery,
+	ScrapeContentTypeMovie,
+	ScrapeContentTypePerformer,
+	ScrapeContentTypeScene,
+}
+
+func (e ScrapeContentType) IsValid() bool {
+	switch e {
+	case ScrapeContentTypeGallery, ScrapeContentTypeMovie, ScrapeContentTypePerformer, ScrapeContentTypeScene:
+		return true
+	}
+	return false
+}
+
+func (e ScrapeContentType) String() string {
+	return string(e)
+}
+
+func (e *ScrapeContentType) UnmarshalGQL(v interface{}) error {
+	str, ok := v.(string)
+	if !ok {
+		return fmt.Errorf("enums must be strings")
+	}
+
+	*e = ScrapeContentType(str)
+	if !e.IsValid() {
+		return fmt.Errorf("%s is not a valid ScrapeContentType", str)
+	}
+	return nil
+}
+
+func (e ScrapeContentType) MarshalGQL(w io.Writer) {
+	fmt.Fprint(w, strconv.Quote(e.String()))
+}
+
+type Scraper struct {
+	ID   string `json:"id"`
+	Name string `json:"name"`
+	// Details for performer scraper
+	Performer *ScraperSpec `json:"performer"`
+	// Details for scene scraper
+	Scene *ScraperSpec `json:"scene"`
+	// Details for gallery scraper
+	Gallery *ScraperSpec `json:"gallery"`
+	// Details for movie scraper
+	Movie *ScraperSpec `json:"movie"`
+}
+
+type ScraperSpec struct {
+	// URLs matching these can be scraped with
+	Urls             []string     `json:"urls"`
+	SupportedScrapes []ScrapeType `json:"supported_scrapes"`
+}
+
+type ScrapeType string
+
+const (
+	// From text query
+	ScrapeTypeName ScrapeType = "NAME"
+	// From existing object
+	ScrapeTypeFragment ScrapeType = "FRAGMENT"
+	// From URL
+	ScrapeTypeURL ScrapeType = "URL"
+)
+
+var AllScrapeType = []ScrapeType{
+	ScrapeTypeName,
+	ScrapeTypeFragment,
+	ScrapeTypeURL,
+}
+
+func (e ScrapeType) IsValid() bool {
+	switch e {
+	case ScrapeTypeName, ScrapeTypeFragment, ScrapeTypeURL:
+		return true
+	}
+	return false
+}
+
+func (e ScrapeType) String() string {
+	return string(e)
+}
+
+func (e *ScrapeType) UnmarshalGQL(v interface{}) error {
+	str, ok := v.(string)
+	if !ok {
+		return fmt.Errorf("enums must be strings")
+	}
+
+	*e = ScrapeType(str)
+	if !e.IsValid() {
+		return fmt.Errorf("%s is not a valid ScrapeType", str)
+	}
+	return nil
+}
+
+func (e ScrapeType) MarshalGQL(w io.Writer) {
+	fmt.Fprint(w, strconv.Quote(e.String()))
+}
+
 var (
 	// ErrMaxRedirects is returned if the max number of HTTP redirects are reached.
 	ErrMaxRedirects = errors.New("maximum number of HTTP redirects reached")
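Not part of the diff: a minimal sketch, assuming the package is importable as github.com/stashapp/stash/pkg/scraper, of how the relocated gqlgen-style enum helpers behave now that they live here instead of in the generated models package.

package main

import (
	"bytes"
	"fmt"

	"github.com/stashapp/stash/pkg/scraper"
)

func main() {
	// UnmarshalGQL accepts the raw GraphQL value; anything other than one of
	// the member strings in AllScrapeContentType yields an error.
	var ty scraper.ScrapeContentType
	if err := ty.UnmarshalGQL("SCENE"); err != nil {
		panic(err)
	}
	fmt.Println(ty.IsValid()) // true

	// MarshalGQL writes the value back as a quoted string for the response.
	var buf bytes.Buffer
	ty.MarshalGQL(&buf)
	fmt.Println(buf.String()) // "SCENE"
}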
@@ -24,9 +152,9 @@ var (
 // The system expects one of these to be set, and the remaining to be
 // set to nil.
 type Input struct {
-	Performer *models.ScrapedPerformerInput
-	Scene     *models.ScrapedSceneInput
-	Gallery   *models.ScrapedGalleryInput
+	Performer *ScrapedPerformerInput
+	Scene     *ScrapedSceneInput
+	Gallery   *ScrapedGalleryInput
 }
 
 // simple type definitions that can help customize
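Not part of the diff: a hypothetical in-package helper illustrating the contract stated above, that exactly one of the Input fields is populated and the rest stay nil (the helper name is made up for illustration).

// performerInput builds an Input for a performer-only fragment scrape,
// leaving Scene and Gallery nil as the struct comment requires.
func performerInput(p *ScrapedPerformerInput) Input {
	return Input{Performer: p}
}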
@@ -41,32 +169,32 @@ const (
 // scraper is the generic interface to the scraper subsystems
 type scraper interface {
 	// spec returns the scraper specification, suitable for graphql
-	spec() models.Scraper
+	spec() Scraper
 	// supports tests if the scraper supports a given content type
-	supports(models.ScrapeContentType) bool
+	supports(ScrapeContentType) bool
 	// supportsURL tests if the scraper supports scrapes of a given url, producing a given content type
-	supportsURL(url string, ty models.ScrapeContentType) bool
+	supportsURL(url string, ty ScrapeContentType) bool
 }
 
 // urlScraper is the interface of scrapers supporting url loads
 type urlScraper interface {
 	scraper
 
-	viaURL(ctx context.Context, client *http.Client, url string, ty models.ScrapeContentType) (models.ScrapedContent, error)
+	viaURL(ctx context.Context, client *http.Client, url string, ty ScrapeContentType) (ScrapedContent, error)
 }
 
 // nameScraper is the interface of scrapers supporting name loads
 type nameScraper interface {
 	scraper
 
-	viaName(ctx context.Context, client *http.Client, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error)
+	viaName(ctx context.Context, client *http.Client, name string, ty ScrapeContentType) ([]ScrapedContent, error)
 }
 
 // fragmentScraper is the interface of scrapers supporting fragment loads
 type fragmentScraper interface {
 	scraper
 
-	viaFragment(ctx context.Context, client *http.Client, input Input) (models.ScrapedContent, error)
+	viaFragment(ctx context.Context, client *http.Client, input Input) (ScrapedContent, error)
 }
 
 // sceneScraper is a scraper which supports scene scrapes with
@@ -74,7 +202,7 @@ type fragmentScraper interface {
 type sceneScraper interface {
 	scraper
 
-	viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error)
+	viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*ScrapedScene, error)
 }
 
 // galleryScraper is a scraper which supports gallery scrapes with
@@ -82,5 +210,5 @@ type sceneScraper interface {
 type galleryScraper interface {
 	scraper
 
-	viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error)
+	viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error)
 }
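For orientation, and not part of the commit: a hypothetical in-package type satisfying the unexported scraper interface, showing how the renamed Scraper, ScraperSpec, ScrapeType and ScrapeContentType values are consumed after the move out of models.

// urlOnlyScraper is an illustrative scraper that only advertises URL-based
// performer scrapes; the identifiers and URL list are made up.
type urlOnlyScraper struct {
	id   string
	urls []string
}

func (s urlOnlyScraper) spec() Scraper {
	return Scraper{
		ID:   s.id,
		Name: s.id,
		Performer: &ScraperSpec{
			Urls:             s.urls,
			SupportedScrapes: []ScrapeType{ScrapeTypeURL},
		},
	}
}

func (s urlOnlyScraper) supports(ty ScrapeContentType) bool {
	return ty == ScrapeContentTypePerformer
}

func (s urlOnlyScraper) supportsURL(url string, ty ScrapeContentType) bool {
	// a fuller implementation would also match url against s.urls
	return s.supports(ty)
}

A urlScraper built on top of this would additionally implement viaURL with the ScrapedContent return type introduced in the hunks above.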