Mirror of https://github.com/stashapp/stash.git
Add image scraping support (#370)

* Add sub-scraper functionality
* Add scraping of performer image
* Add scene cover image scraping
* Port UI changes to v2.5
* Fix v2.5 dialog suggest color
* Don't convert eol of UI to support pretty
.gitattributes (vendored) | 3
@@ -1,2 +1,3 @@
 go.mod text eol=lf
 go.sum text eol=lf
+ui/v2.5/** -text
go.mod | 1
@@ -14,6 +14,7 @@ require (
 	github.com/h2non/filetype v1.0.8
 	// this is required for generate
 	github.com/inconshreveable/mousetrap v1.0.0 // indirect
+	github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
 	github.com/jmoiron/sqlx v1.2.0
 	github.com/mattn/go-sqlite3 v1.10.0
 	github.com/rs/cors v1.6.0
go.sum | 2
@@ -383,6 +383,8 @@ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANyt
 github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
 github.com/jackc/pgx v3.2.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
 github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
+github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a h1:zPPuIq2jAWWPTrGt70eK/BSch+gFAGrNzecsoENgu2o=
+github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a/go.mod h1:yL958EeXv8Ylng6IfnvG4oflryUi3vgA3xPs9hmII1s=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU=
 github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
@@ -14,6 +14,7 @@ fragment ScrapedPerformerData on ScrapedPerformer {
   tattoos
   piercings
   aliases
+  image
 }
 
 fragment ScrapedScenePerformerData on ScrapedScenePerformer {
@@ -75,6 +76,7 @@ fragment ScrapedSceneData on ScrapedScene {
   details
   url
   date
+  image
 
   file {
     size
@@ -15,6 +15,9 @@ type ScrapedPerformer {
   tattoos: String
   piercings: String
   aliases: String
+
+  """This should be base64 encoded"""
+  image: String
 }
 
 input ScrapedPerformerInput {
@@ -33,4 +36,6 @@ input ScrapedPerformerInput {
   tattoos: String
   piercings: String
   aliases: String
+
+  # not including image for the input
 }
@@ -76,6 +76,9 @@ type ScrapedScene {
   url: String
   date: String
 
+  """This should be base64 encoded"""
+  image: String
+
   file: SceneFileType # Resolver
 
   studio: ScrapedSceneStudio
@@ -39,6 +39,26 @@ type ScrapedPerformer struct {
 	Tattoos      *string `graphql:"tattoos" json:"tattoos"`
 	Piercings    *string `graphql:"piercings" json:"piercings"`
 	Aliases      *string `graphql:"aliases" json:"aliases"`
+	Image        *string `graphql:"image" json:"image"`
+}
+
+// this type has no Image field
+type ScrapedPerformerStash struct {
+	Name         *string `graphql:"name" json:"name"`
+	URL          *string `graphql:"url" json:"url"`
+	Twitter      *string `graphql:"twitter" json:"twitter"`
+	Instagram    *string `graphql:"instagram" json:"instagram"`
+	Birthdate    *string `graphql:"birthdate" json:"birthdate"`
+	Ethnicity    *string `graphql:"ethnicity" json:"ethnicity"`
+	Country      *string `graphql:"country" json:"country"`
+	EyeColor     *string `graphql:"eye_color" json:"eye_color"`
+	Height       *string `graphql:"height" json:"height"`
+	Measurements *string `graphql:"measurements" json:"measurements"`
+	FakeTits     *string `graphql:"fake_tits" json:"fake_tits"`
+	CareerLength *string `graphql:"career_length" json:"career_length"`
+	Tattoos      *string `graphql:"tattoos" json:"tattoos"`
+	Piercings    *string `graphql:"piercings" json:"piercings"`
+	Aliases      *string `graphql:"aliases" json:"aliases"`
 }
 
 type ScrapedScene struct {
@@ -46,6 +66,7 @@ type ScrapedScene struct {
 	Details *string              `graphql:"details" json:"details"`
 	URL     *string              `graphql:"url" json:"url"`
 	Date    *string              `graphql:"date" json:"date"`
+	Image   *string              `graphql:"image" json:"image"`
 	File    *SceneFileType       `graphql:"file" json:"file"`
 	Studio  *ScrapedSceneStudio  `graphql:"studio" json:"studio"`
 	Movies  []*ScrapedSceneMovie `graphql:"movies" json:"movies"`
@@ -53,6 +74,19 @@ type ScrapedScene struct {
 	Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
 }
 
+// stash doesn't return image, and we need id
+type ScrapedSceneStash struct {
+	ID         string                   `graphql:"id" json:"id"`
+	Title      *string                  `graphql:"title" json:"title"`
+	Details    *string                  `graphql:"details" json:"details"`
+	URL        *string                  `graphql:"url" json:"url"`
+	Date       *string                  `graphql:"date" json:"date"`
+	File       *SceneFileType           `graphql:"file" json:"file"`
+	Studio     *ScrapedSceneStudio      `graphql:"studio" json:"studio"`
+	Tags       []*ScrapedSceneTag       `graphql:"tags" json:"tags"`
+	Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
+}
+
 type ScrapedScenePerformer struct {
 	// Set if performer matched
 	ID *string `graphql:"id" json:"id"`
pkg/scraper/image.go (new file) | 84
@@ -0,0 +1,84 @@
+package scraper
+
+import (
+	"io/ioutil"
+	"net/http"
+	"strings"
+	"time"
+
+	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/utils"
+)
+
+// Timeout to get the image. Includes transfer time. May want to make this
+// configurable at some point.
+const imageGetTimeout = time.Second * 30
+
+func setPerformerImage(p *models.ScrapedPerformer) error {
+	if p == nil || p.Image == nil || !strings.HasPrefix(*p.Image, "http") {
+		// nothing to do
+		return nil
+	}
+
+	img, err := getImage(*p.Image)
+	if err != nil {
+		return err
+	}
+
+	p.Image = img
+
+	return nil
+}
+
+func setSceneImage(s *models.ScrapedScene) error {
+	// don't try to get the image if it doesn't appear to be a URL
+	if s == nil || s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
+		// nothing to do
+		return nil
+	}
+
+	img, err := getImage(*s.Image)
+	if err != nil {
+		return err
+	}
+
+	s.Image = img
+
+	return nil
+}
+
+func getImage(url string) (*string, error) {
+	client := &http.Client{
+		Timeout: imageGetTimeout,
+	}
+
+	// assume is a URL for now
+	resp, err := client.Get(url)
+	if err != nil {
+		return nil, err
+	}
+
+	defer resp.Body.Close()
+
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	// determine the image type and set the base64 type
+	contentType := resp.Header.Get("Content-Type")
+	if contentType == "" {
+		contentType = http.DetectContentType(body)
+	}
+
+	img := "data:" + contentType + ";base64," + utils.GetBase64StringFromData(body)
+	return &img, nil
+}
+
+func getStashPerformerImage(stashURL string, performerID string) (*string, error) {
+	return getImage(stashURL + "/performer/" + performerID + "/image")
+}
+
+func getStashSceneImage(stashURL string, sceneID string) (*string, error) {
+	return getImage(stashURL + "/scene/" + sceneID + "/screenshot")
+}
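Note: getImage returns the downloaded bytes as a data URI ("data:<content-type>;base64,..."), which is what the schema comment "This should be base64 encoded" refers to and what the UI assigns straight to the image preview. Below is a minimal standalone sketch of the same technique using only the standard library; the function name and URL are illustrative, and plain base64.StdEncoding stands in for utils.GetBase64StringFromData.

```go
package main

import (
	"encoding/base64"
	"fmt"
	"io/ioutil"
	"net/http"
	"time"
)

// fetchAsDataURI downloads a URL and encodes the body as a
// "data:<content-type>;base64,..." string, the same shape getImage produces.
func fetchAsDataURI(url string) (string, error) {
	client := &http.Client{Timeout: 30 * time.Second}

	resp, err := client.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}

	// fall back to sniffing the type when the server doesn't report one
	contentType := resp.Header.Get("Content-Type")
	if contentType == "" {
		contentType = http.DetectContentType(body)
	}

	return "data:" + contentType + ";base64," + base64.StdEncoding.EncodeToString(body), nil
}

func main() {
	uri, err := fetchAsDataURI("https://example.com/performer.jpg")
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	// print only the prefix; the full string embeds the whole image
	fmt.Printf("%.60s...\n", uri)
}
```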
@@ -108,7 +108,17 @@ func ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerI
 	// find scraper with the provided id
 	s := findScraper(scraperID)
 	if s != nil {
-		return s.ScrapePerformer(scrapedPerformer)
+		ret, err := s.ScrapePerformer(scrapedPerformer)
+		if err != nil {
+			return nil, err
+		}
+
+		// post-process - set the image if applicable
+		if err := setPerformerImage(ret); err != nil {
+			logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
+		}
+
+		return ret, nil
 	}
 
 	return nil, errors.New("Scraper with ID " + scraperID + " not found")
@@ -117,7 +127,17 @@ func ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerI
 func ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
 	for _, s := range scrapers {
 		if s.matchesPerformerURL(url) {
-			return s.ScrapePerformerURL(url)
+			ret, err := s.ScrapePerformerURL(url)
+			if err != nil {
+				return nil, err
+			}
+
+			// post-process - set the image if applicable
+			if err := setPerformerImage(ret); err != nil {
+				logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
+			}
+
+			return ret, nil
 		}
 	}
 
@@ -228,6 +248,11 @@ func postScrapeScene(ret *models.ScrapedScene) error {
 		}
 	}
 
+	// post-process - set the image if applicable
+	if err := setSceneImage(ret); err != nil {
+		logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
+	}
+
 	return nil
 }
 
@@ -4,6 +4,7 @@ import (
 	"context"
 	"strconv"
 
+	"github.com/jinzhu/copier"
 	"github.com/shurcooL/graphql"
 
 	"github.com/stashapp/stash/pkg/models"
@@ -67,12 +68,14 @@ func scrapePerformerFragmentStash(c scraperTypeConfig, scrapedPerformer models.S
 	client := getStashClient(c)
 
 	var q struct {
-		FindPerformer *models.ScrapedPerformer `graphql:"findPerformer(id: $f)"`
+		FindPerformer *models.ScrapedPerformerStash `graphql:"findPerformer(id: $f)"`
 	}
 
+	performerID := *scrapedPerformer.URL
+
 	// get the id from the URL field
 	vars := map[string]interface{}{
-		"f": *scrapedPerformer.URL,
+		"f": performerID,
 	}
 
 	err := client.Query(context.Background(), &q, vars)
@@ -80,7 +83,20 @@ func scrapePerformerFragmentStash(c scraperTypeConfig, scrapedPerformer models.S
 		return nil, err
 	}
 
-	return q.FindPerformer, nil
+	// need to copy back to a scraped performer
+	ret := models.ScrapedPerformer{}
+	err = copier.Copy(&ret, q.FindPerformer)
+	if err != nil {
+		return nil, err
+	}
+
+	// get the performer image directly
+	ret.Image, err = getStashPerformerImage(c.scraperConfig.StashServer.URL, performerID)
+	if err != nil {
+		return nil, err
+	}
+
+	return &ret, nil
 }
 
 func scrapeSceneFragmentStash(c scraperTypeConfig, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
@@ -99,7 +115,7 @@ func scrapeSceneFragmentStash(c scraperTypeConfig, scene models.SceneUpdateInput
 	}
 
 	var q struct {
-		FindScene *models.ScrapedScene `graphql:"findScene(checksum: $c)"`
+		FindScene *models.ScrapedSceneStash `graphql:"findScene(checksum: $c)"`
 	}
 
 	checksum := graphql.String(storedScene.Checksum)
@@ -128,5 +144,18 @@ func scrapeSceneFragmentStash(c scraperTypeConfig, scene models.SceneUpdateInput
 		}
 	}
 
-	return q.FindScene, nil
+	// need to copy back to a scraped scene
+	ret := models.ScrapedScene{}
+	err = copier.Copy(&ret, q.FindScene)
+	if err != nil {
+		return nil, err
+	}
+
+	// get the performer image directly
+	ret.Image, err = getStashSceneImage(c.scraperConfig.StashServer.URL, q.FindScene.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	return &ret, nil
 }
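The stash scraper now queries into the *Stash variants (which deliberately omit the image field) and relies on jinzhu/copier to move the matching fields onto the regular scraped types before fetching the image separately. A minimal sketch of that name-based copy step follows, using throwaway stand-in types (remoteResult and localResult are illustrative, not the real models):

```go
package main

import (
	"fmt"

	"github.com/jinzhu/copier"
)

// remoteResult mirrors what the remote stash query returns; note there is
// intentionally no Image field.
type remoteResult struct {
	Name *string
	URL  *string
}

// localResult is the richer type the rest of the scraper works with.
type localResult struct {
	Name  *string
	URL   *string
	Image *string
}

func main() {
	name := "example performer"
	src := remoteResult{Name: &name}

	// copier.Copy matches fields by name, so Name and URL carry over
	// and Image stays nil for the caller to fill in afterwards.
	dst := localResult{}
	if err := copier.Copy(&dst, &src); err != nil {
		fmt.Println("copy failed:", err)
		return
	}

	img := "data:image/jpeg;base64,..."
	dst.Image = &img

	fmt.Println(*dst.Name, *dst.Image)
}
```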
@@ -135,6 +135,22 @@ func (c xpathScraperAttrConfig) getReplace() xpathRegexConfigs {
 	return ret
 }
 
+func (c xpathScraperAttrConfig) getSubScraper() xpathScraperAttrConfig {
+	const subScraperKey = "subScraper"
+	val, _ := c[subScraperKey]
+
+	if val == nil {
+		return nil
+	}
+
+	asMap, _ := val.(map[interface{}]interface{})
+	if asMap != nil {
+		return xpathScraperAttrConfig(asMap)
+	}
+
+	return nil
+}
+
 func (c xpathScraperAttrConfig) concatenateResults(nodes []*html.Node) string {
 	separator := c.getConcat()
 	result := []string{}
@@ -174,10 +190,44 @@ func (c xpathScraperAttrConfig) replaceRegex(value string) string {
 	return replace.apply(value)
 }
 
+func (c xpathScraperAttrConfig) applySubScraper(value string) string {
+	subScraper := c.getSubScraper()
+
+	if subScraper == nil {
+		return value
+	}
+
+	doc, err := htmlquery.LoadURL(value)
+
+	if err != nil {
+		logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
+		return ""
+	}
+
+	found := runXPathQuery(doc, subScraper.getSelector(), nil)
+
+	if len(found) > 0 {
+		// check if we're concatenating the results into a single result
+		var result string
+		if subScraper.hasConcat() {
+			result = subScraper.concatenateResults(found)
+		} else {
+			result = htmlquery.InnerText(found[0])
+			result = commonPostProcess(result)
+		}
+
+		result = subScraper.postProcess(result)
+		return result
+	}
+
+	return ""
+}
+
 func (c xpathScraperAttrConfig) postProcess(value string) string {
 	// perform regex replacements first
 	value = c.replaceRegex(value)
 	value = c.parseDate(value)
+	value = c.applySubScraper(value)
+
 	return value
 }
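A subScraper config treats the value produced by an attribute's selector as a URL, loads that page, and runs a second selector against it before the usual post-processing. The sketch below shows the same fetch-then-query flow in isolation; it assumes the htmlquery package used here is github.com/antchfx/htmlquery, and the URL and XPath expression are placeholders, not values from the commit.

```go
package main

import (
	"fmt"

	"github.com/antchfx/htmlquery"
)

// subScrape loads the page behind a scraped value and applies a second
// XPath expression to it, mirroring the applySubScraper flow above.
func subScrape(pageURL string, selector string) (string, error) {
	doc, err := htmlquery.LoadURL(pageURL)
	if err != nil {
		return "", err
	}

	nodes := htmlquery.Find(doc, selector)
	if len(nodes) == 0 {
		return "", fmt.Errorf("no nodes matched %q", selector)
	}

	// take the first match, as the non-concatenating branch does
	return htmlquery.InnerText(nodes[0]), nil
}

func main() {
	// placeholder URL and selector, for illustration only
	value, err := subScrape("https://example.com/performer/123", `//span[@class="birthdate"]`)
	if err != nil {
		fmt.Println("sub-scrape failed:", err)
		return
	}
	fmt.Println("sub-scraped value:", value)
}
```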
@@ -72,7 +72,7 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> = ({
   const [queryableScrapers, setQueryableScrapers] = useState<GQL.Scraper[]>([]);
 
   function updatePerformerEditState(
-    state: Partial<GQL.PerformerDataFragment | GQL.ScrapedPerformer>
+    state: Partial<GQL.PerformerDataFragment | GQL.ScrapedPerformerDataFragment>
   ) {
     if ((state as GQL.PerformerDataFragment).favorite !== undefined) {
       setFavorite((state as GQL.PerformerDataFragment).favorite);
@@ -94,6 +94,21 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> = ({
     setInstagram(state.instagram ?? undefined);
   }
 
+  function updatePerformerEditStateFromScraper(
+    state: Partial<GQL.ScrapedPerformerDataFragment>
+  ) {
+    updatePerformerEditState(state);
+
+    // image is a base64 string
+    if ((state as GQL.ScrapedPerformerDataFragment).image !== undefined) {
+      let imageStr = (state as GQL.ScrapedPerformerDataFragment).image;
+      setImage(imageStr ?? undefined);
+      if (onImageChange) {
+        onImageChange(imageStr!);
+      }
+    }
+  }
+
   useEffect(() => {
     setImage(undefined);
     updatePerformerEditState(performer);
@@ -158,7 +173,8 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> = ({
   function getQueryScraperPerformerInput() {
     if (!scrapePerformerDetails) return {};
 
-    const { __typename, ...ret } = scrapePerformerDetails;
+    // image is not supported
+    const { __typename, image, ...ret } = scrapePerformerDetails;
     return ret;
   }
 
@@ -172,7 +188,7 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> = ({
         getQueryScraperPerformerInput()
       );
       if (!result?.data?.scrapePerformer) return;
-      updatePerformerEditState(result.data.scrapePerformer);
+      updatePerformerEditStateFromScraper(result.data.scrapePerformer);
     } catch (e) {
       Toast.error(e);
     } finally {
@@ -193,7 +209,7 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> = ({
       if (!result.data.scrapePerformerURL.url) {
        result.data.scrapePerformerURL.url = url;
       }
-      updatePerformerEditState(result.data.scrapePerformerURL);
+      updatePerformerEditStateFromScraper(result.data.scrapePerformerURL);
     } catch (e) {
       Toast.error(e);
     } finally {
@@ -257,6 +257,12 @@ export const SceneEditPanel: React.FC<IProps> = (props: IProps) => {
         setTagIds(newIds as string[]);
       }
     }
+
+    if (scene.image) {
+      // image is a base64 string
+      setCoverImage(scene.image);
+      setCoverImagePreview(scene.image);
+    }
   }
 
   async function onScrapeSceneURL() {
@@ -140,6 +140,7 @@ code,
 /* this is a bit of a hack, because we can't supply direct class names
 to the react-select controls */
 /* stylelint-disable selector-class-pattern */
+
 div.react-select__control {
   background-color: $secondary;
   border-color: $secondary;
@@ -170,6 +171,39 @@ div.react-select__menu {
     cursor: pointer;
   }
 }
+
+/* we don't want to override this for dialogs, which are light colored */
+.modal {
+  div.react-select__control {
+    background-color: #fff;
+    border-color: inherit;
+    color: $dark-text;
+
+    .react-select__single-value,
+    .react-select__input {
+      color: $dark-text;
+    }
+
+    .react-select__multi-value {
+      background-color: #fff;
+      color: $dark-text;
+    }
+  }
+
+  div.react-select__menu {
+    background-color: #fff;
+    color: $text-color;
+
+    .react-select__option {
+      color: $dark-text;
+    }
+
+    .react-select__option--is-focused {
+      background-color: rgba(167,182,194,.3);
+    }
+  }
+}
+
 /* stylelint-enable selector-class-pattern */
 
 .image-thumbnail {
@@ -61,7 +61,7 @@ export const PerformerDetailsPanel: FunctionComponent<IPerformerDetailsProps> =
   const Scrapers = StashService.useListPerformerScrapers();
   const [queryableScrapers, setQueryableScrapers] = useState<GQL.ListPerformerScrapersListPerformerScrapers[]>([]);
 
-  function updatePerformerEditState(state: Partial<GQL.PerformerDataFragment | GQL.ScrapeFreeonesScrapeFreeones>) {
+  function updatePerformerEditState(state: Partial<GQL.PerformerDataFragment | GQL.ScrapedPerformerDataFragment | GQL.ScrapeFreeonesScrapeFreeones>) {
     if ((state as GQL.PerformerDataFragment).favorite !== undefined) {
       setFavorite((state as GQL.PerformerDataFragment).favorite);
     }
@@ -82,6 +82,19 @@ export const PerformerDetailsPanel: FunctionComponent<IPerformerDetailsProps> =
     setInstagram(state.instagram);
   }
 
+  function updatePerformerEditStateFromScraper(state: Partial<GQL.ScrapedPerformerDataFragment | GQL.ScrapeFreeonesScrapeFreeones>) {
+    updatePerformerEditState(state);
+
+    // image is a base64 string
+    if ((state as GQL.ScrapedPerformerDataFragment).image !== undefined) {
+      let imageStr = (state as GQL.ScrapedPerformerDataFragment).image;
+      setImage(imageStr);
+      if (props.onImageChange) {
+        props.onImageChange(imageStr!);
+      }
+    }
+  }
+
   useEffect(() => {
     setImage(undefined);
     updatePerformerEditState(props.performer);
@@ -169,6 +182,10 @@ export const PerformerDetailsPanel: FunctionComponent<IPerformerDetailsProps> =
 
     let ret = _.clone(scrapePerformerDetails);
     delete ret.__typename;
+
+    // image is not supported
+    delete ret.image;
+
     return ret as GQL.ScrapedPerformerInput;
   }
 
@@ -179,7 +196,7 @@ export const PerformerDetailsPanel: FunctionComponent<IPerformerDetailsProps> =
       setIsLoading(true);
      const result = await StashService.queryScrapePerformer(isDisplayingScraperDialog.id, getQueryScraperPerformerInput());
      if (!result.data || !result.data.scrapePerformer) { return; }
-     updatePerformerEditState(result.data.scrapePerformer);
+     updatePerformerEditStateFromScraper(result.data.scrapePerformer);
     } catch (e) {
       ErrorUtils.handle(e);
     } finally {
@@ -199,7 +216,7 @@ export const PerformerDetailsPanel: FunctionComponent<IPerformerDetailsProps> =
        result.data.scrapePerformerURL.url = url;
      }
 
-     updatePerformerEditState(result.data.scrapePerformerURL);
+     updatePerformerEditStateFromScraper(result.data.scrapePerformerURL);
     } catch (e) {
       ErrorUtils.handle(e);
     } finally {
@@ -360,6 +360,12 @@ export const SceneEditPanel: FunctionComponent<IProps> = (props: IProps) => {
         setTagIds(newIds as string[]);
       }
     }
+
+    if (scene.image) {
+      // image is a base64 string
+      setCoverImage(scene.image);
+      setCoverImagePreview(scene.image);
+    }
   }
 
   async function onScrapeSceneURL() {
vendor/github.com/jinzhu/copier/Guardfile (generated, vendored, new file) | 3
@@ -0,0 +1,3 @@
+guard 'gotest' do
+  watch(%r{\.go$})
+end
vendor/github.com/jinzhu/copier/License (generated, vendored, new file) | 20
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Jinzhu
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
vendor/github.com/jinzhu/copier/README.md (generated, vendored, new file) | 100
@@ -0,0 +1,100 @@
+# Copier
+
+I am a copier, I copy everything from one to another
+
+[wercker status](https://app.wercker.com/project/byKey/9d44ad2d4e6253929c8fb71359effc0b)
+
+## Features
+
+* Copy from field to field with same name
+* Copy from method to field with same name
+* Copy from field to method with same name
+* Copy from slice to slice
+* Copy from struct to slice
+
+## Usage
+
+```go
+package main
+
+import (
+	"fmt"
+	"github.com/jinzhu/copier"
+)
+
+type User struct {
+	Name string
+	Role string
+	Age  int32
+}
+
+func (user *User) DoubleAge() int32 {
+	return 2 * user.Age
+}
+
+type Employee struct {
+	Name      string
+	Age       int32
+	DoubleAge int32
+	EmployeId int64
+	SuperRule string
+}
+
+func (employee *Employee) Role(role string) {
+	employee.SuperRule = "Super " + role
+}
+
+func main() {
+	var (
+		user      = User{Name: "Jinzhu", Age: 18, Role: "Admin"}
+		users     = []User{{Name: "Jinzhu", Age: 18, Role: "Admin"}, {Name: "jinzhu 2", Age: 30, Role: "Dev"}}
+		employee  = Employee{}
+		employees = []Employee{}
+	)
+
+	copier.Copy(&employee, &user)
+
+	fmt.Printf("%#v \n", employee)
+	// Employee{
+	//    Name: "Jinzhu",           // Copy from field
+	//    Age: 18,                  // Copy from field
+	//    DoubleAge: 36,            // Copy from method
+	//    EmployeeId: 0,            // Ignored
+	//    SuperRule: "Super Admin", // Copy to method
+	// }
+
+	// Copy struct to slice
+	copier.Copy(&employees, &user)
+
+	fmt.Printf("%#v \n", employees)
+	// []Employee{
+	//    {Name: "Jinzhu", Age: 18, DoubleAge: 36, EmployeId: 0, SuperRule: "Super Admin"}
+	// }
+
+	// Copy slice to slice
+	employees = []Employee{}
+	copier.Copy(&employees, &users)
+
+	fmt.Printf("%#v \n", employees)
+	// []Employee{
+	//    {Name: "Jinzhu", Age: 18, DoubleAge: 36, EmployeId: 0, SuperRule: "Super Admin"},
+	//    {Name: "jinzhu 2", Age: 30, DoubleAge: 60, EmployeId: 0, SuperRule: "Super Dev"},
+	// }
+}
+```
+
+## Contributing
+
+You can help to make the project better, check out [http://gorm.io/contribute.html](http://gorm.io/contribute.html) for things you can do.
+
+# Author
+
+**jinzhu**
+
+* <http://github.com/jinzhu>
+* <wosmvp@gmail.com>
+* <http://twitter.com/zhangjinzhu>
+
+## License
+
+Released under the [MIT License](https://github.com/jinzhu/copier/blob/master/License).
vendor/github.com/jinzhu/copier/copier.go (generated, vendored, new file) | 189
@@ -0,0 +1,189 @@
+package copier
+
+import (
+	"database/sql"
+	"errors"
+	"reflect"
+)
+
+// Copy copy things
+func Copy(toValue interface{}, fromValue interface{}) (err error) {
+	var (
+		isSlice bool
+		amount  = 1
+		from    = indirect(reflect.ValueOf(fromValue))
+		to      = indirect(reflect.ValueOf(toValue))
+	)
+
+	if !to.CanAddr() {
+		return errors.New("copy to value is unaddressable")
+	}
+
+	// Return is from value is invalid
+	if !from.IsValid() {
+		return
+	}
+
+	fromType := indirectType(from.Type())
+	toType := indirectType(to.Type())
+
+	// Just set it if possible to assign
+	// And need to do copy anyway if the type is struct
+	if fromType.Kind() != reflect.Struct && from.Type().AssignableTo(to.Type()) {
+		to.Set(from)
+		return
+	}
+
+	if fromType.Kind() != reflect.Struct || toType.Kind() != reflect.Struct {
+		return
+	}
+
+	if to.Kind() == reflect.Slice {
+		isSlice = true
+		if from.Kind() == reflect.Slice {
+			amount = from.Len()
+		}
+	}
+
+	for i := 0; i < amount; i++ {
+		var dest, source reflect.Value
+
+		if isSlice {
+			// source
+			if from.Kind() == reflect.Slice {
+				source = indirect(from.Index(i))
+			} else {
+				source = indirect(from)
+			}
+			// dest
+			dest = indirect(reflect.New(toType).Elem())
+		} else {
+			source = indirect(from)
+			dest = indirect(to)
+		}
+
+		// check source
+		if source.IsValid() {
+			fromTypeFields := deepFields(fromType)
+			//fmt.Printf("%#v", fromTypeFields)
+			// Copy from field to field or method
+			for _, field := range fromTypeFields {
+				name := field.Name
+
+				if fromField := source.FieldByName(name); fromField.IsValid() {
+					// has field
+					if toField := dest.FieldByName(name); toField.IsValid() {
+						if toField.CanSet() {
+							if !set(toField, fromField) {
+								if err := Copy(toField.Addr().Interface(), fromField.Interface()); err != nil {
+									return err
+								}
+							}
+						}
+					} else {
+						// try to set to method
+						var toMethod reflect.Value
+						if dest.CanAddr() {
+							toMethod = dest.Addr().MethodByName(name)
+						} else {
+							toMethod = dest.MethodByName(name)
+						}
+
+						if toMethod.IsValid() && toMethod.Type().NumIn() == 1 && fromField.Type().AssignableTo(toMethod.Type().In(0)) {
+							toMethod.Call([]reflect.Value{fromField})
+						}
+					}
+				}
+			}
+
+			// Copy from method to field
+			for _, field := range deepFields(toType) {
+				name := field.Name
+
+				var fromMethod reflect.Value
+				if source.CanAddr() {
+					fromMethod = source.Addr().MethodByName(name)
+				} else {
+					fromMethod = source.MethodByName(name)
+				}
+
+				if fromMethod.IsValid() && fromMethod.Type().NumIn() == 0 && fromMethod.Type().NumOut() == 1 {
+					if toField := dest.FieldByName(name); toField.IsValid() && toField.CanSet() {
+						values := fromMethod.Call([]reflect.Value{})
+						if len(values) >= 1 {
+							set(toField, values[0])
+						}
+					}
+				}
+			}
+		}
+		if isSlice {
+			if dest.Addr().Type().AssignableTo(to.Type().Elem()) {
+				to.Set(reflect.Append(to, dest.Addr()))
+			} else if dest.Type().AssignableTo(to.Type().Elem()) {
+				to.Set(reflect.Append(to, dest))
+			}
+		}
+	}
+	return
+}
+
+func deepFields(reflectType reflect.Type) []reflect.StructField {
+	var fields []reflect.StructField
+
+	if reflectType = indirectType(reflectType); reflectType.Kind() == reflect.Struct {
+		for i := 0; i < reflectType.NumField(); i++ {
+			v := reflectType.Field(i)
+			if v.Anonymous {
+				fields = append(fields, deepFields(v.Type)...)
+			} else {
+				fields = append(fields, v)
+			}
+		}
+	}
+
+	return fields
+}
+
+func indirect(reflectValue reflect.Value) reflect.Value {
+	for reflectValue.Kind() == reflect.Ptr {
+		reflectValue = reflectValue.Elem()
+	}
+	return reflectValue
+}
+
+func indirectType(reflectType reflect.Type) reflect.Type {
+	for reflectType.Kind() == reflect.Ptr || reflectType.Kind() == reflect.Slice {
+		reflectType = reflectType.Elem()
+	}
+	return reflectType
+}
+
+func set(to, from reflect.Value) bool {
+	if from.IsValid() {
+		if to.Kind() == reflect.Ptr {
+			//set `to` to nil if from is nil
+			if from.Kind() == reflect.Ptr && from.IsNil() {
+				to.Set(reflect.Zero(to.Type()))
+				return true
+			} else if to.IsNil() {
+				to.Set(reflect.New(to.Type().Elem()))
+			}
+			to = to.Elem()
+		}
+
+		if from.Type().ConvertibleTo(to.Type()) {
+			to.Set(from.Convert(to.Type()))
+		} else if scanner, ok := to.Addr().Interface().(sql.Scanner); ok {
+			err := scanner.Scan(from.Interface())
+			if err != nil {
+				return false
+			}
+		} else if from.Kind() == reflect.Ptr {
+			return set(to, from.Elem())
+		} else {
+			return false
+		}
+	}
+	return true
+}
vendor/github.com/jinzhu/copier/wercker.yml (generated, vendored, new file) | 23
@@ -0,0 +1,23 @@
+box: golang
+
+build:
+  steps:
+    - setup-go-workspace
+
+    # Gets the dependencies
+    - script:
+        name: go get
+        code: |
+          go get
+
+    # Build the project
+    - script:
+        name: go build
+        code: |
+          go build ./...
+
+    # Test the project
+    - script:
+        name: go test
+        code: |
+          go test ./...