mirror of
https://github.com/stashapp/stash.git
synced 2025-12-17 04:14:39 +03:00
Reorg
This commit is contained in:
8
pkg/api/api-packr.go
Normal file
8
pkg/api/api-packr.go
Normal file
@@ -0,0 +1,8 @@
|
||||
// +build !skippackr
|
||||
// Code generated by github.com/gobuffalo/packr/v2. DO NOT EDIT.
|
||||
|
||||
// You can use the "packr clean" command to clean up this,
|
||||
// and any other packr generated files.
|
||||
package api
|
||||
|
||||
import _ "github.com/stashapp/stash/packrd"
|
||||
12
pkg/api/context_keys.go
Normal file
12
pkg/api/context_keys.go
Normal file
@@ -0,0 +1,12 @@
|
||||
package api
|
||||
|
||||
// https://stackoverflow.com/questions/40891345/fix-should-not-use-basic-type-string-as-key-in-context-withvalue-golint
|
||||
|
||||
// key is an unexported type for context keys defined in this package.
// Using a distinct type (rather than a built-in like string or int)
// prevents collisions with context keys set by other packages.
type key int

// Context keys used to stash the current model objects on a request
// context. Values are assigned sequentially via iota (0..3), matching
// the original explicit constants.
const (
	galleryKey key = iota
	performerKey
	sceneKey
	studioKey
)
|
||||
160
pkg/api/resolver.go
Normal file
160
pkg/api/resolver.go
Normal file
@@ -0,0 +1,160 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
"sort"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Resolver is the root GraphQL resolver. It carries no state; the typed
// sub-resolvers below each embed a pointer to it.
type Resolver struct{}

// The methods below wire each GraphQL type to its field-resolver
// implementation, satisfying the generated models.*Resolver interfaces.

func (r *Resolver) Gallery() models.GalleryResolver {
	return &galleryResolver{r}
}
func (r *Resolver) Mutation() models.MutationResolver {
	return &mutationResolver{r}
}
func (r *Resolver) Performer() models.PerformerResolver {
	return &performerResolver{r}
}
func (r *Resolver) Query() models.QueryResolver {
	return &queryResolver{r}
}
func (r *Resolver) Scene() models.SceneResolver {
	return &sceneResolver{r}
}
func (r *Resolver) SceneMarker() models.SceneMarkerResolver {
	return &sceneMarkerResolver{r}
}
func (r *Resolver) Studio() models.StudioResolver {
	return &studioResolver{r}
}
func (r *Resolver) Subscription() models.SubscriptionResolver {
	return &subscriptionResolver{r}
}
func (r *Resolver) Tag() models.TagResolver {
	return &tagResolver{r}
}

// Root operation resolvers (query / mutation / subscription).
type mutationResolver struct{ *Resolver }
type queryResolver struct{ *Resolver }
type subscriptionResolver struct{ *Resolver }

// Per-model field resolvers.
type galleryResolver struct{ *Resolver }
type performerResolver struct{ *Resolver }
type sceneResolver struct{ *Resolver }
type sceneMarkerResolver struct{ *Resolver }
type studioResolver struct{ *Resolver }
type tagResolver struct{ *Resolver }
|
||||
|
||||
// MarkerWall returns scene markers for the wall view, optionally
// filtered by the free-text query q (nil means no filter).
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]models.SceneMarker, error) {
	qb := models.NewSceneMarkerQueryBuilder()
	return qb.Wall(q)
}

// SceneWall returns scenes for the wall view, optionally filtered by q.
func (r *queryResolver) SceneWall(ctx context.Context, q *string) ([]models.Scene, error) {
	qb := models.NewSceneQueryBuilder()
	return qb.Wall(q)
}

// MarkerStrings returns aggregated marker title strings, optionally
// filtered by q and ordered according to sort.
func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) ([]*models.MarkerStringsResultType, error) {
	qb := models.NewSceneMarkerQueryBuilder()
	return qb.GetMarkerStrings(q, sort)
}
|
||||
|
||||
func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *string) ([]models.Gallery, error) {
|
||||
if scene_id == nil {
|
||||
panic("nil scene id") // TODO make scene_id mandatory
|
||||
}
|
||||
sceneID, _ := strconv.Atoi(*scene_id)
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
scene, err := sqb.Find(sceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
validGalleries, err := qb.ValidGalleriesForScenePath(scene.Path)
|
||||
sceneGallery, _ := qb.FindBySceneID(sceneID, nil)
|
||||
if sceneGallery != nil {
|
||||
validGalleries = append(validGalleries, *sceneGallery)
|
||||
}
|
||||
return validGalleries, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) Stats(ctx context.Context) (models.StatsResultType, error) {
|
||||
scenesQB := models.NewSceneQueryBuilder()
|
||||
scenesCount, _ := scenesQB.Count()
|
||||
galleryQB := models.NewGalleryQueryBuilder()
|
||||
galleryCount, _ := galleryQB.Count()
|
||||
performersQB := models.NewPerformerQueryBuilder()
|
||||
performersCount, _ := performersQB.Count()
|
||||
studiosQB := models.NewStudioQueryBuilder()
|
||||
studiosCount, _ := studiosQB.Count()
|
||||
tagsQB := models.NewTagQueryBuilder()
|
||||
tagsCount, _ := tagsQB.Count()
|
||||
return models.StatsResultType{
|
||||
SceneCount: scenesCount,
|
||||
GalleryCount: galleryCount,
|
||||
PerformerCount: performersCount,
|
||||
StudioCount: studiosCount,
|
||||
TagCount: tagsCount,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Get scene marker tags which show up under the video.
|
||||
func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]models.SceneMarkerTag, error) {
|
||||
sceneID, _ := strconv.Atoi(scene_id)
|
||||
sqb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, err := sqb.FindBySceneID(sceneID, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tags := make(map[int]*models.SceneMarkerTag)
|
||||
var keys []int
|
||||
tqb := models.NewTagQueryBuilder()
|
||||
for _, sceneMarker := range sceneMarkers {
|
||||
if !sceneMarker.PrimaryTagID.Valid {
|
||||
panic("missing primary tag id")
|
||||
}
|
||||
markerPrimaryTag, err := tqb.Find(int(sceneMarker.PrimaryTagID.Int64), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, hasKey := tags[markerPrimaryTag.ID]
|
||||
var sceneMarkerTag *models.SceneMarkerTag
|
||||
if !hasKey {
|
||||
sceneMarkerTag = &models.SceneMarkerTag{Tag: *markerPrimaryTag}
|
||||
tags[markerPrimaryTag.ID] = sceneMarkerTag
|
||||
keys = append(keys, markerPrimaryTag.ID)
|
||||
} else {
|
||||
sceneMarkerTag = tags[markerPrimaryTag.ID]
|
||||
}
|
||||
tags[markerPrimaryTag.ID].SceneMarkers = append(tags[markerPrimaryTag.ID].SceneMarkers, sceneMarker)
|
||||
}
|
||||
|
||||
// Sort so that primary tags that show up earlier in the video are first.
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
a := tags[keys[i]]
|
||||
b := tags[keys[j]]
|
||||
return a.SceneMarkers[0].Seconds < b.SceneMarkers[0].Seconds
|
||||
})
|
||||
|
||||
var result []models.SceneMarkerTag
|
||||
for _, key := range keys {
|
||||
result = append(result, *tags[key])
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// ScrapeFreeones scrapes performer details from Freeones by name.
func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name string) (*models.ScrapedPerformer, error) {
	return scraper.GetPerformer(performer_name)
}

// ScrapeFreeonesPerformerList returns performer name suggestions from
// Freeones matching the given query string.
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
	return scraper.GetPerformerNames(query)
}
|
||||
20
pkg/api/resolver_model_gallery.go
Normal file
20
pkg/api/resolver_model_gallery.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ID exposes the gallery's integer primary key as a GraphQL ID string.
func (r *galleryResolver) ID(ctx context.Context, obj *models.Gallery) (string, error) {
	return strconv.Itoa(obj.ID), nil
}

// Title always resolves to nil.
func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) {
	return nil, nil // TODO remove this from schema
}

// Files returns the gallery's files with URLs built against the
// request's base URL (taken from the context).
func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]models.GalleryFilesType, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	return obj.GetFiles(baseURL), nil
}
|
||||
141
pkg/api/resolver_model_performer.go
Normal file
141
pkg/api/resolver_model_performer.go
Normal file
@@ -0,0 +1,141 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ID exposes the performer's integer primary key as a GraphQL ID string.
func (r *performerResolver) ID(ctx context.Context, obj *models.Performer) (string, error) {
	return strconv.Itoa(obj.ID), nil
}

// The resolvers below all follow one pattern: expose a nullable database
// column (sql.Null*-style, judging by the Valid/String fields) as a
// pointer that is nil when the column is NULL.

func (r *performerResolver) Name(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Name.Valid {
		return &obj.Name.String, nil
	}
	return nil, nil
}

func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.URL.Valid {
		return &obj.URL.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Twitter.Valid {
		return &obj.Twitter.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Instagram(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Instagram.Valid {
		return &obj.Instagram.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Birthdate.Valid {
		return &obj.Birthdate.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Ethnicity(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Ethnicity.Valid {
		return &obj.Ethnicity.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Country(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Country.Valid {
		return &obj.Country.String, nil
	}
	return nil, nil
}

func (r *performerResolver) EyeColor(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.EyeColor.Valid {
		return &obj.EyeColor.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Height(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Height.Valid {
		return &obj.Height.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Measurements(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Measurements.Valid {
		return &obj.Measurements.String, nil
	}
	return nil, nil
}

func (r *performerResolver) FakeTits(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.FakeTits.Valid {
		return &obj.FakeTits.String, nil
	}
	return nil, nil
}

func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.CareerLength.Valid {
		return &obj.CareerLength.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Tattoos(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Tattoos.Valid {
		return &obj.Tattoos.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Piercings(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Piercings.Valid {
		return &obj.Piercings.String, nil
	}
	return nil, nil
}

func (r *performerResolver) Aliases(ctx context.Context, obj *models.Performer) (*string, error) {
	if obj.Aliases.Valid {
		return &obj.Aliases.String, nil
	}
	return nil, nil
}

// Favorite reports NULL as false rather than nil (field is non-nullable
// in the schema, presumably — confirm against the GraphQL definition).
func (r *performerResolver) Favorite(ctx context.Context, obj *models.Performer) (bool, error) {
	if obj.Favorite.Valid {
		return obj.Favorite.Bool, nil
	}
	return false, nil
}

// ImagePath builds the performer image URL from the request's base URL
// (taken from the context).
func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj.ID).GetPerformerImageURL()
	return &imagePath, nil
}

// SceneCount returns the number of scenes featuring this performer.
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (*int, error) {
	qb := models.NewSceneQueryBuilder()
	res, err := qb.CountByPerformerID(obj.ID)
	return &res, err
}

// Scenes returns the scenes featuring this performer.
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) ([]models.Scene, error) {
	qb := models.NewSceneQueryBuilder()
	return qb.FindByPerformerID(obj.ID)
}
|
||||
115
pkg/api/resolver_model_scene.go
Normal file
115
pkg/api/resolver_model_scene.go
Normal file
@@ -0,0 +1,115 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ID exposes the scene's integer primary key as a GraphQL ID string.
func (r *sceneResolver) ID(ctx context.Context, obj *models.Scene) (string, error) {
	return strconv.Itoa(obj.ID), nil
}

// Title resolves the nullable title column (nil when NULL).
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) {
	if obj.Title.Valid {
		return &obj.Title.String, nil
	}
	return nil, nil
}

// Details resolves the nullable details column (nil when NULL).
func (r *sceneResolver) Details(ctx context.Context, obj *models.Scene) (*string, error) {
	if obj.Details.Valid {
		return &obj.Details.String, nil
	}
	return nil, nil
}

// URL resolves the nullable url column (nil when NULL).
func (r *sceneResolver) URL(ctx context.Context, obj *models.Scene) (*string, error) {
	if obj.URL.Valid {
		return &obj.URL.String, nil
	}
	return nil, nil
}

// Date converts the database date string to Y-M-D form (nil when NULL).
func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) {
	if obj.Date.Valid {
		result := utils.GetYMDFromDatabaseDate(obj.Date.String)
		return &result, nil
	}
	return nil, nil
}

// Rating resolves the nullable rating column (nil when NULL).
func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) {
	if obj.Rating.Valid {
		rating := int(obj.Rating.Int64)
		return &rating, nil
	}
	return nil, nil
}

// File assembles the scene's file metadata.
// NOTE(review): Size, Duration, VideoCodec, AudioCodec, Width, Height,
// Bitrate and Framerate are dereferenced without .Valid checks, so NULL
// columns surface as pointers to zero values — confirm this is intended.
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (models.SceneFileType, error) {
	width := int(obj.Width.Int64)
	height := int(obj.Height.Int64)
	bitrate := int(obj.Bitrate.Int64)
	return models.SceneFileType{
		Size:       &obj.Size.String,
		Duration:   &obj.Duration.Float64,
		VideoCodec: &obj.VideoCodec.String,
		AudioCodec: &obj.AudioCodec.String,
		Width:      &width,
		Height:     &height,
		Framerate:  &obj.Framerate.Float64,
		Bitrate:    &bitrate,
	}, nil
}

// Paths builds all media URLs for the scene from the request's base URL
// (taken from the context).
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (models.ScenePathsType, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
	screenshotPath := builder.GetScreenshotURL()
	previewPath := builder.GetStreamPreviewURL()
	streamPath := builder.GetStreamURL()
	webpPath := builder.GetStreamPreviewImageURL()
	vttPath := builder.GetSpriteVTTURL()
	chaptersVttPath := builder.GetChaptersVTTURL()
	return models.ScenePathsType{
		Screenshot:  &screenshotPath,
		Preview:     &previewPath,
		Stream:      &streamPath,
		Webp:        &webpPath,
		Vtt:         &vttPath,
		ChaptersVtt: &chaptersVttPath,
	}, nil
}

// IsStreamable delegates to the manager package's streamability check.
func (r *sceneResolver) IsStreamable(ctx context.Context, obj *models.Scene) (bool, error) {
	return manager.IsStreamable(obj)
}

// SceneMarkers returns the markers belonging to this scene.
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]models.SceneMarker, error) {
	qb := models.NewSceneMarkerQueryBuilder()
	return qb.FindBySceneID(obj.ID, nil)
}

// Gallery returns the gallery attached to this scene, if any.
func (r *sceneResolver) Gallery(ctx context.Context, obj *models.Scene) (*models.Gallery, error) {
	qb := models.NewGalleryQueryBuilder()
	return qb.FindBySceneID(obj.ID, nil)
}

// Studio returns the studio associated with this scene, if any.
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (*models.Studio, error) {
	qb := models.NewStudioQueryBuilder()
	return qb.FindBySceneID(obj.ID)
}

// Tags returns the tags attached to this scene.
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]models.Tag, error) {
	qb := models.NewTagQueryBuilder()
	return qb.FindBySceneID(obj.ID, nil)
}

// Performers returns the performers featured in this scene.
func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) ([]models.Performer, error) {
	qb := models.NewPerformerQueryBuilder()
	return qb.FindBySceneID(obj.ID, nil)
}
|
||||
48
pkg/api/resolver_model_scene_marker.go
Normal file
48
pkg/api/resolver_model_scene_marker.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ID exposes the marker's integer primary key as a GraphQL ID string.
func (r *sceneMarkerResolver) ID(ctx context.Context, obj *models.SceneMarker) (string, error) {
	return strconv.Itoa(obj.ID), nil
}

// Scene returns the scene this marker belongs to.
// Panics when the scene id column is NULL.
func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker) (models.Scene, error) {
	if !obj.SceneID.Valid {
		panic("Invalid scene id")
	}
	qb := models.NewSceneQueryBuilder()
	sceneID := int(obj.SceneID.Int64)
	scene, err := qb.Find(sceneID)
	return *scene, err
}

// PrimaryTag returns the marker's primary tag.
// Panics when the primary tag id column is NULL.
func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (models.Tag, error) {
	qb := models.NewTagQueryBuilder()
	if !obj.PrimaryTagID.Valid {
		panic("TODO no primary tag id")
	}
	tag, err := qb.Find(int(obj.PrimaryTagID.Int64), nil) // TODO make primary tag id not null in DB
	return *tag, err
}

// Tags returns the secondary tags attached to this marker.
func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) ([]models.Tag, error) {
	qb := models.NewTagQueryBuilder()
	return qb.FindBySceneMarkerID(obj.ID, nil)
}

// Stream builds the marker's stream URL from the request's base URL.
func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	sceneID := int(obj.SceneID.Int64)
	return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamURL(obj.ID), nil
}

// Preview builds the marker's preview URL from the request's base URL.
func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMarker) (string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	sceneID := int(obj.SceneID.Int64)
	return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewURL(obj.ID), nil
}
|
||||
38
pkg/api/resolver_model_studio.go
Normal file
38
pkg/api/resolver_model_studio.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ID exposes the studio's integer primary key as a GraphQL ID string.
func (r *studioResolver) ID(ctx context.Context, obj *models.Studio) (string, error) {
	return strconv.Itoa(obj.ID), nil
}

// Name returns the studio name. Panics when the name column is NULL.
func (r *studioResolver) Name(ctx context.Context, obj *models.Studio) (string, error) {
	if obj.Name.Valid {
		return obj.Name.String, nil
	}
	panic("null name") // TODO make name required
}

// URL resolves the nullable url column (nil when NULL).
func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, error) {
	if obj.URL.Valid {
		return &obj.URL.String, nil
	}
	return nil, nil
}

// ImagePath builds the studio image URL from the request's base URL
// (taken from the context).
func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageURL()
	return &imagePath, nil
}

// SceneCount returns the number of scenes attributed to this studio.
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (*int, error) {
	qb := models.NewSceneQueryBuilder()
	res, err := qb.CountByStudioID(obj.ID)
	return &res, err
}
|
||||
29
pkg/api/resolver_model_tag.go
Normal file
29
pkg/api/resolver_model_tag.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (r *tagResolver) ID(ctx context.Context, obj *models.Tag) (string, error) {
|
||||
return strconv.Itoa(obj.ID), nil
|
||||
}
|
||||
|
||||
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (*int, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
if obj == nil {
|
||||
return nil, nil
|
||||
}
|
||||
count, err := qb.CountByTagID(obj.ID)
|
||||
return &count, err
|
||||
}
|
||||
|
||||
func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (*int, error) {
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
if obj == nil {
|
||||
return nil, nil
|
||||
}
|
||||
count, err := qb.CountByTagID(obj.ID)
|
||||
return &count, err
|
||||
}
|
||||
177
pkg/api/resolver_mutation_performer.go
Normal file
177
pkg/api/resolver_mutation_performer.go
Normal file
@@ -0,0 +1,177 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// PerformerCreate creates a new performer from the GraphQL input and
// persists it within a single database transaction. Returns the created
// performer, or an error if image decoding, insert, or commit fails.
func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.PerformerCreateInput) (*models.Performer, error) {
	// Process the base 64 encoded image string
	checksum, imageData, err := utils.ProcessBase64Image(input.Image)
	if err != nil {
		return nil, err
	}

	// Populate a new performer from the input
	currentTime := time.Now()
	newPerformer := models.Performer{
		Image:     imageData,
		Checksum:  checksum,
		CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
	}
	// Optional fields: only mark the corresponding nullable column valid
	// when a value was actually supplied in the input.
	if input.Name != nil {
		newPerformer.Name = sql.NullString{String: *input.Name, Valid: true}
	}
	if input.URL != nil {
		newPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
	}
	if input.Birthdate != nil {
		newPerformer.Birthdate = sql.NullString{String: *input.Birthdate, Valid: true}
	}
	if input.Ethnicity != nil {
		newPerformer.Ethnicity = sql.NullString{String: *input.Ethnicity, Valid: true}
	}
	if input.Country != nil {
		newPerformer.Country = sql.NullString{String: *input.Country, Valid: true}
	}
	if input.EyeColor != nil {
		newPerformer.EyeColor = sql.NullString{String: *input.EyeColor, Valid: true}
	}
	if input.Height != nil {
		newPerformer.Height = sql.NullString{String: *input.Height, Valid: true}
	}
	if input.Measurements != nil {
		newPerformer.Measurements = sql.NullString{String: *input.Measurements, Valid: true}
	}
	if input.FakeTits != nil {
		newPerformer.FakeTits = sql.NullString{String: *input.FakeTits, Valid: true}
	}
	if input.CareerLength != nil {
		newPerformer.CareerLength = sql.NullString{String: *input.CareerLength, Valid: true}
	}
	if input.Tattoos != nil {
		newPerformer.Tattoos = sql.NullString{String: *input.Tattoos, Valid: true}
	}
	if input.Piercings != nil {
		newPerformer.Piercings = sql.NullString{String: *input.Piercings, Valid: true}
	}
	if input.Aliases != nil {
		newPerformer.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
	}
	if input.Twitter != nil {
		newPerformer.Twitter = sql.NullString{String: *input.Twitter, Valid: true}
	}
	if input.Instagram != nil {
		newPerformer.Instagram = sql.NullString{String: *input.Instagram, Valid: true}
	}
	// Favorite always stored as a valid value; defaults to false.
	if input.Favorite != nil {
		newPerformer.Favorite = sql.NullBool{Bool: *input.Favorite, Valid: true}
	} else {
		newPerformer.Favorite = sql.NullBool{Bool: false, Valid: true}
	}

	// Start the transaction and save the performer
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewPerformerQueryBuilder()
	performer, err := qb.Create(newPerformer, tx)
	if err != nil {
		// Rollback error intentionally discarded; the create error wins.
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return performer, nil
}
|
||||
|
||||
// PerformerUpdate updates an existing performer from the GraphQL input
// within a single database transaction. Only supplied fields are marked
// valid; the image is re-processed only when a new one is provided.
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
	// Populate performer from the input
	performerID, _ := strconv.Atoi(input.ID)
	updatedPerformer := models.Performer{
		ID:        performerID,
		UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
	}
	if input.Image != nil {
		checksum, imageData, err := utils.ProcessBase64Image(*input.Image)
		if err != nil {
			return nil, err
		}
		updatedPerformer.Image = imageData
		updatedPerformer.Checksum = checksum
	}
	// Optional fields: only mark the corresponding nullable column valid
	// when a value was actually supplied in the input.
	if input.Name != nil {
		updatedPerformer.Name = sql.NullString{String: *input.Name, Valid: true}
	}
	if input.URL != nil {
		updatedPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
	}
	if input.Birthdate != nil {
		updatedPerformer.Birthdate = sql.NullString{String: *input.Birthdate, Valid: true}
	}
	if input.Ethnicity != nil {
		updatedPerformer.Ethnicity = sql.NullString{String: *input.Ethnicity, Valid: true}
	}
	if input.Country != nil {
		updatedPerformer.Country = sql.NullString{String: *input.Country, Valid: true}
	}
	if input.EyeColor != nil {
		updatedPerformer.EyeColor = sql.NullString{String: *input.EyeColor, Valid: true}
	}
	if input.Height != nil {
		updatedPerformer.Height = sql.NullString{String: *input.Height, Valid: true}
	}
	if input.Measurements != nil {
		updatedPerformer.Measurements = sql.NullString{String: *input.Measurements, Valid: true}
	}
	if input.FakeTits != nil {
		updatedPerformer.FakeTits = sql.NullString{String: *input.FakeTits, Valid: true}
	}
	if input.CareerLength != nil {
		updatedPerformer.CareerLength = sql.NullString{String: *input.CareerLength, Valid: true}
	}
	if input.Tattoos != nil {
		updatedPerformer.Tattoos = sql.NullString{String: *input.Tattoos, Valid: true}
	}
	if input.Piercings != nil {
		updatedPerformer.Piercings = sql.NullString{String: *input.Piercings, Valid: true}
	}
	if input.Aliases != nil {
		updatedPerformer.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
	}
	if input.Twitter != nil {
		updatedPerformer.Twitter = sql.NullString{String: *input.Twitter, Valid: true}
	}
	if input.Instagram != nil {
		updatedPerformer.Instagram = sql.NullString{String: *input.Instagram, Valid: true}
	}
	// NOTE(review): an omitted Favorite is overwritten with false here
	// (unlike the other fields, which are left untouched) — confirm this
	// is the intended update semantics.
	if input.Favorite != nil {
		updatedPerformer.Favorite = sql.NullBool{Bool: *input.Favorite, Valid: true}
	} else {
		updatedPerformer.Favorite = sql.NullBool{Bool: false, Valid: true}
	}

	// Start the transaction and save the performer
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewPerformerQueryBuilder()
	performer, err := qb.Update(updatedPerformer, tx)
	if err != nil {
		// Rollback error intentionally discarded; the update error wins.
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return performer, nil
}
|
||||
208
pkg/api/resolver_mutation_scene.go
Normal file
208
pkg/api/resolver_mutation_scene.go
Normal file
@@ -0,0 +1,208 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// SceneUpdate updates a scene's scalar fields, its attached gallery, and
// its performer/tag join rows, all within one database transaction.
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (*models.Scene, error) {
	// Populate scene from the input
	sceneID, _ := strconv.Atoi(input.ID)
	updatedTime := time.Now()
	updatedScene := models.Scene{
		ID:        sceneID,
		UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
	}
	// Optional scalar fields: only mark the corresponding nullable column
	// valid when a value was supplied in the input.
	if input.Title != nil {
		updatedScene.Title = sql.NullString{String: *input.Title, Valid: true}
	}
	if input.Details != nil {
		updatedScene.Details = sql.NullString{String: *input.Details, Valid: true}
	}
	if input.URL != nil {
		updatedScene.URL = sql.NullString{String: *input.URL, Valid: true}
	}
	if input.Date != nil {
		updatedScene.Date = sql.NullString{String: *input.Date, Valid: true}
	}
	if input.Rating != nil {
		updatedScene.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
	}
	if input.StudioID != nil {
		studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
		updatedScene.StudioID = sql.NullInt64{Int64: studioID, Valid: true}
	}

	// Start the transaction and save the scene marker
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewSceneQueryBuilder()
	jqb := models.NewJoinsQueryBuilder()
	scene, err := qb.Update(updatedScene, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	if input.GalleryID != nil {
		// Save the gallery: point the given gallery at this scene.
		galleryID, _ := strconv.Atoi(*input.GalleryID)
		updatedGallery := models.Gallery{
			ID:        galleryID,
			SceneID:   sql.NullInt64{Int64: int64(sceneID), Valid: true},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
		}
		gqb := models.NewGalleryQueryBuilder()
		_, err := gqb.Update(updatedGallery, tx)
		if err != nil {
			_ = tx.Rollback()
			return nil, err
		}
	}

	// Save the performers (replaces the scene's performer join rows)
	var performerJoins []models.PerformersScenes
	for _, pid := range input.PerformerIds {
		performerID, _ := strconv.Atoi(pid)
		performerJoin := models.PerformersScenes{
			PerformerID: performerID,
			SceneID:     sceneID,
		}
		performerJoins = append(performerJoins, performerJoin)
	}
	if err := jqb.UpdatePerformersScenes(sceneID, performerJoins, tx); err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Save the tags (replaces the scene's tag join rows)
	var tagJoins []models.ScenesTags
	for _, tid := range input.TagIds {
		tagID, _ := strconv.Atoi(tid)
		tagJoin := models.ScenesTags{
			SceneID: sceneID,
			TagID:   tagID,
		}
		tagJoins = append(tagJoins, tagJoin)
	}
	if err := jqb.UpdateScenesTags(sceneID, tagJoins, tx); err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return scene, nil
}
|
||||
|
||||
// SceneMarkerCreate creates a new scene marker and its tag join rows
// within one database transaction.
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
	primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
	sceneID, _ := strconv.Atoi(input.SceneID)
	currentTime := time.Now()
	newSceneMarker := models.SceneMarker{
		Title:   input.Title,
		Seconds: input.Seconds,
		// An unparseable or zero ID is stored as NULL (Valid: false).
		PrimaryTagID: sql.NullInt64{Int64: int64(primaryTagID), Valid: primaryTagID != 0},
		SceneID:      sql.NullInt64{Int64: int64(sceneID), Valid: sceneID != 0},
		CreatedAt:    models.SQLiteTimestamp{Timestamp: currentTime},
		UpdatedAt:    models.SQLiteTimestamp{Timestamp: currentTime},
	}

	// Start the transaction and save the scene marker
	tx := database.DB.MustBeginTx(ctx, nil)
	smqb := models.NewSceneMarkerQueryBuilder()
	jqb := models.NewJoinsQueryBuilder()
	sceneMarker, err := smqb.Create(newSceneMarker, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Save the marker tags
	var markerTagJoins []models.SceneMarkersTags
	for _, tid := range input.TagIds {
		tagID, _ := strconv.Atoi(tid)
		markerTag := models.SceneMarkersTags{
			SceneMarkerID: sceneMarker.ID,
			TagID:         tagID,
		}
		markerTagJoins = append(markerTagJoins, markerTag)
	}
	if err := jqb.CreateSceneMarkersTags(markerTagJoins, tx); err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return sceneMarker, nil
}
|
||||
|
||||
// SceneMarkerUpdate replaces an existing scene marker's fields and rewrites
// its tag joins, all inside one transaction.
func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) {
	// Populate scene marker from the input. Failed Atoi parses become 0,
	// which maps to an invalid (NULL) foreign key below.
	sceneMarkerID, _ := strconv.Atoi(input.ID)
	sceneID, _ := strconv.Atoi(input.SceneID)
	primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
	updatedSceneMarker := models.SceneMarker{
		ID:           sceneMarkerID,
		Title:        input.Title,
		Seconds:      input.Seconds,
		SceneID:      sql.NullInt64{Int64: int64(sceneID), Valid: sceneID != 0},
		PrimaryTagID: sql.NullInt64{Int64: int64(primaryTagID), Valid: primaryTagID != 0},
		UpdatedAt:    models.SQLiteTimestamp{Timestamp: time.Now()},
	}

	// Start the transaction and save the scene marker
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewSceneMarkerQueryBuilder()
	jqb := models.NewJoinsQueryBuilder()
	sceneMarker, err := qb.Update(updatedSceneMarker, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Save the marker tags; UpdatePerformersScenes-style replace semantics —
	// the join table is rewritten for this marker ID.
	var markerTagJoins []models.SceneMarkersTags
	for _, tid := range input.TagIds {
		tagID, _ := strconv.Atoi(tid)
		markerTag := models.SceneMarkersTags{
			SceneMarkerID: sceneMarkerID,
			TagID:         tagID,
		}
		markerTagJoins = append(markerTagJoins, markerTag)
	}
	if err := jqb.UpdateSceneMarkersTags(sceneMarkerID, markerTagJoins, tx); err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return sceneMarker, nil
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if err := qb.Destroy(id, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return false, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
87
pkg/api/resolver_mutation_studio.go
Normal file
87
pkg/api/resolver_mutation_studio.go
Normal file
@@ -0,0 +1,87 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// StudioCreate decodes the studio's base64 image, builds a new studio row,
// and inserts it inside a transaction.
func (r *mutationResolver) StudioCreate(ctx context.Context, input models.StudioCreateInput) (*models.Studio, error) {
	// Process the base 64 encoded image string before touching the database,
	// so a bad image never opens a transaction.
	checksum, imageData, err := utils.ProcessBase64Image(input.Image)
	if err != nil {
		return nil, err
	}

	// Populate a new studio from the input
	currentTime := time.Now()
	newStudio := models.Studio{
		Image:     imageData,
		Checksum:  checksum,
		Name:      sql.NullString{String: input.Name, Valid: true},
		CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
	}
	// URL is optional on the input; leave the NullString invalid when absent.
	if input.URL != nil {
		newStudio.URL = sql.NullString{String: *input.URL, Valid: true}
	}

	// Start the transaction and save the studio
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewStudioQueryBuilder()
	studio, err := qb.Create(newStudio, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return studio, nil
}
|
||||
|
||||
// StudioUpdate applies the optional fields from the input to an existing
// studio and saves it inside a transaction. Only fields present on the input
// are written; absent ones keep their zero values in the update struct.
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
	// Populate studio from the input
	studioID, _ := strconv.Atoi(input.ID)
	updatedStudio := models.Studio{
		ID:        studioID,
		UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
	}
	if input.Image != nil {
		// Image decode happens before the transaction is opened.
		checksum, imageData, err := utils.ProcessBase64Image(*input.Image)
		if err != nil {
			return nil, err
		}
		updatedStudio.Image = imageData
		updatedStudio.Checksum = checksum
	}
	if input.Name != nil {
		updatedStudio.Name = sql.NullString{String: *input.Name, Valid: true}
	}
	if input.URL != nil {
		updatedStudio.URL = sql.NullString{String: *input.URL, Valid: true}
	}

	// Start the transaction and save the studio
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewStudioQueryBuilder()
	studio, err := qb.Update(updatedStudio, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return studio, nil
}
|
||||
74
pkg/api/resolver_mutation_tag.go
Normal file
74
pkg/api/resolver_mutation_tag.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TagCreate inserts a new tag with the given name inside a transaction.
func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreateInput) (*models.Tag, error) {
	// Populate a new tag from the input
	currentTime := time.Now()
	newTag := models.Tag{
		Name:      input.Name,
		CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
	}

	// Start the transaction and save the tag
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewTagQueryBuilder()
	tag, err := qb.Create(newTag, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return tag, nil
}
|
||||
|
||||
// TagUpdate renames an existing tag (identified by input.ID) inside a
// transaction and refreshes its UpdatedAt timestamp.
func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) {
	// Populate tag from the input; a failed Atoi yields ID 0.
	tagID, _ := strconv.Atoi(input.ID)
	updatedTag := models.Tag{
		ID:        tagID,
		Name:      input.Name,
		UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
	}

	// Start the transaction and save the tag
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewTagQueryBuilder()
	tag, err := qb.Update(updatedTag, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return tag, nil
}
|
||||
|
||||
func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if err := qb.Destroy(input.ID, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return false, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
22
pkg/api/resolver_query_find_gallery.go
Normal file
22
pkg/api/resolver_query_find_gallery.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gallery, error) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt)
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindGalleries(ctx context.Context, filter *models.FindFilterType) (models.FindGalleriesResultType, error) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
galleries, total := qb.Query(filter)
|
||||
return models.FindGalleriesResultType{
|
||||
Count: total,
|
||||
Galleries: galleries,
|
||||
}, nil
|
||||
}
|
||||
27
pkg/api/resolver_query_find_performer.go
Normal file
27
pkg/api/resolver_query_find_performer.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (*models.Performer, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt)
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindPerformers(ctx context.Context, performer_filter *models.PerformerFilterType, filter *models.FindFilterType) (models.FindPerformersResultType, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
performers, total := qb.Query(performer_filter, filter)
|
||||
return models.FindPerformersResultType{
|
||||
Count: total,
|
||||
Performers: performers,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllPerformers(ctx context.Context) ([]models.Performer, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
return qb.All()
|
||||
}
|
||||
29
pkg/api/resolver_query_find_scene.go
Normal file
29
pkg/api/resolver_query_find_scene.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(*id)
|
||||
var scene *models.Scene
|
||||
var err error
|
||||
if id != nil {
|
||||
scene, err = qb.Find(idInt)
|
||||
} else if checksum != nil {
|
||||
scene, err = qb.FindByChecksum(*checksum)
|
||||
}
|
||||
return scene, err
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindScenes(ctx context.Context, scene_filter *models.SceneFilterType, scene_ids []int, filter *models.FindFilterType) (models.FindScenesResultType, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
scenes, total := qb.Query(scene_filter, filter)
|
||||
return models.FindScenesResultType{
|
||||
Count: total,
|
||||
Scenes: scenes,
|
||||
}, nil
|
||||
}
|
||||
15
pkg/api/resolver_query_find_scene_marker.go
Normal file
15
pkg/api/resolver_query_find_scene_marker.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindSceneMarkers(ctx context.Context, scene_marker_filter *models.SceneMarkerFilterType, filter *models.FindFilterType) (models.FindSceneMarkersResultType, error) {
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, total := qb.Query(scene_marker_filter, filter)
|
||||
return models.FindSceneMarkersResultType{
|
||||
Count: total,
|
||||
SceneMarkers: sceneMarkers,
|
||||
}, nil
|
||||
}
|
||||
27
pkg/api/resolver_query_find_studio.go
Normal file
27
pkg/api/resolver_query_find_studio.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindStudio(ctx context.Context, id string) (*models.Studio, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt, nil)
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindStudios(ctx context.Context, filter *models.FindFilterType) (models.FindStudiosResultType, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
studios, total := qb.Query(filter)
|
||||
return models.FindStudiosResultType{
|
||||
Count: total,
|
||||
Studios: studios,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllStudios(ctx context.Context) ([]models.Studio, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
return qb.All()
|
||||
}
|
||||
18
pkg/api/resolver_query_find_tag.go
Normal file
18
pkg/api/resolver_query_find_tag.go
Normal file
@@ -0,0 +1,18 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindTag(ctx context.Context, id string) (*models.Tag, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt, nil)
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllTags(ctx context.Context) ([]models.Tag, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
return qb.All()
|
||||
}
|
||||
30
pkg/api/resolver_query_metadata.go
Normal file
30
pkg/api/resolver_query_metadata.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
)
|
||||
|
||||
// MetadataScan triggers a library scan via the manager singleton. The return
// value is a placeholder ("todo").
func (r *queryResolver) MetadataScan(ctx context.Context) (string, error) {
	manager.GetInstance().Scan()
	return "todo", nil
}
|
||||
|
||||
// MetadataImport triggers a metadata import via the manager singleton. The
// return value is a placeholder ("todo").
func (r *queryResolver) MetadataImport(ctx context.Context) (string, error) {
	manager.GetInstance().Import()
	return "todo", nil
}
|
||||
|
||||
// MetadataExport triggers a metadata export via the manager singleton. The
// return value is a placeholder ("todo").
func (r *queryResolver) MetadataExport(ctx context.Context) (string, error) {
	manager.GetInstance().Export()
	return "todo", nil
}
|
||||
|
||||
// MetadataGenerate triggers generation with all four artifact flags enabled
// (presumably sprites/previews/markers/transcodes — confirm against the
// manager's Generate signature). The return value is a placeholder ("todo").
func (r *queryResolver) MetadataGenerate(ctx context.Context) (string, error) {
	manager.GetInstance().Generate(true, true, true, true)
	return "todo", nil
}
|
||||
|
||||
// MetadataClean is not yet implemented and always panics.
func (r *queryResolver) MetadataClean(ctx context.Context) (string, error) {
	panic("not implemented")
}
|
||||
28
pkg/api/resolver_subscription_metadata.go
Normal file
28
pkg/api/resolver_subscription_metadata.go
Normal file
@@ -0,0 +1,28 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"time"
|
||||
)
|
||||
|
||||
func (r *subscriptionResolver) MetadataUpdate(ctx context.Context) (<-chan string, error) {
|
||||
msg := make(chan string, 1)
|
||||
|
||||
ticker := time.NewTicker(5 * time.Second)
|
||||
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case _ = <-ticker.C:
|
||||
manager.GetInstance().HandleMetadataUpdateSubscriptionTick(msg)
|
||||
case <-ctx.Done():
|
||||
ticker.Stop()
|
||||
close(msg)
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return msg, nil
|
||||
}
|
||||
54
pkg/api/routes_gallery.go
Normal file
54
pkg/api/routes_gallery.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// galleryRoutes groups the HTTP handlers for the /gallery endpoints.
type galleryRoutes struct{}

// Routes builds the chi sub-router serving gallery file content. Every route
// under /{galleryId} loads the gallery into the request context first.
func (rs galleryRoutes) Routes() chi.Router {
	r := chi.NewRouter()

	r.Route("/{galleryId}", func(r chi.Router) {
		r.Use(GalleryCtx)
		r.Get("/{fileIndex}", rs.File)
	})

	return r
}
|
||||
|
||||
func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
|
||||
gallery := r.Context().Value(galleryKey).(*models.Gallery)
|
||||
fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex"))
|
||||
thumb := r.URL.Query().Get("thumb")
|
||||
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week
|
||||
if thumb == "true" {
|
||||
_, _ = w.Write(gallery.GetThumbnail(fileIndex))
|
||||
} else {
|
||||
_, _ = w.Write(gallery.GetImage(fileIndex))
|
||||
}
|
||||
}
|
||||
|
||||
func GalleryCtx(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
galleryID, err := strconv.Atoi(chi.URLParam(r, "galleryId"))
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
gallery, err := qb.Find(galleryID)
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.WithValue(r.Context(), galleryKey, gallery)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
47
pkg/api/routes_performer.go
Normal file
47
pkg/api/routes_performer.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// performerRoutes groups the HTTP handlers for the /performer endpoints.
type performerRoutes struct{}

// Routes builds the chi sub-router serving performer content. The performer
// is loaded into the request context before any handler runs.
func (rs performerRoutes) Routes() chi.Router {
	r := chi.NewRouter()

	r.Route("/{performerId}", func(r chi.Router) {
		r.Use(PerformerCtx)
		r.Get("/image", rs.Image)
	})

	return r
}
|
||||
|
||||
// Image writes the raw image bytes of the performer stored in the request
// context by PerformerCtx.
func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
	performer := r.Context().Value(performerKey).(*models.Performer)
	_, _ = w.Write(performer.Image)
}
|
||||
|
||||
func PerformerCtx(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
performerID, err := strconv.Atoi(chi.URLParam(r, "performerId"))
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
performer, err := qb.Find(performerID)
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.WithValue(r.Context(), performerKey, performer)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
151
pkg/api/routes_scene.go
Normal file
151
pkg/api/routes_scene.go
Normal file
@@ -0,0 +1,151 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// sceneRoutes groups the HTTP handlers for the /scene endpoints.
type sceneRoutes struct{}

// Routes builds the chi sub-router for scene streaming, artwork, VTT, and
// scene-marker media. SceneCtx loads the scene into the request context for
// every route; the two VTT/sprite routes attach it per-route because their
// patterns embed the scene ID in the filename.
func (rs sceneRoutes) Routes() chi.Router {
	r := chi.NewRouter()

	r.Route("/{sceneId}", func(r chi.Router) {
		r.Use(SceneCtx)
		r.Get("/stream", rs.Stream)
		r.Get("/stream.mp4", rs.Stream)
		r.Get("/screenshot", rs.Screenshot)
		r.Get("/preview", rs.Preview)
		r.Get("/webp", rs.Webp)
		r.Get("/vtt/chapter", rs.ChapterVtt)

		r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
		r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
	})
	r.With(SceneCtx).Get("/{sceneId}_thumbs.vtt", rs.VttThumbs)
	r.With(SceneCtx).Get("/{sceneId}_sprite.jpg", rs.VttSprite)

	return r
}
|
||||
|
||||
// region Handlers
|
||||
|
||||
// Stream serves the transcoded stream file for the scene in the request
// context. http.ServeFile supplies range-request support for seeking.
func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum)
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
// Screenshot serves the scene's screenshot image from the generated-files
// directory.
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum)
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
// Preview serves the scene's generated video preview.
func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.Checksum)
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
// Webp serves the scene's generated animated image preview.
func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.Checksum)
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, err := qb.FindBySceneID(scene.ID, nil)
|
||||
if err != nil {
|
||||
panic("invalid scene markers for chapter vtt")
|
||||
}
|
||||
|
||||
vttLines := []string{"WEBVTT", ""}
|
||||
for _, marker := range sceneMarkers {
|
||||
time := utils.GetVTTTime(marker.Seconds)
|
||||
vttLines = append(vttLines, time+" --> "+time)
|
||||
vttLines = append(vttLines, marker.Title)
|
||||
vttLines = append(vttLines, "")
|
||||
}
|
||||
vtt := strings.Join(vttLines, "\n")
|
||||
|
||||
w.Header().Set("Content-Type", "text/vtt")
|
||||
_, _ = w.Write([]byte(vtt))
|
||||
}
|
||||
|
||||
// VttThumbs serves the scene's sprite-thumbnail VTT file.
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	w.Header().Set("Content-Type", "text/vtt")
	filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.Checksum)
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
// VttSprite serves the sprite image referenced by the scene's thumbnail VTT.
func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	w.Header().Set("Content-Type", "image/jpeg")
	filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.Checksum)
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
// SceneMarkerStream serves the generated video clip for one scene marker,
// located by the scene checksum and the marker's timestamp in whole seconds.
func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) {
	scene := r.Context().Value(sceneKey).(*models.Scene)
	sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
	qb := models.NewSceneMarkerQueryBuilder()
	sceneMarker, err := qb.Find(sceneMarkerID)
	if err != nil {
		logger.Warn("Error when getting scene marker for stream")
		http.Error(w, http.StatusText(404), 404)
		return
	}
	filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, int(sceneMarker.Seconds))
	http.ServeFile(w, r, filepath)
}
|
||||
|
||||
func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarker, err := qb.Find(sceneMarkerID)
|
||||
if err != nil {
|
||||
logger.Warn("Error when getting scene marker for stream")
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, int(sceneMarker.Seconds))
|
||||
http.ServeFile(w, r, filepath)
|
||||
}
|
||||
|
||||
// endregion
|
||||
|
||||
// SceneCtx is middleware that resolves the {sceneId} URL parameter to a
// *models.Scene and stores it in the request context under sceneKey. The
// parameter may be either a numeric ID or a checksum string; a failed Atoi
// (result 0) is treated as "not numeric" and triggers a checksum lookup.
// Unknown scenes get a 404.
func SceneCtx(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		sceneIdentifierQueryParam := chi.URLParam(r, "sceneId")
		sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)

		var scene *models.Scene
		var err error
		qb := models.NewSceneQueryBuilder()
		if sceneID == 0 {
			// NOTE(review): a literal scene ID of 0 would also take the
			// checksum path — acceptable if IDs are 1-based; confirm.
			scene, err = qb.FindByChecksum(sceneIdentifierQueryParam)
		} else {
			scene, err = qb.Find(sceneID)
		}

		if err != nil {
			http.Error(w, http.StatusText(404), 404)
			return
		}

		ctx := context.WithValue(r.Context(), sceneKey, scene)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}
|
||||
47
pkg/api/routes_studio.go
Normal file
47
pkg/api/routes_studio.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// studioRoutes groups the HTTP handlers for the /studio endpoints.
type studioRoutes struct{}

// Routes builds the chi sub-router serving studio content. The studio is
// loaded into the request context before any handler runs.
func (rs studioRoutes) Routes() chi.Router {
	r := chi.NewRouter()

	r.Route("/{studioId}", func(r chi.Router) {
		r.Use(StudioCtx)
		r.Get("/image", rs.Image)
	})

	return r
}
|
||||
|
||||
// Image writes the raw image bytes of the studio stored in the request
// context by StudioCtx.
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
	studio := r.Context().Value(studioKey).(*models.Studio)
	_, _ = w.Write(studio.Image)
}
|
||||
|
||||
func StudioCtx(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
studioID, err := strconv.Atoi(chi.URLParam(r, "studioId"))
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
studio, err := qb.Find(studioID, nil)
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.WithValue(r.Context(), studioKey, studio)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
229
pkg/api/server.go
Normal file
229
pkg/api/server.go
Normal file
@@ -0,0 +1,229 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/99designs/gqlgen/handler"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/go-chi/chi/middleware"
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
"github.com/gorilla/websocket"
|
||||
"github.com/rs/cors"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const httpPort = "9998"
|
||||
const httpsPort = "9999"
|
||||
|
||||
var certsBox *packr.Box
|
||||
var uiBox *packr.Box
|
||||
var setupUIBox *packr.Box
|
||||
|
||||
// Start wires up the full HTTP surface — GraphQL endpoint, media routes,
// setup UI, and the embedded frontend — and launches HTTP (9998) and HTTPS
// (9999) listeners on background goroutines. It returns immediately; the
// servers run for the life of the process.
func Start() {
	//port := os.Getenv("PORT")
	//if port == "" {
	//	port = defaultPort
	//}

	// Packr boxes embed static assets compiled into the binary.
	certsBox = packr.New("Cert Box", "../../certs")
	uiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
	setupUIBox = packr.New("Setup UI Box", "../../ui/setup")

	r := chi.NewRouter()

	r.Use(middleware.Recoverer)
	r.Use(middleware.Logger)
	r.Use(middleware.DefaultCompress)
	r.Use(middleware.StripSlashes)
	r.Use(cors.AllowAll().Handler)
	r.Use(BaseURLMiddleware)
	r.Use(ConfigCheckMiddleware)

	// GraphQL panics are logged with a stack trace and surfaced to the
	// client as a generic internal-error message.
	recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error {
		logger.Error(err)
		debug.PrintStack()

		message := fmt.Sprintf("Internal system error. Error <%v>", err)
		return errors.New(message)
	})
	requestMiddleware := handler.RequestMiddleware(func(ctx context.Context, next func(ctx context.Context) []byte) []byte {
		//api.GetRequestContext(ctx).Variables[]
		return next(ctx)
	})
	// Subscriptions accept any origin; access control is not enforced here.
	websocketUpgrader := handler.WebsocketUpgrader(websocket.Upgrader{
		CheckOrigin: func(r *http.Request) bool {
			return true
		},
	})
	gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: &Resolver{}}), recoverFunc, requestMiddleware, websocketUpgrader)

	// https://stash.server:9999/certs/server.crt
	r.Handle("/certs/*", http.FileServer(certsBox))

	r.Handle("/graphql", gqlHandler)
	r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))

	// Media sub-routers.
	r.Mount("/gallery", galleryRoutes{}.Routes())
	r.Mount("/performer", performerRoutes{}.Routes())
	r.Mount("/scene", sceneRoutes{}.Routes())
	r.Mount("/studio", studioRoutes{}.Routes())

	// Serve the setup UI: page requests get index.html, asset requests are
	// served from the box with the /setup prefix stripped.
	r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) {
		ext := path.Ext(r.URL.Path)
		if ext == ".html" || ext == "" {
			data := setupUIBox.Bytes("index.html")
			_, _ = w.Write(data)
		} else {
			r.URL.Path = strings.Replace(r.URL.Path, "/setup", "", 1)
			http.FileServer(setupUIBox).ServeHTTP(w, r)
		}
	})
	// First-run initialization: validates the submitted directories, creates
	// the downloads dir, and writes the config file.
	r.Post("/init", func(w http.ResponseWriter, r *http.Request) {
		err := r.ParseForm()
		if err != nil {
			// NOTE(review): execution continues after this error response —
			// confirm whether a `return` is intended here.
			http.Error(w, fmt.Sprintf("error: %s", err), 500)
		}
		stash := filepath.Clean(r.Form.Get("stash"))
		metadata := filepath.Clean(r.Form.Get("metadata"))
		cache := filepath.Clean(r.Form.Get("cache"))
		//downloads := filepath.Clean(r.Form.Get("downloads")) // TODO
		downloads := filepath.Join(metadata, "downloads")

		// Each configured path must exist and be a directory.
		exists, _ := utils.FileExists(stash)
		fileInfo, _ := os.Stat(stash)
		if !exists || !fileInfo.IsDir() {
			http.Error(w, fmt.Sprintf("the stash path either doesn't exist, or is not a directory <%s>. Go back and try again.", stash), 500)
			return
		}

		exists, _ = utils.FileExists(metadata)
		fileInfo, _ = os.Stat(metadata)
		if !exists || !fileInfo.IsDir() {
			http.Error(w, fmt.Sprintf("the metadata path either doesn't exist, or is not a directory <%s> Go back and try again.", metadata), 500)
			return
		}

		exists, _ = utils.FileExists(cache)
		fileInfo, _ = os.Stat(cache)
		if !exists || !fileInfo.IsDir() {
			http.Error(w, fmt.Sprintf("the cache path either doesn't exist, or is not a directory <%s> Go back and try again.", cache), 500)
			return
		}

		// Best-effort create; an existing directory is fine.
		_ = os.Mkdir(downloads, 0755)

		config := &jsonschema.Config{
			Stash:     stash,
			Metadata:  metadata,
			Cache:     cache,
			Downloads: downloads,
		}
		if err := manager.GetInstance().SaveConfig(config); err != nil {
			http.Error(w, fmt.Sprintf("there was an error saving the config file: %s", err), 500)
			return
		}

		http.Redirect(w, r, "/", 301)
	})

	// Serve the angular app: page requests get index.html so client-side
	// routing works; everything else is served from the UI box.
	r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
		ext := path.Ext(r.URL.Path)
		if ext == ".html" || ext == "" {
			data := uiBox.Bytes("index.html")
			_, _ = w.Write(data)
		} else {
			http.FileServer(uiBox).ServeHTTP(w, r)
		}
	})

	httpsServer := &http.Server{
		Addr:      ":" + httpsPort,
		Handler:   r,
		TLSConfig: makeTLSConfig(),
	}
	server := &http.Server{
		Addr:    ":" + httpPort,
		Handler: r,
	}

	go func() {
		logger.Infof("stash is running on HTTP at http://localhost:9998/")
		logger.Fatal(server.ListenAndServe())
	}()

	go func() {
		logger.Infof("stash is running on HTTPS at https://localhost:9999/")
		// Empty cert/key paths: the certificate comes from TLSConfig above.
		logger.Fatal(httpsServer.ListenAndServeTLS("", ""))
	}()
}
|
||||
|
||||
func makeTLSConfig() *tls.Config {
|
||||
cert, err := certsBox.Find("server.crt")
|
||||
key, err := certsBox.Find("server.key")
|
||||
|
||||
certs := make([]tls.Certificate, 1)
|
||||
certs[0], err = tls.X509KeyPair(cert, key)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
tlsConfig := &tls.Config{
|
||||
Certificates: certs,
|
||||
}
|
||||
|
||||
return tlsConfig
|
||||
}
|
||||
|
||||
type contextKey struct {
|
||||
name string
|
||||
}
|
||||
|
||||
var (
|
||||
BaseURLCtxKey = &contextKey{"BaseURL"}
|
||||
)
|
||||
|
||||
func BaseURLMiddleware(next http.Handler) http.Handler {
|
||||
fn := func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
var scheme string
|
||||
if strings.Compare("https", r.URL.Scheme) == 0 || r.Proto == "HTTP/2.0" {
|
||||
scheme = "https"
|
||||
} else {
|
||||
scheme = "http"
|
||||
}
|
||||
baseURL := scheme + "://" + r.Host
|
||||
|
||||
r = r.WithContext(context.WithValue(ctx, BaseURLCtxKey, baseURL))
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
}
|
||||
return http.HandlerFunc(fn)
|
||||
}
|
||||
|
||||
func ConfigCheckMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
ext := path.Ext(r.URL.Path)
|
||||
shouldRedirect := ext == "" && r.Method == "GET" && r.URL.Path != "/init"
|
||||
if !manager.HasValidConfig() && shouldRedirect {
|
||||
if !strings.HasPrefix(r.URL.Path, "/setup") {
|
||||
http.Redirect(w, r, "/setup", 301)
|
||||
return
|
||||
}
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
19
pkg/api/urlbuilders/gallery.go
Normal file
19
pkg/api/urlbuilders/gallery.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package urlbuilders
|
||||
|
||||
import "strconv"
|
||||
|
||||
// GalleryURLBuilder builds API URLs for a single gallery.
type GalleryURLBuilder struct {
	BaseURL   string
	GalleryID string
}

// NewGalleryURLBuilder captures the server base URL and the gallery's id.
func NewGalleryURLBuilder(baseURL string, galleryID int) GalleryURLBuilder {
	return GalleryURLBuilder{BaseURL: baseURL, GalleryID: strconv.Itoa(galleryID)}
}

// GetGalleryImageURL returns the URL serving image number fileIndex of this
// gallery.
func (b GalleryURLBuilder) GetGalleryImageURL(fileIndex int) string {
	prefix := b.BaseURL + "/gallery/" + b.GalleryID
	return prefix + "/" + strconv.Itoa(fileIndex)
}
|
||||
19
pkg/api/urlbuilders/performer.go
Normal file
19
pkg/api/urlbuilders/performer.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package urlbuilders
|
||||
|
||||
import "strconv"
|
||||
|
||||
// PerformerURLBuilder builds API URLs for a single performer.
type PerformerURLBuilder struct {
	BaseURL     string
	PerformerID string
}

// NewPerformerURLBuilder captures the server base URL and the performer's id.
func NewPerformerURLBuilder(baseURL string, performerID int) PerformerURLBuilder {
	return PerformerURLBuilder{BaseURL: baseURL, PerformerID: strconv.Itoa(performerID)}
}

// GetPerformerImageURL returns the URL serving this performer's image.
func (b PerformerURLBuilder) GetPerformerImageURL() string {
	prefix := b.BaseURL + "/performer/" + b.PerformerID
	return prefix + "/image"
}
|
||||
47
pkg/api/urlbuilders/scene.go
Normal file
47
pkg/api/urlbuilders/scene.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package urlbuilders
|
||||
|
||||
import "strconv"
|
||||
|
||||
// SceneURLBuilder builds the API URLs exposed for a single scene and its
// derived assets (stream, previews, sprites, chapters, markers).
type SceneURLBuilder struct {
	BaseURL string
	SceneID string
}

// NewSceneURLBuilder captures the server base URL and the scene's id.
func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder {
	return SceneURLBuilder{BaseURL: baseURL, SceneID: strconv.Itoa(sceneID)}
}

// root is the shared "<base>/scene/<id>" prefix of every scene URL.
func (b SceneURLBuilder) root() string {
	return b.BaseURL + "/scene/" + b.SceneID
}

// GetStreamURL returns the MP4 stream URL for the scene.
func (b SceneURLBuilder) GetStreamURL() string {
	return b.root() + "/stream.mp4"
}

// GetStreamPreviewURL returns the video-preview URL for the scene.
func (b SceneURLBuilder) GetStreamPreviewURL() string {
	return b.root() + "/preview"
}

// GetStreamPreviewImageURL returns the animated WebP preview URL.
func (b SceneURLBuilder) GetStreamPreviewImageURL() string {
	return b.root() + "/webp"
}

// GetSpriteVTTURL returns the sprite-thumbnail WebVTT URL (note: the id is
// suffixed with "_thumbs.vtt" with no separating slash).
func (b SceneURLBuilder) GetSpriteVTTURL() string {
	return b.root() + "_thumbs.vtt"
}

// GetScreenshotURL returns the still-screenshot URL for the scene.
func (b SceneURLBuilder) GetScreenshotURL() string {
	return b.root() + "/screenshot"
}

// GetChaptersVTTURL returns the chapter-marker WebVTT URL.
func (b SceneURLBuilder) GetChaptersVTTURL() string {
	return b.root() + "/vtt/chapter"
}

// GetSceneMarkerStreamURL returns the stream URL for one scene marker.
func (b SceneURLBuilder) GetSceneMarkerStreamURL(sceneMarkerID int) string {
	return b.root() + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/stream"
}

// GetSceneMarkerStreamPreviewURL returns the preview URL for one scene marker.
func (b SceneURLBuilder) GetSceneMarkerStreamPreviewURL(sceneMarkerID int) string {
	return b.root() + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/preview"
}
|
||||
19
pkg/api/urlbuilders/studio.go
Normal file
19
pkg/api/urlbuilders/studio.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package urlbuilders
|
||||
|
||||
import "strconv"
|
||||
|
||||
// StudioURLBuilder builds API URLs for a single studio.
type StudioURLBuilder struct {
	BaseURL  string
	StudioID string
}

// NewStudioURLBuilder captures the server base URL and the studio's id.
func NewStudioURLBuilder(baseURL string, studioID int) StudioURLBuilder {
	return StudioURLBuilder{BaseURL: baseURL, StudioID: strconv.Itoa(studioID)}
}

// GetStudioImageURL returns the URL serving this studio's image.
func (b StudioURLBuilder) GetStudioImageURL() string {
	prefix := b.BaseURL + "/studio/" + b.StudioID
	return prefix + "/image"
}
|
||||
8
pkg/database/database-packr.go
Normal file
8
pkg/database/database-packr.go
Normal file
@@ -0,0 +1,8 @@
|
||||
// +build !skippackr
|
||||
// Code generated by github.com/gobuffalo/packr/v2. DO NOT EDIT.
|
||||
|
||||
// You can use the "packr clean" command to clean up this,
|
||||
// and any other packr generated files.
|
||||
package database
|
||||
|
||||
import _ "github.com/stashapp/stash/packrd"
|
||||
64
pkg/database/database.go
Normal file
64
pkg/database/database.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
"github.com/golang-migrate/migrate/v4"
|
||||
"github.com/golang-migrate/migrate/v4/source"
|
||||
"github.com/jmoiron/sqlx"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"os"
|
||||
)
|
||||
|
||||
// DB is the process-wide database handle; it is set by Initialize and
// replaced by Reset.
var DB *sqlx.DB

// appSchemaVersion is the migration version this build of the code expects
// the database to be at; runMigrations steps the schema to match it.
var appSchemaVersion uint = 1
|
||||
|
||||
func Initialize(databasePath string) {
|
||||
runMigrations(databasePath)
|
||||
|
||||
// https://github.com/mattn/go-sqlite3
|
||||
conn, err := sqlx.Open("sqlite3", "file:"+databasePath+"?_fk=true")
|
||||
conn.SetMaxOpenConns(25)
|
||||
conn.SetMaxIdleConns(4)
|
||||
if err != nil {
|
||||
logger.Fatalf("db.Open(): %q\n", err)
|
||||
}
|
||||
DB = conn
|
||||
}
|
||||
|
||||
// Reset closes the current connection, deletes the database file on disk,
// and re-initializes a fresh, fully migrated database at the same path.
// Errors from Close and Remove are deliberately ignored: the handle may
// already be closed and the file may not exist.
func Reset(databasePath string) {
	_ = DB.Close()
	_ = os.Remove(databasePath)
	Initialize(databasePath)
}
|
||||
|
||||
// Migrate the database
|
||||
func runMigrations(databasePath string) {
|
||||
migrationsBox := packr.New("Migrations Box", "./migrations")
|
||||
packrSource := &Packr2Source{
|
||||
Box: migrationsBox,
|
||||
Migrations: source.NewMigrations(),
|
||||
}
|
||||
|
||||
databasePath = utils.FixWindowsPath(databasePath)
|
||||
s, _ := WithInstance(packrSource)
|
||||
m, err := migrate.NewWithSourceInstance(
|
||||
"packr2",
|
||||
s,
|
||||
fmt.Sprintf("sqlite3://%s", "file:"+databasePath),
|
||||
)
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
|
||||
databaseSchemaVersion, _, _ := m.Version()
|
||||
stepNumber := appSchemaVersion - databaseSchemaVersion
|
||||
if stepNumber != 0 {
|
||||
err = m.Steps(int(stepNumber))
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
}
|
||||
}
|
||||
1
pkg/database/migrations/1_initial.down.sql
Normal file
1
pkg/database/migrations/1_initial.down.sql
Normal file
@@ -0,0 +1 @@
|
||||
-- Tear down everything created by 1_initial.up.sql (the previous version
-- dropped only `scenes`, leaving nine tables behind). Join/child tables go
-- first so foreign-key references never dangle.
DROP TABLE IF EXISTS `galleries`;
DROP TABLE IF EXISTS `performers_scenes`;
DROP TABLE IF EXISTS `scene_markers_tags`;
DROP TABLE IF EXISTS `scene_markers`;
DROP TABLE IF EXISTS `scenes_tags`;
DROP TABLE IF EXISTS `scraped_items`;
DROP TABLE IF EXISTS `scenes`;
DROP TABLE IF EXISTS `performers`;
DROP TABLE IF EXISTS `studios`;
DROP TABLE IF EXISTS `tags`;
|
||||
139
pkg/database/migrations/1_initial.up.sql
Normal file
139
pkg/database/migrations/1_initial.up.sql
Normal file
@@ -0,0 +1,139 @@
|
||||
-- Initial schema: core entities (tags, studios, scenes, scene markers,
-- performers, galleries, scraped items) plus their join tables, followed by
-- uniqueness constraints and lookup indexes.

-- Free-form tags attached to scenes and scene markers.
CREATE TABLE `tags` (
  `id` integer not null primary key autoincrement,
  `name` varchar(255),
  `created_at` datetime not null,
  `updated_at` datetime not null
);
-- Production studios; `image` stores the logo bytes inline.
CREATE TABLE `studios` (
  `id` integer not null primary key autoincrement,
  `image` blob not null,
  `checksum` varchar(255) not null,
  `name` varchar(255),
  `url` varchar(255),
  `created_at` datetime not null,
  `updated_at` datetime not null
);
-- Items produced by the scraper, optionally linked to a studio.
CREATE TABLE `scraped_items` (
  `id` integer not null primary key autoincrement,
  `title` varchar(255),
  `description` text,
  `url` varchar(255),
  `date` date,
  `rating` varchar(255),
  `tags` varchar(510),
  `models` varchar(510),
  `episode` integer,
  `gallery_filename` varchar(255),
  `gallery_url` varchar(510),
  `video_filename` varchar(255),
  `video_url` varchar(255),
  `studio_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`studio_id`) references `studios`(`id`)
);
-- Scene <-> tag join table.
CREATE TABLE `scenes_tags` (
  `scene_id` integer,
  `tag_id` integer,
  foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
  foreign key(`tag_id`) references `tags`(`id`)
);
-- Video files plus the metadata ffprobe extracts from them.
CREATE TABLE `scenes` (
  `id` integer not null primary key autoincrement,
  `path` varchar(510) not null,
  `checksum` varchar(255) not null,
  `title` varchar(255),
  `details` text,
  `url` varchar(255),
  `date` date,
  `rating` tinyint,
  `size` varchar(255),
  `duration` float,
  `video_codec` varchar(255),
  `audio_codec` varchar(255),
  `width` tinyint,
  `height` tinyint,
  `framerate` float,
  `bitrate` integer,
  `studio_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE
);
-- Scene marker <-> tag join table.
CREATE TABLE `scene_markers_tags` (
  `scene_marker_id` integer,
  `tag_id` integer,
  foreign key(`scene_marker_id`) references `scene_markers`(`id`) on delete CASCADE,
  foreign key(`tag_id`) references `tags`(`id`)
);
-- Timestamped bookmarks within a scene.
CREATE TABLE `scene_markers` (
  `id` integer not null primary key autoincrement,
  `title` varchar(255) not null,
  `seconds` float not null,
  `primary_tag_id` integer,
  `scene_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`primary_tag_id`) references `tags`(`id`),
  foreign key(`scene_id`) references `scenes`(`id`)
);
-- Performer <-> scene join table.
CREATE TABLE `performers_scenes` (
  `performer_id` integer,
  `scene_id` integer,
  foreign key(`performer_id`) references `performers`(`id`),
  foreign key(`scene_id`) references `scenes`(`id`)
);
-- Performers; `image` stores the portrait bytes inline.
CREATE TABLE `performers` (
  `id` integer not null primary key autoincrement,
  `image` blob not null,
  `checksum` varchar(255) not null,
  `name` varchar(255),
  `url` varchar(255),
  `twitter` varchar(255),
  `instagram` varchar(255),
  `birthdate` date,
  `ethnicity` varchar(255),
  `country` varchar(255),
  `eye_color` varchar(255),
  `height` varchar(255),
  `measurements` varchar(255),
  `fake_tits` varchar(255),
  `career_length` varchar(255),
  `tattoos` varchar(255),
  `piercings` varchar(255),
  `aliases` varchar(255),
  `favorite` boolean not null default '0',
  `created_at` datetime not null,
  `updated_at` datetime not null
);
-- Image galleries on disk, optionally linked to a scene.
CREATE TABLE `galleries` (
  `id` integer not null primary key autoincrement,
  `path` varchar(510) not null,
  `checksum` varchar(255) not null,
  `scene_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`scene_id`) references `scenes`(`id`)
);
-- Uniqueness constraints and lookup indexes.
CREATE UNIQUE INDEX `studios_checksum_unique` on `studios` (`checksum`);
CREATE UNIQUE INDEX `scenes_path_unique` on `scenes` (`path`);
CREATE UNIQUE INDEX `scenes_checksum_unique` on `scenes` (`checksum`);
CREATE UNIQUE INDEX `performers_checksum_unique` on `performers` (`checksum`);
CREATE INDEX `index_tags_on_name` on `tags` (`name`);
CREATE INDEX `index_studios_on_name` on `studios` (`name`);
CREATE INDEX `index_studios_on_checksum` on `studios` (`checksum`);
CREATE INDEX `index_scraped_items_on_studio_id` on `scraped_items` (`studio_id`);
CREATE INDEX `index_scenes_tags_on_tag_id` on `scenes_tags` (`tag_id`);
CREATE INDEX `index_scenes_tags_on_scene_id` on `scenes_tags` (`scene_id`);
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);
CREATE INDEX `index_scene_markers_tags_on_tag_id` on `scene_markers_tags` (`tag_id`);
CREATE INDEX `index_scene_markers_tags_on_scene_marker_id` on `scene_markers_tags` (`scene_marker_id`);
CREATE INDEX `index_scene_markers_on_scene_id` on `scene_markers` (`scene_id`);
CREATE INDEX `index_scene_markers_on_primary_tag_id` on `scene_markers` (`primary_tag_id`);
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
CREATE INDEX `index_performers_on_name` on `performers` (`name`);
CREATE INDEX `index_performers_on_checksum` on `performers` (`checksum`);
CREATE INDEX `index_galleries_on_scene_id` on `galleries` (`scene_id`);
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
|
||||
91
pkg/database/packr_source.go
Normal file
91
pkg/database/packr_source.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
"github.com/golang-migrate/migrate/v4"
|
||||
"github.com/golang-migrate/migrate/v4/source"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
)
|
||||
|
||||
type Packr2Source struct {
|
||||
Box *packr.Box
|
||||
Migrations *source.Migrations
|
||||
}
|
||||
|
||||
func init() {
|
||||
source.Register("packr2", &Packr2Source{})
|
||||
}
|
||||
|
||||
func WithInstance(instance *Packr2Source) (source.Driver, error) {
|
||||
for _, fi := range instance.Box.List() {
|
||||
m, err := source.DefaultParse(fi)
|
||||
if err != nil {
|
||||
continue // ignore files that we can't parse
|
||||
}
|
||||
|
||||
if !instance.Migrations.Append(m) {
|
||||
return nil, fmt.Errorf("unable to parse file %v", fi)
|
||||
}
|
||||
}
|
||||
|
||||
return instance, nil
|
||||
}
|
||||
|
||||
// Open is required by source.Driver but is unused here: sources are
// constructed directly via WithInstance rather than from a URL.
func (s *Packr2Source) Open(url string) (source.Driver, error) {
	return nil, fmt.Errorf("not implemented")
}
|
||||
|
||||
// Close drops the parsed migration index; the box itself holds no
// resources that need releasing, so this never fails.
func (s *Packr2Source) Close() error {
	s.Migrations = nil
	return nil
}
|
||||
|
||||
func (s *Packr2Source) First() (version uint, err error) {
|
||||
if v, ok := s.Migrations.First(); !ok {
|
||||
return 0, os.ErrNotExist
|
||||
} else {
|
||||
return v, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Packr2Source) Prev(version uint) (prevVersion uint, err error) {
|
||||
if v, ok := s.Migrations.Prev(version); !ok {
|
||||
return 0, os.ErrNotExist
|
||||
} else {
|
||||
return v, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Packr2Source) Next(version uint) (nextVersion uint, err error) {
|
||||
if v, ok := s.Migrations.Next(version); !ok {
|
||||
return 0, os.ErrNotExist
|
||||
} else {
|
||||
return v, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Packr2Source) ReadUp(version uint) (r io.ReadCloser, identifier string, err error) {
|
||||
if migration, ok := s.Migrations.Up(version); !ok {
|
||||
return nil, "", os.ErrNotExist
|
||||
} else {
|
||||
b := s.Box.Bytes(migration.Raw)
|
||||
return ioutil.NopCloser(bytes.NewBuffer(b)),
|
||||
migration.Identifier,
|
||||
nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Packr2Source) ReadDown(version uint) (r io.ReadCloser, identifier string, err error) {
|
||||
if migration, ok := s.Migrations.Down(version); !ok {
|
||||
return nil, "", migrate.ErrNilVersion
|
||||
} else {
|
||||
b := s.Box.Bytes(migration.Raw)
|
||||
return ioutil.NopCloser(bytes.NewBuffer(b)),
|
||||
migration.Identifier,
|
||||
nil
|
||||
}
|
||||
}
|
||||
166
pkg/ffmpeg/downloader.go
Normal file
166
pkg/ffmpeg/downloader.go
Normal file
@@ -0,0 +1,166 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"fmt"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func GetPaths(configDirectory string) (string, string) {
|
||||
var ffmpegPath, ffprobePath string
|
||||
|
||||
// Check if ffmpeg exists in the PATH
|
||||
if pathBinaryHasCorrectFlags() {
|
||||
ffmpegPath, _ = exec.LookPath("ffmpeg")
|
||||
ffprobePath, _ = exec.LookPath("ffprobe")
|
||||
}
|
||||
|
||||
// Check if ffmpeg exists in the config directory
|
||||
ffmpegConfigPath := filepath.Join(configDirectory, getFFMPEGFilename())
|
||||
ffprobeConfigPath := filepath.Join(configDirectory, getFFProbeFilename())
|
||||
ffmpegConfigExists, _ := utils.FileExists(ffmpegConfigPath)
|
||||
ffprobeConfigExists, _ := utils.FileExists(ffprobeConfigPath)
|
||||
if ffmpegPath == "" && ffmpegConfigExists {
|
||||
ffmpegPath = ffmpegConfigPath
|
||||
}
|
||||
if ffprobePath == "" && ffprobeConfigExists {
|
||||
ffprobePath = ffprobeConfigPath
|
||||
}
|
||||
|
||||
return ffmpegPath, ffprobePath
|
||||
}
|
||||
|
||||
func Download(configDirectory string) error {
|
||||
url := getFFMPEGURL()
|
||||
if url == "" {
|
||||
return fmt.Errorf("no ffmpeg url for this platform")
|
||||
}
|
||||
|
||||
// Configure where we want to download the archive
|
||||
urlExt := path.Ext(url)
|
||||
archivePath := filepath.Join(configDirectory, "ffmpeg"+urlExt)
|
||||
_ = os.Remove(archivePath) // remove archive if it already exists
|
||||
out, err := os.Create(archivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
// Make the HTTP request
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Check server response
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("bad status: %s", resp.Status)
|
||||
}
|
||||
|
||||
// Write the response to the archive file location
|
||||
_, err = io.Copy(out, resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if urlExt == ".zip" {
|
||||
if err := unzip(archivePath, configDirectory); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("ffmpeg was downloaded to %s", archivePath)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// getFFMPEGURL returns the download URL of a static ffmpeg 4.1 build for
// the current OS, or "" when no directly usable archive is available.
func getFFMPEGURL() string {
	switch runtime.GOOS {
	case "darwin":
		return "https://ffmpeg.zeranoe.com/builds/macos64/static/ffmpeg-4.1-macos64-static.zip"
	case "windows":
		return "https://ffmpeg.zeranoe.com/builds/win64/static/ffmpeg-4.1-win64-static.zip"
	case "linux":
		// TODO: untar this
		//return "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz"
		return ""
	default:
		return ""
	}
}
|
||||
|
||||
// getFFMPEGFilename returns the platform-appropriate ffmpeg binary name.
func getFFMPEGFilename() string {
	name := "ffmpeg"
	if runtime.GOOS == "windows" {
		name += ".exe"
	}
	return name
}
|
||||
|
||||
// getFFProbeFilename returns the platform-appropriate ffprobe binary name.
func getFFProbeFilename() string {
	name := "ffprobe"
	if runtime.GOOS == "windows" {
		name += ".exe"
	}
	return name
}
|
||||
|
||||
// pathBinaryHasCorrectFlags reports whether the ffmpeg found on $PATH was
// compiled with every codec library stash requires.
func pathBinaryHasCorrectFlags() bool {
	ffmpegPath, err := exec.LookPath("ffmpeg")
	if err != nil {
		return false
	}
	// ffmpeg prints its configure flags in its banner. The command error is
	// ignored because ffmpeg exits non-zero when run without arguments.
	combined, _ := exec.Command(ffmpegPath).CombinedOutput()
	banner := string(combined)
	required := []string{
		"--enable-libopus",
		"--enable-libvpx",
		"--enable-libx264",
		"--enable-libx265",
		"--enable-libwebp",
	}
	for _, flag := range required {
		if !strings.Contains(banner, flag) {
			return false
		}
	}
	return true
}
|
||||
|
||||
func unzip(src, configDirectory string) error {
|
||||
zipReader, err := zip.OpenReader(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer zipReader.Close()
|
||||
|
||||
for _, f := range zipReader.File {
|
||||
if f.FileInfo().IsDir() {
|
||||
continue
|
||||
}
|
||||
filename := f.FileInfo().Name()
|
||||
if filename != "ffprobe" && filename != "ffmpeg" && filename != "ffprobe.exe" && filename != "ffmpeg.exe" {
|
||||
continue
|
||||
}
|
||||
|
||||
rc, err := f.Open()
|
||||
|
||||
unzippedPath := filepath.Join(configDirectory, filename)
|
||||
unzippedOutput, err := os.Create(unzippedPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = io.Copy(unzippedOutput, rc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := unzippedOutput.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
71
pkg/ffmpeg/encoder.go
Normal file
71
pkg/ffmpeg/encoder.go
Normal file
@@ -0,0 +1,71 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"io/ioutil"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// progressRegex extracts the HH:MM:SS.ss timestamp from ffmpeg's "time="
// progress lines on stderr. Fix: the fractional-seconds dot is now escaped
// (`\.`); the bare `.` matched any character.
var progressRegex = regexp.MustCompile(`time=(\d+):(\d+):(\d+\.\d+)`)
|
||||
|
||||
// Encoder wraps the path to an ffmpeg executable and runs encodes with it.
type Encoder struct {
	Path string
}

// NewEncoder returns an Encoder bound to the given ffmpeg executable path.
func NewEncoder(ffmpegPath string) Encoder {
	return Encoder{Path: ffmpegPath}
}
||||
|
||||
// run invokes ffmpeg at e.Path with args, pumping its stderr to log encode
// progress, and returns whatever the process wrote to stdout.
//
// probeResult supplies the source Duration used to express progress as a
// fraction; args is passed to ffmpeg verbatim.
func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
	cmd := exec.Command(e.Path, args...)

	// Pipe failures are logged but not fatal; Start below will still run.
	stderr, err := cmd.StderrPipe()
	if err != nil {
		logger.Error("FFMPEG stderr not available: " + err.Error())
	}

	stdout, err := cmd.StdoutPipe()
	if nil != err {
		logger.Error("FFMPEG stdout not available: " + err.Error())
	}

	if err = cmd.Start(); err != nil {
		return "", err
	}

	// Drain stderr in small chunks: ffmpeg writes "time=HH:MM:SS.ss"
	// progress lines there, which progressRegex picks apart.
	buf := make([]byte, 80)
	for {
		n, err := stderr.Read(buf)
		if n > 0 {
			data := string(buf[0:n])
			regexResult := progressRegex.FindStringSubmatch(data)
			if len(regexResult) == 4 && probeResult.Duration > 0 {
				h, _ := strconv.ParseFloat(regexResult[1], 64)
				m, _ := strconv.ParseFloat(regexResult[2], 64)
				s, _ := strconv.ParseFloat(regexResult[3], 64)
				hours := h * 3600
				mins := m * 60
				secs := s
				// Encode position in seconds as a fraction of the source.
				time := hours + mins + secs
				progress := time / probeResult.Duration
				logger.Infof("Progress %.2f", progress)
			}
		}
		if err != nil {
			// Any read error (including EOF at process exit) ends the loop.
			break
		}
	}

	// Stdout is collected only after stderr is fully drained.
	stdoutData, _ := ioutil.ReadAll(stdout)
	stdoutString := string(stdoutData)

	if err := cmd.Wait(); err != nil {
		return stdoutString, err
	}

	return stdoutString, nil
}
|
||||
58
pkg/ffmpeg/encoder_marker.go
Normal file
58
pkg/ffmpeg/encoder_marker.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// SceneMarkerOptions configures the preview clip/image rendered for one
// scene marker.
type SceneMarkerOptions struct {
	ScenePath  string // source video path — not referenced by the visible encoder methods (they use the probe result's Path); confirm before removing
	Seconds    int    // marker position in the source, in seconds
	Width      int    // output width; height follows the aspect ratio
	OutputPath string // destination file for the rendered clip/image
}
||||
|
||||
// SceneMarkerVideo encodes a 20-second h264/aac clip starting at
// options.Seconds into options.OutputPath, scaled to options.Width
// (height kept even via -2, as libx264 requires).
func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOptions) error {
	args := []string{
		"-v", "quiet",
		"-ss", strconv.Itoa(options.Seconds),
		"-t", "20",
		"-i", probeResult.Path,
		"-c:v", "libx264",
		"-profile:v", "high",
		"-level", "4.2",
		"-preset", "veryslow",
		"-crf", "24",
		"-movflags", "+faststart",
		"-threads", "4",
		"-vf", fmt.Sprintf("scale=%v:-2", options.Width),
		"-sws_flags", "lanczos",
		"-c:a", "aac",
		"-b:a", "64k",
		"-strict", "-2",
		options.OutputPath,
	}
	_, err := e.run(probeResult, args)
	return err
}
|
||||
|
||||
// SceneMarkerImage encodes a 5-second, looping, 12fps animated WebP
// starting at options.Seconds into options.OutputPath, with audio stripped
// and scaled to options.Width.
func (e *Encoder) SceneMarkerImage(probeResult VideoFile, options SceneMarkerOptions) error {
	args := []string{
		"-v", "quiet",
		"-ss", strconv.Itoa(options.Seconds),
		"-t", "5",
		"-i", probeResult.Path,
		"-c:v", "libwebp",
		"-lossless", "1",
		"-q:v", "70",
		"-compression_level", "6",
		"-preset", "default",
		"-loop", "0",
		"-threads", "4",
		"-vf", fmt.Sprintf("scale=%v:-2,fps=12", options.Width),
		"-an",
		options.OutputPath,
	}
	_, err := e.run(probeResult, args)
	return err
}
|
||||
66
pkg/ffmpeg/encoder_scene_preview_chunk.go
Normal file
66
pkg/ffmpeg/encoder_scene_preview_chunk.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ScenePreviewChunkOptions configures one short clip extracted for a
// scene's preview montage.
type ScenePreviewChunkOptions struct {
	Time       int    // offset into the source, in seconds
	Width      int    // output width; height follows the aspect ratio
	OutputPath string // destination file for the chunk
}
|
||||
|
||||
// ScenePreviewVideoChunk encodes a 0.75-second h264/aac clip starting at
// options.Time into options.OutputPath (overwriting via -y). The error
// from e.run is deliberately discarded.
func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions) {
	args := []string{
		"-v", "quiet",
		"-ss", strconv.Itoa(options.Time),
		"-t", "0.75",
		"-i", probeResult.Path,
		"-y",
		"-c:v", "libx264",
		"-profile:v", "high",
		"-level", "4.2",
		"-preset", "veryslow",
		"-crf", "21",
		"-threads", "4",
		"-vf", fmt.Sprintf("scale=%v:-2", options.Width),
		"-c:a", "aac",
		"-b:a", "128k",
		options.OutputPath,
	}
	_, _ = e.run(probeResult, args)
}
|
||||
|
||||
// ScenePreviewVideoChunkCombine losslessly concatenates the chunk files
// listed in concatFilePath (ffmpeg concat demuxer list format) into
// outputPath using stream copy. The error from e.run is discarded.
func (e *Encoder) ScenePreviewVideoChunkCombine(probeResult VideoFile, concatFilePath string, outputPath string) {
	args := []string{
		"-v", "quiet",
		"-f", "concat",
		"-i", utils.FixWindowsPath(concatFilePath),
		"-y",
		"-c", "copy",
		outputPath,
	}
	_, _ = e.run(probeResult, args)
}
|
||||
|
||||
// ScenePreviewVideoToImage re-encodes the generated preview video at
// videoPreviewPath as a looping 12fps animated WebP at the given width,
// with audio stripped, writing it to outputPath.
func (e *Encoder) ScenePreviewVideoToImage(probeResult VideoFile, width int, videoPreviewPath string, outputPath string) error {
	args := []string{
		"-v", "quiet",
		"-i", videoPreviewPath,
		"-y",
		"-c:v", "libwebp",
		"-lossless", "1",
		"-q:v", "70",
		"-compression_level", "6",
		"-preset", "default",
		"-loop", "0",
		"-threads", "4",
		"-vf", fmt.Sprintf("scale=%v:-2,fps=12", width),
		"-an",
		outputPath,
	}
	_, err := e.run(probeResult, args)
	return err
}
|
||||
32
pkg/ffmpeg/encoder_screenshot.go
Normal file
32
pkg/ffmpeg/encoder_screenshot.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package ffmpeg
|
||||
|
||||
import "fmt"
|
||||
|
||||
// ScreenshotOptions controls a single-frame capture from a video.
type ScreenshotOptions struct {
	OutputPath string
	Quality    int     // image quality for -q:v; 0 is replaced with 1 (best)
	Time       float64 // seek position in seconds
	Width      int     // output width; height keeps the aspect ratio (-1)
	Verbosity  string  // ffmpeg -v level; "" is replaced with "quiet"
}
|
||||
|
||||
// Screenshot captures the single frame at options.Time into
// options.OutputPath via ffmpeg's image2 muxer, scaled to options.Width.
// Zero-value Verbosity/Quality fields are defaulted first. The result of
// e.run is deliberately discarded.
func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) {
	if options.Verbosity == "" {
		options.Verbosity = "quiet"
	}
	if options.Quality == 0 {
		options.Quality = 1
	}
	args := []string{
		"-v", options.Verbosity,
		"-ss", fmt.Sprintf("%v", options.Time),
		"-y",
		"-i", probeResult.Path, // TODO: Wrap in quotes?
		"-vframes", "1",
		"-q:v", fmt.Sprintf("%v", options.Quality),
		"-vf", fmt.Sprintf("scale=%v:-1", options.Width),
		"-f", "image2",
		options.OutputPath,
	}
	_, _ = e.run(probeResult, args)
}
|
||||
20
pkg/ffmpeg/encoder_transcode.go
Normal file
20
pkg/ffmpeg/encoder_transcode.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package ffmpeg
|
||||
|
||||
// TranscodeOptions configures a full-file transcode.
type TranscodeOptions struct {
	OutputPath string // destination file for the transcoded video
}
|
||||
|
||||
// Transcode re-encodes the whole source file to h264/aac at
// options.OutputPath, keeping the source width (scale=iw:-2 forces an even
// height, as libx264 requires). The result of e.run is discarded.
func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) {
	args := []string{
		"-i", probeResult.Path,
		"-c:v", "libx264",
		"-profile:v", "high",
		"-level", "4.2",
		"-preset", "superfast",
		"-crf", "23",
		"-vf", "scale=iw:-2",
		"-c:a", "aac",
		options.OutputPath,
	}
	_, _ = e.run(probeResult, args)
}
|
||||
152
pkg/ffmpeg/ffprobe.go
Normal file
152
pkg/ffmpeg/ffprobe.go
Normal file
@@ -0,0 +1,152 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ValidCodecs lists the video codecs stash treats as directly usable.
var ValidCodecs = []string{"h264", "h265", "vp8", "vp9"}

// IsValidCodec reports whether codecName appears in ValidCodecs.
func IsValidCodec(codecName string) bool {
	for _, valid := range ValidCodecs {
		if codecName == valid {
			return true
		}
	}
	return false
}
|
||||
|
||||
// VideoFile is the flattened result of an ffprobe run on a single file.
type VideoFile struct {
	JSON        FFProbeJSON    // raw ffprobe output the fields below derive from
	AudioStream *FFProbeStream // first audio stream, nil when absent
	VideoStream *FFProbeStream // first video stream, nil when absent

	Path         string
	Container    string  // ffprobe format_name
	Duration     float64 // seconds, rounded to 2 decimal places
	StartTime    float64 // seconds
	Bitrate      int64   // container-level bit rate
	Size         int64   // file size in bytes
	CreationTime time.Time

	VideoCodec   string
	VideoBitrate int64
	Width        int // swapped with Height by parse when a rotate tag applies
	Height       int // see parse's rotation handling
	FrameRate    float64 // frames/second, rounded to 2 decimal places
	Rotation     int64   // not populated by the visible parse code — confirm

	AudioCodec string
}
|
||||
|
||||
// NewVideoFile runs the ffprobe binary at ffprobePath against videoPath and
// parses its JSON output into a VideoFile.
func NewVideoFile(ffprobePath string, videoPath string) (*VideoFile, error) {
	args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", videoPath}
	//// Extremely slow on windows for some reason
	//if runtime.GOOS != "windows" {
	//	args = append(args, "-count_frames")
	//}
	out, err := exec.Command(ffprobePath, args...).Output()

	if err != nil {
		return nil, fmt.Errorf("FFProbe encountered an error with <%s>.\nError JSON:\n%s\nError: %s", videoPath, string(out), err.Error())
	}

	probeJSON := &FFProbeJSON{}
	if err := json.Unmarshal(out, probeJSON); err != nil {
		return nil, err
	}

	return parse(videoPath, probeJSON)
}
|
||||
|
||||
// parse flattens raw ffprobe JSON into a VideoFile, surfacing any error
// that ffprobe itself reported inside the JSON payload.
func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
	if probeJSON == nil {
		return nil, fmt.Errorf("failed to get ffprobe json")
	}

	result := &VideoFile{}
	result.JSON = *probeJSON

	if result.JSON.Error.Code != 0 {
		return nil, fmt.Errorf("ffprobe error code %d: %s", result.JSON.Error.Code, result.JSON.Error.String)
	}
	//} else if (ffprobeResult.stderr.includes("could not find codec parameters")) {
	//	throw new Error(`FFProbe [${filePath}] -> Could not find codec parameters`);
	//} // TODO nil_or_unsupported.(video_stream) && nil_or_unsupported.(audio_stream)

	result.Path = filePath

	// Container-level metadata; parse failures deliberately leave zero values.
	result.Bitrate, _ = strconv.ParseInt(probeJSON.Format.BitRate, 10, 64)
	result.Container = probeJSON.Format.FormatName
	duration, _ := strconv.ParseFloat(probeJSON.Format.Duration, 64)
	result.Duration = math.Round(duration*100) / 100
	// NOTE(review): the Stat error is ignored, so fileStat is nil (and
	// Size() panics) if the file vanished between probe and parse — confirm
	// whether that window matters here.
	fileStat, _ := os.Stat(filePath)
	result.Size = fileStat.Size()
	result.StartTime, _ = strconv.ParseFloat(probeJSON.Format.StartTime, 64)
	result.CreationTime = probeJSON.Format.Tags.CreationTime

	audioStream := result.GetAudioStream()
	if audioStream != nil {
		result.AudioCodec = audioStream.CodecName
		result.AudioStream = audioStream
	}

	videoStream := result.GetVideoStream()
	if videoStream != nil {
		result.VideoStream = videoStream
		result.VideoCodec = videoStream.CodecName
		result.VideoBitrate, _ = strconv.ParseInt(videoStream.BitRate, 10, 64)
		var framerate float64
		// AvgFrameRate is either a rational like "30000/1001" or a plain number.
		if strings.Contains(videoStream.AvgFrameRate, "/") {
			frameRateSplit := strings.Split(videoStream.AvgFrameRate, "/")
			numerator, _ := strconv.ParseFloat(frameRateSplit[0], 64)
			denominator, _ := strconv.ParseFloat(frameRateSplit[1], 64)
			framerate = numerator / denominator
		} else {
			framerate, _ = strconv.ParseFloat(videoStream.AvgFrameRate, 64)
		}
		result.FrameRate = math.Round(framerate*100) / 100
		// NOTE(review): this swaps width/height for ANY parsable rotate tag
		// other than 180 — including rotate=0. The intent is presumably
		// 90/270 only; confirm whether a literal "0" rotate tag occurs in
		// practice before relying on these dimensions.
		if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 {
			result.Width = videoStream.Height
			result.Height = videoStream.Width
		} else {
			result.Width = videoStream.Width
			result.Height = videoStream.Height
		}
	}

	return result, nil
}
|
||||
|
||||
func (v *VideoFile) GetAudioStream() *FFProbeStream {
|
||||
index := v.getStreamIndex("audio", v.JSON)
|
||||
if index != -1 {
|
||||
return &v.JSON.Streams[index]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *VideoFile) GetVideoStream() *FFProbeStream {
|
||||
index := v.getStreamIndex("video", v.JSON)
|
||||
if index != -1 {
|
||||
return &v.JSON.Streams[index]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *VideoFile) getStreamIndex(fileType string, probeJSON FFProbeJSON) int {
|
||||
for i, stream := range probeJSON.Streams {
|
||||
if stream.CodecType == fileType {
|
||||
return i
|
||||
}
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
92
pkg/ffmpeg/types.go
Normal file
92
pkg/ffmpeg/types.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// FFProbeJSON mirrors ffprobe's -print_format json output for a single
// media file. Numeric values ffprobe emits as strings (bit_rate, duration,
// size, start_time) are kept as strings and parsed by callers.
type FFProbeJSON struct {
	// Format holds container-level metadata.
	Format struct {
		BitRate        string `json:"bit_rate"`
		Duration       string `json:"duration"`
		Filename       string `json:"filename"`
		FormatLongName string `json:"format_long_name"`
		FormatName     string `json:"format_name"`
		NbPrograms     int    `json:"nb_programs"`
		NbStreams      int    `json:"nb_streams"`
		ProbeScore     int    `json:"probe_score"`
		Size           string `json:"size"`
		StartTime      string `json:"start_time"`
		Tags           struct {
			CompatibleBrands string    `json:"compatible_brands"`
			CreationTime     time.Time `json:"creation_time"`
			Encoder          string    `json:"encoder"`
			MajorBrand       string    `json:"major_brand"`
			MinorVersion     string    `json:"minor_version"`
		} `json:"tags"`
	} `json:"format"`
	// Streams lists every audio/video/subtitle stream in the container.
	Streams []FFProbeStream `json:"streams"`
	// Error is populated (non-zero Code) when ffprobe itself failed.
	Error struct {
		Code   int    `json:"code"`
		String string `json:"string"`
	} `json:"error"`
}
|
||||
|
||||
// FFProbeStream mirrors one entry of ffprobe's "streams" array.
// Values ffprobe prints as strings (bit rates, frame rates, durations)
// stay strings here; callers parse them as needed.
type FFProbeStream struct {
	AvgFrameRate       string `json:"avg_frame_rate"` // often rational, e.g. "30000/1001"
	BitRate            string `json:"bit_rate"`
	BitsPerRawSample   string `json:"bits_per_raw_sample,omitempty"`
	ChromaLocation     string `json:"chroma_location,omitempty"`
	CodecLongName      string `json:"codec_long_name"`
	CodecName          string `json:"codec_name"`
	CodecTag           string `json:"codec_tag"`
	CodecTagString     string `json:"codec_tag_string"`
	CodecTimeBase      string `json:"codec_time_base"`
	CodecType          string `json:"codec_type"` // "video" or "audio"
	CodedHeight        int    `json:"coded_height,omitempty"`
	CodedWidth         int    `json:"coded_width,omitempty"`
	DisplayAspectRatio string `json:"display_aspect_ratio,omitempty"`
	// Disposition flags are 0/1 booleans from ffprobe.
	Disposition struct {
		AttachedPic     int `json:"attached_pic"`
		CleanEffects    int `json:"clean_effects"`
		Comment         int `json:"comment"`
		Default         int `json:"default"`
		Dub             int `json:"dub"`
		Forced          int `json:"forced"`
		HearingImpaired int `json:"hearing_impaired"`
		Karaoke         int `json:"karaoke"`
		Lyrics          int `json:"lyrics"`
		Original        int `json:"original"`
		TimedThumbnails int `json:"timed_thumbnails"`
		VisualImpaired  int `json:"visual_impaired"`
	} `json:"disposition"`
	Duration          string `json:"duration"`
	DurationTs        int    `json:"duration_ts"`
	HasBFrames        int    `json:"has_b_frames,omitempty"`
	Height            int    `json:"height,omitempty"`
	Index             int    `json:"index"`
	IsAvc             string `json:"is_avc,omitempty"`
	Level             int    `json:"level,omitempty"`
	NalLengthSize     string `json:"nal_length_size,omitempty"`
	NbFrames          string `json:"nb_frames"`
	PixFmt            string `json:"pix_fmt,omitempty"`
	Profile           string `json:"profile"`
	RFrameRate        string `json:"r_frame_rate"`
	Refs              int    `json:"refs,omitempty"`
	SampleAspectRatio string `json:"sample_aspect_ratio,omitempty"`
	StartPts          int    `json:"start_pts"`
	StartTime         string `json:"start_time"`
	Tags              struct {
		CreationTime time.Time `json:"creation_time"`
		HandlerName  string    `json:"handler_name"`
		Language     string    `json:"language"`
		Rotate       string    `json:"rotate"` // rotation in degrees, as a string
	} `json:"tags"`
	TimeBase string `json:"time_base"`
	Width    int    `json:"width,omitempty"`
	// Audio-only fields below.
	BitsPerSample int    `json:"bits_per_sample,omitempty"`
	ChannelLayout string `json:"channel_layout,omitempty"`
	Channels      int    `json:"channels,omitempty"`
	MaxBitRate    string `json:"max_bit_rate,omitempty"`
	SampleFmt     string `json:"sample_fmt,omitempty"`
	SampleRate    string `json:"sample_rate,omitempty"`
}
|
||||
129
pkg/logger/logger.go
Normal file
129
pkg/logger/logger.go
Normal file
@@ -0,0 +1,129 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/sirupsen/logrus"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// LogItem is a single cached log entry exposed to API consumers.
type LogItem struct {
	Type    string `json:"type"`    // level/category, e.g. "info", "error", "progress"
	Message string `json:"message"` // fully formatted message text
}
|
||||
|
||||
// logger handles normal log output; progressLogger renders transient
// progress lines using ProgressFormatter (installed in init).
var logger = logrus.New()
var progressLogger = logrus.New()

// LogCache holds the most recent log items, newest first.
var LogCache []LogItem

// mutex guards LogCache.
var mutex = &sync.Mutex{}
|
||||
|
||||
func addLogItem(l *LogItem) {
|
||||
mutex.Lock()
|
||||
LogCache = append([]LogItem{*l}, LogCache...)
|
||||
if len(LogCache) > 30 {
|
||||
LogCache = LogCache[:len(LogCache)-1]
|
||||
}
|
||||
mutex.Unlock()
|
||||
}
|
||||
|
||||
// init installs the carriage-return progress formatter on progressLogger so
// successive progress lines overwrite each other on the terminal.
func init() {
	progressLogger.SetFormatter(new(ProgressFormatter))
}
|
||||
|
||||
func Progressf(format string, args ...interface{}) {
|
||||
progressLogger.Infof(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "progress",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
|
||||
}
|
||||
|
||||
// Trace logs at trace level. Trace output is not added to the LogCache.
func Trace(args ...interface{}) {
	logger.Trace(args...)
}
|
||||
|
||||
func Debug(args ...interface{}) {
|
||||
logger.Debug(args...)
|
||||
l := &LogItem{
|
||||
Type: "debug",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Debugf(format string, args ...interface{}) {
|
||||
logger.Debugf(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "debug",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Info(args ...interface{}) {
|
||||
logger.Info(args...)
|
||||
l := &LogItem{
|
||||
Type: "info",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Infof(format string, args ...interface{}) {
|
||||
logger.Infof(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "info",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Warn(args ...interface{}) {
|
||||
logger.Warn(args...)
|
||||
l := &LogItem{
|
||||
Type: "warn",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Warnf(format string, args ...interface{}) {
|
||||
logger.Warnf(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "warn",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Error(args ...interface{}) {
|
||||
logger.Error(args...)
|
||||
l := &LogItem{
|
||||
Type: "error",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
func Errorf(format string, args ...interface{}) {
|
||||
logger.Errorf(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "error",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
|
||||
// Fatal logs at fatal level and then exits the process (logrus calls
// os.Exit(1)). Fatal output is not added to the LogCache.
func Fatal(args ...interface{}) {
	logger.Fatal(args...)
}
|
||||
|
||||
// Fatalf logs a formatted message at fatal level and then exits the process.
// Fatal output is not added to the LogCache.
func Fatalf(format string, args ...interface{}) {
	logger.Fatalf(format, args...)
}
|
||||
|
||||
//func WithRequest(req *http.Request) *logrus.Entry {
|
||||
// return logger.WithFields(RequestFields(req))
|
||||
//}
|
||||
12
pkg/logger/progress_formatter.go
Normal file
12
pkg/logger/progress_formatter.go
Normal file
@@ -0,0 +1,12 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// ProgressFormatter formats logrus entries as single-line, carriage-return
// terminated progress messages for terminal display.
type ProgressFormatter struct{}
|
||||
|
||||
func (f *ProgressFormatter) Format(entry *logrus.Entry) ([]byte, error) {
|
||||
msg := []byte("Processing --> " + entry.Message + "\r")
|
||||
return msg, nil
|
||||
}
|
||||
62
pkg/manager/generator.go
Normal file
62
pkg/manager/generator.go
Normal file
@@ -0,0 +1,62 @@
|
||||
package manager
|
||||
|
||||
import (
	"fmt"
	"os/exec"
	"strconv"
	"strings"

	"github.com/stashapp/stash/pkg/ffmpeg"
	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/utils"
)
|
||||
|
||||
// GeneratorInfo carries the per-video measurements the preview/sprite
// generators need.
type GeneratorInfo struct {
	ChunkCount     int     // number of segments/thumbnails to produce
	FrameRate      float64 // effective frame rate used for timing math
	NumberOfFrames int     // total frames, measured or estimated
	NthFrame       int     // frame step between successive chunks

	VideoFile ffmpeg.VideoFile
}
|
||||
|
||||
func newGeneratorInfo(videoFile ffmpeg.VideoFile) (*GeneratorInfo, error) {
|
||||
exists, err := utils.FileExists(videoFile.Path)
|
||||
if !exists {
|
||||
logger.Errorf("video file not found")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
generator := &GeneratorInfo{VideoFile: videoFile}
|
||||
return generator, nil
|
||||
}
|
||||
|
||||
func (g *GeneratorInfo) configure() error {
|
||||
videoStream := g.VideoFile.VideoStream
|
||||
if videoStream == nil {
|
||||
return fmt.Errorf("missing video stream")
|
||||
}
|
||||
|
||||
var framerate float64
|
||||
if g.VideoFile.FrameRate == 0 {
|
||||
framerate, _ = strconv.ParseFloat(videoStream.RFrameRate, 64)
|
||||
} else {
|
||||
framerate = g.VideoFile.FrameRate
|
||||
}
|
||||
g.FrameRate = framerate
|
||||
|
||||
numberOfFrames, _ := strconv.Atoi(videoStream.NbFrames)
|
||||
if numberOfFrames == 0 {
|
||||
command := `ffmpeg -nostats -i ` + g.VideoFile.Path + ` -vcodec copy -f rawvideo -y /dev/null 2>&1 | \
|
||||
grep frame | \
|
||||
awk '{split($0,a,"fps")}END{print a[1]}' | \
|
||||
sed 's/.*= *//'`
|
||||
commandResult, _ := exec.Command(command).Output()
|
||||
numberOfFrames, _ := strconv.Atoi(string(commandResult))
|
||||
if numberOfFrames == 0 { // TODO: test
|
||||
numberOfFrames = int(framerate * g.VideoFile.Duration)
|
||||
}
|
||||
}
|
||||
g.NumberOfFrames = numberOfFrames
|
||||
g.NthFrame = g.NumberOfFrames / g.ChunkCount
|
||||
|
||||
return nil
|
||||
}
|
||||
126
pkg/manager/generator_preview.go
Normal file
126
pkg/manager/generator_preview.go
Normal file
@@ -0,0 +1,126 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// PreviewGenerator produces a short stitched preview video and a preview
// image for a scene.
type PreviewGenerator struct {
	Info *GeneratorInfo

	VideoFilename   string // output preview video filename
	ImageFilename   string // output preview image filename
	OutputDirectory string // directory the finished files are written to
}
|
||||
|
||||
func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string) (*PreviewGenerator, error) {
|
||||
exists, err := utils.FileExists(videoFile.Path)
|
||||
if !exists {
|
||||
return nil, err
|
||||
}
|
||||
generator, err := newGeneratorInfo(videoFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
generator.ChunkCount = 12 // 12 segments to the preview
|
||||
if err := generator.configure(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &PreviewGenerator{
|
||||
Info: generator,
|
||||
VideoFilename: videoFilename,
|
||||
ImageFilename: imageFilename,
|
||||
OutputDirectory: outputDirectory,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (g *PreviewGenerator) Generate() error {
|
||||
instance.Paths.Generated.EmptyTmpDir()
|
||||
logger.Infof("[generator] generating scene preview for %s", g.Info.VideoFile.Path)
|
||||
encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
|
||||
|
||||
if err := g.generateConcatFile(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := g.generateVideo(&encoder); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := g.generateImage(&encoder); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *PreviewGenerator) generateConcatFile() error {
|
||||
f, err := os.Create(g.getConcatFilePath())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
w := bufio.NewWriter(f)
|
||||
for i := 0; i < g.Info.ChunkCount; i++ {
|
||||
num := fmt.Sprintf("%.3d", i)
|
||||
filename := "preview" + num + ".mp4"
|
||||
_, _ = w.WriteString(fmt.Sprintf("file '%s'\n", filename))
|
||||
}
|
||||
return w.Flush()
|
||||
}
|
||||
|
||||
func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder) error {
|
||||
outputPath := filepath.Join(g.OutputDirectory, g.VideoFilename)
|
||||
outputExists, _ := utils.FileExists(outputPath)
|
||||
if outputExists {
|
||||
return nil
|
||||
}
|
||||
|
||||
stepSize := int(g.Info.VideoFile.Duration / float64(g.Info.ChunkCount))
|
||||
for i := 0; i < g.Info.ChunkCount; i++ {
|
||||
time := i * stepSize
|
||||
num := fmt.Sprintf("%.3d", i)
|
||||
filename := "preview" + num + ".mp4"
|
||||
chunkOutputPath := instance.Paths.Generated.GetTmpPath(filename)
|
||||
|
||||
options := ffmpeg.ScenePreviewChunkOptions{
|
||||
Time: time,
|
||||
Width: 640,
|
||||
OutputPath: chunkOutputPath,
|
||||
}
|
||||
encoder.ScenePreviewVideoChunk(g.Info.VideoFile, options)
|
||||
}
|
||||
|
||||
videoOutputPath := filepath.Join(g.OutputDirectory, g.VideoFilename)
|
||||
encoder.ScenePreviewVideoChunkCombine(g.Info.VideoFile, g.getConcatFilePath(), videoOutputPath)
|
||||
logger.Debug("created video preview: ", videoOutputPath)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *PreviewGenerator) generateImage(encoder *ffmpeg.Encoder) error {
|
||||
outputPath := filepath.Join(g.OutputDirectory, g.ImageFilename)
|
||||
outputExists, _ := utils.FileExists(outputPath)
|
||||
if outputExists {
|
||||
return nil
|
||||
}
|
||||
|
||||
videoPreviewPath := filepath.Join(g.OutputDirectory, g.VideoFilename)
|
||||
tmpOutputPath := instance.Paths.Generated.GetTmpPath(g.ImageFilename)
|
||||
if err := encoder.ScenePreviewVideoToImage(g.Info.VideoFile, 640, videoPreviewPath, tmpOutputPath); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.Rename(tmpOutputPath, outputPath); err != nil {
|
||||
return err
|
||||
}
|
||||
logger.Debug("created video preview image: ", outputPath)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// getConcatFilePath returns the tmp-directory path of the ffmpeg concat
// list written by generateConcatFile.
func (g *PreviewGenerator) getConcatFilePath() string {
	return instance.Paths.Generated.GetTmpPath("files.txt")
}
|
||||
135
pkg/manager/generator_sprite.go
Normal file
135
pkg/manager/generator_sprite.go
Normal file
@@ -0,0 +1,135 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/bmatcuk/doublestar"
|
||||
"github.com/disintegration/imaging"
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"image"
|
||||
"image/color"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// SpriteGenerator produces a Rows x Columns montage of thumbnails plus a
// WEBVTT file mapping playback times to montage cells (for seek previews).
type SpriteGenerator struct {
	Info *GeneratorInfo

	ImageOutputPath string // finished sprite image path
	VTTOutputPath   string // finished WEBVTT path
	Rows            int
	Columns         int
}
|
||||
|
||||
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
|
||||
exists, err := utils.FileExists(videoFile.Path)
|
||||
if !exists {
|
||||
return nil, err
|
||||
}
|
||||
generator, err := newGeneratorInfo(videoFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
generator.ChunkCount = rows * cols
|
||||
if err := generator.configure(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &SpriteGenerator{
|
||||
Info: generator,
|
||||
ImageOutputPath: imageOutputPath,
|
||||
VTTOutputPath: vttOutputPath,
|
||||
Rows: rows,
|
||||
Columns: cols,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (g *SpriteGenerator) Generate() error {
|
||||
instance.Paths.Generated.EmptyTmpDir()
|
||||
encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
|
||||
|
||||
if err := g.generateSpriteImage(&encoder); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := g.generateSpriteVTT(&encoder); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error {
|
||||
logger.Infof("[generator] generating sprite image for %s", g.Info.VideoFile.Path)
|
||||
|
||||
// Create `this.chunkCount` thumbnails in the tmp directory
|
||||
stepSize := int(g.Info.VideoFile.Duration / float64(g.Info.ChunkCount))
|
||||
for i := 0; i < g.Info.ChunkCount; i++ {
|
||||
time := i * stepSize
|
||||
num := fmt.Sprintf("%.3d", i)
|
||||
filename := "thumbnail" + num + ".jpg"
|
||||
|
||||
options := ffmpeg.ScreenshotOptions{
|
||||
OutputPath: instance.Paths.Generated.GetTmpPath(filename),
|
||||
Time: float64(time),
|
||||
Width: 160,
|
||||
}
|
||||
encoder.Screenshot(g.Info.VideoFile, options)
|
||||
}
|
||||
|
||||
// Combine all of the thumbnails into a sprite image
|
||||
globPath := filepath.Join(instance.Paths.Generated.Tmp, "thumbnail*.jpg")
|
||||
imagePaths, _ := doublestar.Glob(globPath)
|
||||
var images []image.Image
|
||||
for _, imagePath := range imagePaths {
|
||||
img, err := imaging.Open(imagePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
images = append(images, img)
|
||||
}
|
||||
|
||||
width := images[0].Bounds().Size().X
|
||||
height := images[0].Bounds().Size().Y
|
||||
canvasWidth := width * g.Columns
|
||||
canvasHeight := height * g.Rows
|
||||
montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{})
|
||||
for index := 0; index < len(images); index++ {
|
||||
x := width * (index % g.Columns)
|
||||
y := height * int(math.Floor(float64(index)/float64(g.Rows)))
|
||||
img := images[index]
|
||||
montage = imaging.Paste(montage, img, image.Pt(x, y))
|
||||
}
|
||||
|
||||
return imaging.Save(montage, g.ImageOutputPath)
|
||||
}
|
||||
|
||||
// generateSpriteVTT writes a WEBVTT file mapping playback time ranges to
// x/y/w/h regions of the finished sprite image (the player's seek preview).
// Requires generateSpriteImage to have run first.
func (g *SpriteGenerator) generateSpriteVTT(encoder *ffmpeg.Encoder) error {
	logger.Infof("[generator] generating sprite vtt for %s", g.Info.VideoFile.Path)

	spriteImage, err := imaging.Open(g.ImageOutputPath)
	if err != nil {
		return err
	}
	spriteImageName := filepath.Base(g.ImageOutputPath)
	// Cell size is derived from the finished sprite, not the source video.
	width := spriteImage.Bounds().Size().X / g.Columns
	height := spriteImage.Bounds().Size().Y / g.Rows

	// Seconds of video represented by each thumbnail cell.
	stepSize := float64(g.Info.NthFrame) / g.Info.FrameRate

	vttLines := []string{"WEBVTT", ""}
	for index := 0; index < g.Info.ChunkCount; index++ {
		x := width * (index % g.Columns)
		// NOTE(review): dividing by g.Rows looks like it should be g.Columns
		// for row-major placement; harmless while Rows == Columns, and must
		// stay consistent with generateSpriteImage. TODO confirm.
		y := height * int(math.Floor(float64(index)/float64(g.Rows)))
		startTime := utils.GetVTTTime(float64(index) * stepSize)
		endTime := utils.GetVTTTime(float64(index+1) * stepSize)

		vttLines = append(vttLines, startTime+" --> "+endTime)
		vttLines = append(vttLines, fmt.Sprintf("%s#xywh=%d,%d,%d,%d", spriteImageName, x, y, width, height))
		vttLines = append(vttLines, "")
	}
	vtt := strings.Join(vttLines, "\n")

	return ioutil.WriteFile(g.VTTOutputPath, []byte(vtt), 0755)
}
|
||||
13
pkg/manager/job_status.go
Normal file
13
pkg/manager/job_status.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package manager
|
||||
|
||||
// JobStatus identifies the task the manager is currently running.
type JobStatus int

// Values are stable wire/API constants; iota preserves the original numbering.
const (
	Idle JobStatus = iota // 0: no job running
	Import                // 1
	Export                // 2
	Scan                  // 3
	Generate              // 4
	Clean                 // 5
	Scrape                // 6
)
|
||||
47
pkg/manager/json_utils.go
Normal file
47
pkg/manager/json_utils.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
)
|
||||
|
||||
// jsonUtils centralizes load/save of the JSON metadata files via the paths
// configured on the manager singleton (instance.Paths.JSON).
type jsonUtils struct{}
|
||||
|
||||
// getMappings loads the top-level mappings index from the JSON metadata dir.
func (jp *jsonUtils) getMappings() (*jsonschema.Mappings, error) {
	return jsonschema.LoadMappingsFile(instance.Paths.JSON.MappingsFile)
}

// saveMappings persists the top-level mappings index.
func (jp *jsonUtils) saveMappings(mappings *jsonschema.Mappings) error {
	return jsonschema.SaveMappingsFile(instance.Paths.JSON.MappingsFile, mappings)
}
|
||||
|
||||
// getScraped loads the list of scraped items from the JSON metadata dir.
func (jp *jsonUtils) getScraped() ([]jsonschema.ScrapedItem, error) {
	return jsonschema.LoadScrapedFile(instance.Paths.JSON.ScrapedFile)
}

// saveScaped persists the list of scraped items.
// NOTE(review): name is a typo for "saveScraped"; kept as-is because callers
// elsewhere depend on it — rename in a coordinated change.
func (jp *jsonUtils) saveScaped(scraped []jsonschema.ScrapedItem) error {
	return jsonschema.SaveScrapedFile(instance.Paths.JSON.ScrapedFile, scraped)
}
|
||||
|
||||
// getPerformer loads the performer JSON file addressed by checksum.
func (jp *jsonUtils) getPerformer(checksum string) (*jsonschema.Performer, error) {
	return jsonschema.LoadPerformerFile(instance.Paths.JSON.PerformerJSONPath(checksum))
}

// savePerformer persists the performer JSON file addressed by checksum.
func (jp *jsonUtils) savePerformer(checksum string, performer *jsonschema.Performer) error {
	return jsonschema.SavePerformerFile(instance.Paths.JSON.PerformerJSONPath(checksum), performer)
}
|
||||
|
||||
// getStudio loads the studio JSON file addressed by checksum.
func (jp *jsonUtils) getStudio(checksum string) (*jsonschema.Studio, error) {
	return jsonschema.LoadStudioFile(instance.Paths.JSON.StudioJSONPath(checksum))
}

// saveStudio persists the studio JSON file addressed by checksum.
func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) error {
	return jsonschema.SaveStudioFile(instance.Paths.JSON.StudioJSONPath(checksum), studio)
}
|
||||
|
||||
// getScene loads the scene JSON file addressed by checksum.
func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
	return jsonschema.LoadSceneFile(instance.Paths.JSON.SceneJSONPath(checksum))
}

// saveScene persists the scene JSON file addressed by checksum.
func (jp *jsonUtils) saveScene(checksum string, scene *jsonschema.Scene) error {
	return jsonschema.SaveSceneFile(instance.Paths.JSON.SceneJSONPath(checksum), scene)
}
|
||||
38
pkg/manager/jsonschema/config.go
Normal file
38
pkg/manager/jsonschema/config.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Config mirrors the on-disk stash configuration JSON file.
type Config struct {
	Stash    string `json:"stash"`    // root directory of the media library
	Metadata string `json:"metadata"` // directory holding exported JSON metadata
	// Generated string `json:"generated"` // TODO: Generated directory instead of metadata
	Cache     string `json:"cache"`
	Downloads string `json:"downloads"`
}
|
||||
|
||||
func LoadConfigFile(file string) *Config {
|
||||
var config Config
|
||||
configFile, err := os.Open(file)
|
||||
defer configFile.Close()
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
jsonParser := json.NewDecoder(configFile)
|
||||
parseError := jsonParser.Decode(&config)
|
||||
if parseError != nil {
|
||||
logger.Errorf("config file parse error: %s", parseError)
|
||||
}
|
||||
return &config
|
||||
}
|
||||
|
||||
// SaveConfigFile writes config to filePath as indented JSON.
// Returns an error if config is nil or the write fails.
func SaveConfigFile(filePath string, config *Config) error {
	if config == nil {
		return fmt.Errorf("config must not be nil")
	}
	return marshalToFile(filePath, config)
}
|
||||
46
pkg/manager/jsonschema/mappings.go
Normal file
46
pkg/manager/jsonschema/mappings.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// NameMapping links an entity's name to the checksum used as its JSON filename.
type NameMapping struct {
	Name     string `json:"name"`
	Checksum string `json:"checksum"`
}

// PathMapping links a file path to the checksum used as its JSON filename.
type PathMapping struct {
	Path     string `json:"path"`
	Checksum string `json:"checksum"`
}

// Mappings is the top-level index of all exported entities.
type Mappings struct {
	Performers []NameMapping `json:"performers"`
	Studios    []NameMapping `json:"studios"`
	Galleries  []PathMapping `json:"galleries"`
	Scenes     []PathMapping `json:"scenes"`
}
|
||||
|
||||
func LoadMappingsFile(filePath string) (*Mappings, error) {
|
||||
var mappings Mappings
|
||||
file, err := os.Open(filePath)
|
||||
defer file.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&mappings)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &mappings, nil
|
||||
}
|
||||
|
||||
// SaveMappingsFile writes mappings to filePath as indented JSON.
// Returns an error if mappings is nil or the write fails.
func SaveMappingsFile(filePath string, mappings *Mappings) error {
	if mappings == nil {
		return fmt.Errorf("mappings must not be nil")
	}
	return marshalToFile(filePath, mappings)
}
|
||||
49
pkg/manager/jsonschema/performer.go
Normal file
49
pkg/manager/jsonschema/performer.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Performer is the exported JSON representation of a performer.
// Every field is optional in the file.
type Performer struct {
	Name         string `json:"name,omitempty"`
	URL          string `json:"url,omitempty"`
	Twitter      string `json:"twitter,omitempty"`
	Instagram    string `json:"instagram,omitempty"`
	Birthdate    string `json:"birthdate,omitempty"`
	Ethnicity    string `json:"ethnicity,omitempty"`
	Country      string `json:"country,omitempty"`
	EyeColor     string `json:"eye_color,omitempty"`
	Height       string `json:"height,omitempty"`
	Measurements string `json:"measurements,omitempty"`
	FakeTits     string `json:"fake_tits,omitempty"`
	CareerLength string `json:"career_length,omitempty"`
	Tattoos      string `json:"tattoos,omitempty"`
	Piercings    string `json:"piercings,omitempty"`
	Aliases      string `json:"aliases,omitempty"`
	Favorite     bool   `json:"favorite,omitempty"`
	// Image holds the performer image; presumably encoded image data —
	// NOTE(review): confirm the encoding against the exporter.
	Image string `json:"image,omitempty"`
}
|
||||
|
||||
func LoadPerformerFile(filePath string) (*Performer, error) {
|
||||
var performer Performer
|
||||
file, err := os.Open(filePath)
|
||||
defer file.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&performer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &performer, nil
|
||||
}
|
||||
|
||||
// SavePerformerFile writes performer to filePath as indented JSON.
// Returns an error if performer is nil or the write fails.
func SavePerformerFile(filePath string, performer *Performer) error {
	if performer == nil {
		return fmt.Errorf("performer must not be nil")
	}
	return marshalToFile(filePath, performer)
}
|
||||
61
pkg/manager/jsonschema/scene.go
Normal file
61
pkg/manager/jsonschema/scene.go
Normal file
@@ -0,0 +1,61 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// SceneMarker is an exported timestamped marker within a scene.
type SceneMarker struct {
	Title      string   `json:"title,omitempty"`
	Seconds    string   `json:"seconds,omitempty"` // offset into the scene, stored as a string
	PrimaryTag string   `json:"primary_tag,omitempty"`
	Tags       []string `json:"tags,omitempty"`
}

// SceneFile records the technical properties of the scene's video file.
type SceneFile struct {
	Size       string `json:"size"`
	Duration   string `json:"duration"`
	VideoCodec string `json:"video_codec"`
	AudioCodec string `json:"audio_codec"`
	Width      int    `json:"width"`
	Height     int    `json:"height"`
	Framerate  string `json:"framerate"`
	Bitrate    int    `json:"bitrate"`
}

// Scene is the exported JSON representation of a scene; related entities
// are referenced by name/path, not by id.
type Scene struct {
	Title      string        `json:"title,omitempty"`
	Studio     string        `json:"studio,omitempty"`
	URL        string        `json:"url,omitempty"`
	Date       string        `json:"date,omitempty"`
	Rating     int           `json:"rating,omitempty"`
	Details    string        `json:"details,omitempty"`
	Gallery    string        `json:"gallery,omitempty"`
	Performers []string      `json:"performers,omitempty"`
	Tags       []string      `json:"tags,omitempty"`
	Markers    []SceneMarker `json:"markers,omitempty"`
	File       *SceneFile    `json:"file,omitempty"`
}
|
||||
|
||||
func LoadSceneFile(filePath string) (*Scene, error) {
|
||||
var scene Scene
|
||||
file, err := os.Open(filePath)
|
||||
defer file.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&scene)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &scene, nil
|
||||
}
|
||||
|
||||
// SaveSceneFile writes scene to filePath as indented JSON.
// Returns an error if scene is nil or the write fails.
func SaveSceneFile(filePath string, scene *Scene) error {
	if scene == nil {
		return fmt.Errorf("scene must not be nil")
	}
	return marshalToFile(filePath, scene)
}
|
||||
46
pkg/manager/jsonschema/scraped.go
Normal file
46
pkg/manager/jsonschema/scraped.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// ScrapedItem is one record of the exported scraped-site metadata.
type ScrapedItem struct {
	Title           string `json:"title,omitempty"`
	Description     string `json:"description,omitempty"`
	URL             string `json:"url,omitempty"`
	Date            string `json:"date,omitempty"`
	Rating          string `json:"rating,omitempty"`
	Tags            string `json:"tags,omitempty"`
	Models          string `json:"models,omitempty"`
	Episode         int    `json:"episode,omitempty"`
	GalleryFilename string `json:"gallery_filename,omitempty"`
	GalleryURL      string `json:"gallery_url,omitempty"`
	VideoFilename   string `json:"video_filename,omitempty"`
	VideoURL        string `json:"video_url,omitempty"`
	Studio          string `json:"studio,omitempty"`
	// UpdatedAt uses RailsTime to accept the legacy Rails timestamp format.
	UpdatedAt RailsTime `json:"updated_at,omitempty"`
}
|
||||
|
||||
func LoadScrapedFile(filePath string) ([]ScrapedItem, error) {
|
||||
var scraped []ScrapedItem
|
||||
file, err := os.Open(filePath)
|
||||
defer file.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&scraped)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return scraped, nil
|
||||
}
|
||||
|
||||
// SaveScrapedFile writes scrapedItems to filePath as indented JSON.
// Returns an error if scrapedItems is nil or the write fails.
func SaveScrapedFile(filePath string, scrapedItems []ScrapedItem) error {
	if scrapedItems == nil {
		return fmt.Errorf("scraped items must not be nil")
	}
	return marshalToFile(filePath, scrapedItems)
}
|
||||
35
pkg/manager/jsonschema/studio.go
Normal file
35
pkg/manager/jsonschema/studio.go
Normal file
@@ -0,0 +1,35 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Studio is the exported JSON representation of a studio.
type Studio struct {
	Name string `json:"name,omitempty"`
	URL  string `json:"url,omitempty"`
	// Image holds the studio image; presumably encoded image data —
	// NOTE(review): confirm the encoding against the exporter.
	Image string `json:"image,omitempty"`
}
|
||||
|
||||
func LoadStudioFile(filePath string) (*Studio, error) {
|
||||
var studio Studio
|
||||
file, err := os.Open(filePath)
|
||||
defer file.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&studio)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &studio, nil
|
||||
}
|
||||
|
||||
// SaveStudioFile writes studio to filePath as indented JSON.
// Returns an error if studio is nil or the write fails.
func SaveStudioFile(filePath string, studio *Studio) error {
	if studio == nil {
		return fmt.Errorf("studio must not be nil")
	}
	return marshalToFile(filePath, studio)
}
|
||||
37
pkg/manager/jsonschema/time_rails.go
Normal file
37
pkg/manager/jsonschema/time_rails.go
Normal file
@@ -0,0 +1,37 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// RailsTime wraps time.Time to parse timestamps in the legacy Rails export
// format, falling back to RFC 3339 (see UnmarshalJSON).
type RailsTime struct {
	time.Time
}

// railsTimeLayout matches the Rails default string form,
// e.g. "2019-01-01 10:00:00 UTC".
const railsTimeLayout = "2006-01-02 15:04:05 MST"
||||
|
||||
func (ct *RailsTime) UnmarshalJSON(b []byte) (err error) {
|
||||
s := strings.Trim(string(b), "\"")
|
||||
if s == "null" {
|
||||
ct.Time = time.Time{}
|
||||
return
|
||||
}
|
||||
ct.Time, err = time.Parse(railsTimeLayout, s)
|
||||
if err != nil {
|
||||
ct.Time, err = time.Parse(time.RFC3339, s)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (ct *RailsTime) MarshalJSON() ([]byte, error) {
|
||||
if ct.Time.UnixNano() == nilTime {
|
||||
return []byte("null"), nil
|
||||
}
|
||||
return []byte(fmt.Sprintf("\"%s\"", ct.Time.Format(time.RFC3339))), nil
|
||||
}
|
||||
|
||||
func (ct *RailsTime) IsSet() bool {
|
||||
return ct.UnixNano() != nilTime
|
||||
}
|
||||
36
pkg/manager/jsonschema/utils.go
Normal file
36
pkg/manager/jsonschema/utils.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"time"
|
||||
)
|
||||
|
||||
// nilTime is the UnixNano value of the zero time.Time, used to detect
// unset RailsTime values.
var nilTime = (time.Time{}).UnixNano()

// CompareJSON reports whether a and b serialize to byte-identical JSON.
func CompareJSON(a interface{}, b interface{}) bool {
	aBuf, _ := encode(a)
	bBuf, _ := encode(b)
	return bytes.Equal(aBuf, bBuf)
}

// marshalToFile encodes j as indented JSON and writes it to filePath.
func marshalToFile(filePath string, j interface{}) error {
	data, err := encode(j)
	if err != nil {
		return err
	}
	return ioutil.WriteFile(filePath, data, 0755)
}

// encode serializes j as two-space-indented JSON with HTML escaping
// disabled and the trailing newline removed.
func encode(j interface{}) ([]byte, error) {
	var out bytes.Buffer
	encoder := json.NewEncoder(&out)
	encoder.SetEscapeHTML(false)
	encoder.SetIndent("", "  ")
	if err := encoder.Encode(j); err != nil {
		return nil, err
	}
	// Strip the newline at the end of the file
	return bytes.TrimRight(out.Bytes(), "\n"), nil
}
|
||||
101
pkg/manager/manager.go
Normal file
101
pkg/manager/manager.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/manager/paths"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// singleton holds the global application state managed by this package:
// the current background job status, the derived and static path sets, and
// a helper for reading/writing the JSON metadata files.
type singleton struct {
	Status      JobStatus              // current background job (Idle when none running)
	Paths       *paths.Paths           // paths derived from the loaded configuration
	StaticPaths *paths.StaticPathsType // process-wide fixed paths (config dir, binaries, …)
	JSON        *jsonUtils             // JSON metadata file reader/writer
}
|
||||
|
||||
// instance is the lazily-created manager singleton; once guards its creation.
var instance *singleton
var once sync.Once

// GetInstance returns the process-wide manager singleton, creating it on
// first use.
func GetInstance() *singleton {
	Initialize()
	return instance
}
|
||||
|
||||
// Initialize constructs the manager singleton exactly once and returns it.
// Subsequent calls return the already-built instance unchanged.
func Initialize() *singleton {
	once.Do(func() {
		configFile := jsonschema.LoadConfigFile(paths.StaticPaths.ConfigFile)
		instance = &singleton{
			Status:      Idle,
			Paths:       paths.NewPaths(configFile),
			StaticPaths: &paths.StaticPaths,
			JSON:        &jsonUtils{},
		}

		// Re-derives the path set and ensures output directories exist.
		instance.refreshConfig(configFile)

		// Locates (or downloads) the FFMPEG/FFProbe binaries; fatal on failure.
		initFFMPEG()
	})

	return instance
}
|
||||
|
||||
func initFFMPEG() {
|
||||
ffmpegPath, ffprobePath := ffmpeg.GetPaths(instance.StaticPaths.ConfigDirectory)
|
||||
if ffmpegPath == "" || ffprobePath == "" {
|
||||
logger.Infof("couldn't find FFMPEG, attempting to download it")
|
||||
if err := ffmpeg.Download(instance.StaticPaths.ConfigDirectory); err != nil {
|
||||
msg := `Unable to locate / automatically download FFMPEG
|
||||
|
||||
Check the readme for download links.
|
||||
The FFMPEG and FFProbe binaries should be placed in %s
|
||||
|
||||
The error was: %s
|
||||
`
|
||||
logger.Fatalf(msg, instance.StaticPaths.ConfigDirectory, err)
|
||||
}
|
||||
}
|
||||
|
||||
instance.StaticPaths.FFMPEG = ffmpegPath
|
||||
instance.StaticPaths.FFProbe = ffprobePath
|
||||
}
|
||||
|
||||
func HasValidConfig() bool {
|
||||
configFileExists, _ := utils.FileExists(instance.StaticPaths.ConfigFile) // TODO: Verify JSON is correct
|
||||
if configFileExists && instance.Paths.Config != nil {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *singleton) SaveConfig(config *jsonschema.Config) error {
|
||||
if err := jsonschema.SaveConfigFile(s.StaticPaths.ConfigFile, config); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Reload the config
|
||||
s.refreshConfig(config)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// refreshConfig rebuilds the derived path set from the given config (loading
// it from disk when nil) and, when the configuration is valid, makes sure
// every generated-artifact and JSON-metadata directory exists.
func (s *singleton) refreshConfig(config *jsonschema.Config) {
	if config == nil {
		config = jsonschema.LoadConfigFile(s.StaticPaths.ConfigFile)
	}
	s.Paths = paths.NewPaths(config)

	if HasValidConfig() {
		// Errors are deliberately ignored here; a missing directory will
		// surface later when a task tries to write into it.
		_ = utils.EnsureDir(s.Paths.Generated.Screenshots)
		_ = utils.EnsureDir(s.Paths.Generated.Vtt)
		_ = utils.EnsureDir(s.Paths.Generated.Markers)
		_ = utils.EnsureDir(s.Paths.Generated.Transcodes)

		_ = utils.EnsureDir(s.Paths.JSON.Performers)
		_ = utils.EnsureDir(s.Paths.JSON.Scenes)
		_ = utils.EnsureDir(s.Paths.JSON.Galleries)
		_ = utils.EnsureDir(s.Paths.JSON.Studios)
	}
}
|
||||
36
pkg/manager/manager_subscription_handler.go
Normal file
36
pkg/manager/manager_subscription_handler.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
)
|
||||
|
||||
// metadataUpdatePayload is the JSON message pushed to metadata-update
// subscribers on every tick.
type metadataUpdatePayload struct {
	Progress float64          `json:"progress"` // job progress (currently always 0 — TODO in sender)
	Message  string           `json:"message"`  // human-readable name of the current job status
	Logs     []logger.LogItem `json:"logs"`     // cached log entries to display
}
|
||||
|
||||
func (s *singleton) HandleMetadataUpdateSubscriptionTick(msg chan string) {
|
||||
var statusMessage string
|
||||
switch instance.Status {
|
||||
case Idle:
|
||||
statusMessage = "Idle"
|
||||
case Import:
|
||||
statusMessage = "Import"
|
||||
case Export:
|
||||
statusMessage = "Export"
|
||||
case Scan:
|
||||
statusMessage = "Scan"
|
||||
case Generate:
|
||||
statusMessage = "Generate"
|
||||
}
|
||||
payload := &metadataUpdatePayload{
|
||||
Progress: 0, // TODO
|
||||
Message: statusMessage,
|
||||
Logs: logger.LogCache,
|
||||
}
|
||||
payloadJSON, _ := json.Marshal(payload)
|
||||
|
||||
msg <- string(payloadJSON)
|
||||
}
|
||||
127
pkg/manager/manager_tasks.go
Normal file
127
pkg/manager/manager_tasks.go
Normal file
@@ -0,0 +1,127 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/bmatcuk/doublestar"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Scan walks the configured stash directory for zip and video files and runs
// a ScanTask for each match in a background goroutine. It is a no-op when
// another job is already running.
func (s *singleton) Scan() {
	if s.Status != Idle {
		return
	}
	s.Status = Scan

	go func() {
		defer s.returnToIdleState()

		globPath := filepath.Join(s.Paths.Config.Stash, "**/*.{zip,m4v,mp4,mov,wmv}")
		globResults, _ := doublestar.Glob(globPath)
		logger.Infof("Starting scan of %d files", len(globResults))

		var wg sync.WaitGroup
		for _, path := range globResults {
			wg.Add(1)
			task := ScanTask{FilePath: path}
			go task.Start(&wg)
			// Waiting inside the loop serializes the scan, one file at a
			// time — presumably to bound resource usage; TODO confirm.
			wg.Wait()
		}
	}()
}
|
||||
|
||||
func (s *singleton) Import() {
|
||||
if s.Status != Idle {
|
||||
return
|
||||
}
|
||||
s.Status = Import
|
||||
|
||||
go func() {
|
||||
defer s.returnToIdleState()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(1)
|
||||
task := ImportTask{}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
}()
|
||||
}
|
||||
|
||||
func (s *singleton) Export() {
|
||||
if s.Status != Idle {
|
||||
return
|
||||
}
|
||||
s.Status = Export
|
||||
|
||||
go func() {
|
||||
defer s.returnToIdleState()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(1)
|
||||
task := ExportTask{}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
}()
|
||||
}
|
||||
|
||||
// Generate launches a background job that generates the selected artifact
// types (sprites, previews, markers, transcodes) for every scene. It is a
// no-op when another job is already running.
func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcodes bool) {
	if s.Status != Idle {
		return
	}
	s.Status = Generate

	qb := models.NewSceneQueryBuilder()
	//this.job.total = await ObjectionUtils.getCount(Scene);
	instance.Paths.Generated.EnsureTmpDir()

	go func() {
		defer s.returnToIdleState()

		scenes, err := qb.All()
		if err != nil {
			logger.Errorf("failed to get scenes for generate")
			return
		}

		// One WaitGroup slot per enabled task type, re-added for each scene.
		delta := utils.Btoi(sprites) + utils.Btoi(previews) + utils.Btoi(markers) + utils.Btoi(transcodes)
		var wg sync.WaitGroup
		for _, scene := range scenes {
			wg.Add(delta)

			if sprites {
				task := GenerateSpriteTask{Scene: scene}
				go task.Start(&wg)
			}

			if previews {
				task := GeneratePreviewTask{Scene: scene}
				go task.Start(&wg)
			}

			if markers {
				task := GenerateMarkersTask{Scene: scene}
				go task.Start(&wg)
			}

			if transcodes {
				task := GenerateTranscodeTask{Scene: scene}
				go task.Start(&wg)
			}

			// Waits for all of this scene's tasks before starting the next
			// scene — presumably to bound concurrency; TODO confirm.
			wg.Wait()
		}
	}()
}
|
||||
|
||||
// returnToIdleState is deferred by every background job goroutine. It
// recovers from any panic raised by the job, removes the temporary
// directory used by the generate job, and marks the manager idle again.
// Note: recover only takes effect because this runs as a deferred call.
func (s *singleton) returnToIdleState() {
	if r := recover(); r != nil {
		logger.Info("recovered from ", r)
	}

	if s.Status == Generate {
		instance.Paths.Generated.RemoveTmpDir()
	}
	s.Status = Idle
}
|
||||
27
pkg/manager/paths/paths.go
Normal file
27
pkg/manager/paths/paths.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
)
|
||||
|
||||
// Paths aggregates every derived filesystem location used by the manager,
// built from the user configuration by NewPaths.
type Paths struct {
	Config    *jsonschema.Config // the configuration the paths were derived from
	Generated *generatedPaths    // generated-artifact directories (screenshots, vtt, …)
	JSON      *jsonPaths         // JSON metadata files and directories

	Gallery      *galleryPaths     // extracted gallery locations
	Scene        *scenePaths       // per-scene artifact locations
	SceneMarkers *sceneMarkerPaths // per-marker artifact locations
}
|
||||
|
||||
func NewPaths(config *jsonschema.Config) *Paths {
|
||||
p := Paths{}
|
||||
p.Config = config
|
||||
p.Generated = newGeneratedPaths(p)
|
||||
p.JSON = newJSONPaths(p)
|
||||
|
||||
p.Gallery = newGalleryPaths(p.Config)
|
||||
p.Scene = newScenePaths(p)
|
||||
p.SceneMarkers = newSceneMarkerPaths(p)
|
||||
return &p
|
||||
}
|
||||
24
pkg/manager/paths/paths_gallery.go
Normal file
24
pkg/manager/paths/paths_gallery.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type galleryPaths struct {
|
||||
config *jsonschema.Config
|
||||
}
|
||||
|
||||
func newGalleryPaths(c *jsonschema.Config) *galleryPaths {
|
||||
gp := galleryPaths{}
|
||||
gp.config = c
|
||||
return &gp
|
||||
}
|
||||
|
||||
func (gp *galleryPaths) GetExtractedPath(checksum string) string {
|
||||
return filepath.Join(gp.config.Cache, checksum)
|
||||
}
|
||||
|
||||
func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string {
|
||||
return filepath.Join(gp.config.Cache, checksum, fileName)
|
||||
}
|
||||
40
pkg/manager/paths/paths_generated.go
Normal file
40
pkg/manager/paths/paths_generated.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type generatedPaths struct {
|
||||
Screenshots string
|
||||
Vtt string
|
||||
Markers string
|
||||
Transcodes string
|
||||
Tmp string
|
||||
}
|
||||
|
||||
func newGeneratedPaths(p Paths) *generatedPaths {
|
||||
gp := generatedPaths{}
|
||||
gp.Screenshots = filepath.Join(p.Config.Metadata, "screenshots")
|
||||
gp.Vtt = filepath.Join(p.Config.Metadata, "vtt")
|
||||
gp.Markers = filepath.Join(p.Config.Metadata, "markers")
|
||||
gp.Transcodes = filepath.Join(p.Config.Metadata, "transcodes")
|
||||
gp.Tmp = filepath.Join(p.Config.Metadata, "tmp")
|
||||
return &gp
|
||||
}
|
||||
|
||||
func (gp *generatedPaths) GetTmpPath(fileName string) string {
|
||||
return filepath.Join(gp.Tmp, fileName)
|
||||
}
|
||||
|
||||
func (gp *generatedPaths) EnsureTmpDir() {
|
||||
_ = utils.EnsureDir(gp.Tmp)
|
||||
}
|
||||
|
||||
func (gp *generatedPaths) EmptyTmpDir() {
|
||||
_ = utils.EmptyDir(gp.Tmp)
|
||||
}
|
||||
|
||||
func (gp *generatedPaths) RemoveTmpDir() {
|
||||
_ = utils.RemoveDir(gp.Tmp)
|
||||
}
|
||||
38
pkg/manager/paths/paths_json.go
Normal file
38
pkg/manager/paths/paths_json.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type jsonPaths struct {
|
||||
MappingsFile string
|
||||
ScrapedFile string
|
||||
|
||||
Performers string
|
||||
Scenes string
|
||||
Galleries string
|
||||
Studios string
|
||||
}
|
||||
|
||||
func newJSONPaths(p Paths) *jsonPaths {
|
||||
jp := jsonPaths{}
|
||||
jp.MappingsFile = filepath.Join(p.Config.Metadata, "mappings.json")
|
||||
jp.ScrapedFile = filepath.Join(p.Config.Metadata, "scraped.json")
|
||||
jp.Performers = filepath.Join(p.Config.Metadata, "performers")
|
||||
jp.Scenes = filepath.Join(p.Config.Metadata, "scenes")
|
||||
jp.Galleries = filepath.Join(p.Config.Metadata, "galleries")
|
||||
jp.Studios = filepath.Join(p.Config.Metadata, "studios")
|
||||
return &jp
|
||||
}
|
||||
|
||||
func (jp *jsonPaths) PerformerJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Performers, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *jsonPaths) SceneJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Scenes, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *jsonPaths) StudioJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Studios, checksum+".json")
|
||||
}
|
||||
24
pkg/manager/paths/paths_scene_markers.go
Normal file
24
pkg/manager/paths/paths_scene_markers.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type sceneMarkerPaths struct {
|
||||
generated generatedPaths
|
||||
}
|
||||
|
||||
func newSceneMarkerPaths(p Paths) *sceneMarkerPaths {
|
||||
sp := sceneMarkerPaths{}
|
||||
sp.generated = *p.Generated
|
||||
return &sp
|
||||
}
|
||||
|
||||
func (sp *sceneMarkerPaths) GetStreamPath(checksum string, seconds int) string {
|
||||
return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".mp4")
|
||||
}
|
||||
|
||||
func (sp *sceneMarkerPaths) GetStreamPreviewImagePath(checksum string, seconds int) string {
|
||||
return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".webp")
|
||||
}
|
||||
53
pkg/manager/paths/paths_scenes.go
Normal file
53
pkg/manager/paths/paths_scenes.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type scenePaths struct {
|
||||
generated generatedPaths
|
||||
}
|
||||
|
||||
func newScenePaths(p Paths) *scenePaths {
|
||||
sp := scenePaths{}
|
||||
sp.generated = *p.Generated
|
||||
return &sp
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetScreenshotPath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Screenshots, checksum+".jpg")
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetThumbnailScreenshotPath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Screenshots, checksum+".thumb.jpg")
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetTranscodePath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Transcodes, checksum+".mp4")
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetStreamPath(scenePath string, checksum string) string {
|
||||
transcodePath := sp.GetTranscodePath(checksum)
|
||||
transcodeExists, _ := utils.FileExists(transcodePath)
|
||||
if transcodeExists {
|
||||
return transcodePath
|
||||
}
|
||||
return scenePath
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetStreamPreviewPath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Screenshots, checksum+".mp4")
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetStreamPreviewImagePath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Screenshots, checksum+".webp")
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetSpriteImageFilePath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Vtt, checksum+"_sprite.jpg")
|
||||
}
|
||||
|
||||
func (sp *scenePaths) GetSpriteVttFilePath(checksum string) string {
|
||||
return filepath.Join(sp.generated.Vtt, checksum+"_thumbs.vtt")
|
||||
}
|
||||
44
pkg/manager/paths/paths_static.go
Normal file
44
pkg/manager/paths/paths_static.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package paths
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/user"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// StaticPathsType describes paths that are fixed for the lifetime of the
// process: where the executable lives, where configuration and the database
// are stored, and where the FFMPEG binaries were resolved to.
type StaticPathsType struct {
	ExecutionDirectory string
	ConfigDirectory    string
	ConfigFile         string
	DatabaseFile       string

	FFMPEG  string
	FFProbe string
}

// StaticPaths is the process-wide set of fixed paths, resolved at startup.
// FFMPEG/FFProbe are filled in later by the manager's FFMPEG initialization.
var StaticPaths = StaticPathsType{
	ExecutionDirectory: getExecutionDirectory(),
	ConfigDirectory:    getConfigDirectory(),
	ConfigFile:         filepath.Join(getConfigDirectory(), "config.json"),
	DatabaseFile:       filepath.Join(getConfigDirectory(), "stash-go.sqlite"),
}

// getExecutionDirectory returns the directory containing the running
// binary, panicking when it cannot be determined.
func getExecutionDirectory() string {
	executable, err := os.Executable()
	if err != nil {
		panic(err)
	}
	return filepath.Dir(executable)
}

// getHomeDirectory returns the current user's home directory, panicking
// when the current user cannot be resolved.
func getHomeDirectory() string {
	currentUser, err := user.Current()
	if err != nil {
		panic(err)
	}
	return currentUser.HomeDir
}

// getConfigDirectory returns the ".stash" directory under the user's home.
func getConfigDirectory() string {
	return filepath.Join(getHomeDirectory(), ".stash")
}
|
||||
7
pkg/manager/task.go
Normal file
7
pkg/manager/task.go
Normal file
@@ -0,0 +1,7 @@
|
||||
package manager
|
||||
|
||||
import "sync"
|
||||
|
||||
// Task is a unit of background work. Start performs the work and is
// responsible for calling wg.Done when it finishes (implementations in this
// package defer it).
type Task interface {
	Start(wg *sync.WaitGroup)
}
|
||||
457
pkg/manager/task_export.go
Normal file
457
pkg/manager/task_export.go
Normal file
@@ -0,0 +1,457 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"math"
|
||||
"strconv"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// ExportTask serializes the database content to the JSON metadata files.
type ExportTask struct {
	Mappings *jsonschema.Mappings     // checksum mappings accumulated for every exported entity
	Scraped  []jsonschema.ScrapedItem // scraped-site items accumulated for the scraped file
}
|
||||
|
||||
// Start runs a full metadata export: scenes, galleries, performers and
// studios (each of which also records its mappings), then the mappings
// file, then the scraped-sites file. Order matters: the mappings are saved
// only after the four entity exports have filled them in.
func (t *ExportTask) Start(wg *sync.WaitGroup) {
	defer wg.Done()
	// @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count

	t.Mappings = &jsonschema.Mappings{}
	t.Scraped = []jsonschema.ScrapedItem{}

	ctx := context.TODO()

	t.ExportScenes(ctx)
	t.ExportGalleries(ctx)
	t.ExportPerformers(ctx)
	t.ExportStudios(ctx)

	if err := instance.JSON.saveMappings(t.Mappings); err != nil {
		logger.Errorf("[mappings] failed to save json: %s", err.Error())
	}

	t.ExportScrapedItems(ctx)
}
|
||||
|
||||
// ExportScenes writes one JSON file per scene (skipping files whose content
// is unchanged) and records a path/checksum mapping for every scene. All
// lookups run inside a single read transaction that is committed on return.
func (t *ExportTask) ExportScenes(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	defer tx.Commit()
	qb := models.NewSceneQueryBuilder()
	studioQB := models.NewStudioQueryBuilder()
	galleryQB := models.NewGalleryQueryBuilder()
	performerQB := models.NewPerformerQueryBuilder()
	tagQB := models.NewTagQueryBuilder()
	sceneMarkerQB := models.NewSceneMarkerQueryBuilder()
	scenes, err := qb.All()
	if err != nil {
		// Logged only; the loop below simply iterates an empty slice.
		logger.Errorf("[scenes] failed to fetch all scenes: %s", err.Error())
	}

	logger.Info("[scenes] exporting")

	for i, scene := range scenes {
		index := i + 1
		logger.Progressf("[scenes] %d of %d", index, len(scenes))

		t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.Checksum})
		newSceneJSON := jsonschema.Scene{}

		// Resolve the studio name, if the scene has one.
		var studioName string
		if scene.StudioID.Valid {
			studio, _ := studioQB.Find(int(scene.StudioID.Int64), tx)
			if studio != nil {
				studioName = studio.Name.String
			}
		}

		// Resolve the associated gallery checksum, if any.
		var galleryChecksum string
		gallery, _ := galleryQB.FindBySceneID(scene.ID, tx)
		if gallery != nil {
			galleryChecksum = gallery.Checksum
		}

		performers, _ := performerQB.FindBySceneID(scene.ID, tx)
		tags, _ := tagQB.FindBySceneID(scene.ID, tx)
		sceneMarkers, _ := sceneMarkerQB.FindBySceneID(scene.ID, tx)

		// Copy only the fields that hold valid (non-NULL) database values so
		// empty fields are omitted from the JSON output.
		if scene.Title.Valid {
			newSceneJSON.Title = scene.Title.String
		}
		if studioName != "" {
			newSceneJSON.Studio = studioName
		}
		if scene.URL.Valid {
			newSceneJSON.URL = scene.URL.String
		}
		if scene.Date.Valid {
			newSceneJSON.Date = utils.GetYMDFromDatabaseDate(scene.Date.String)
		}
		if scene.Rating.Valid {
			newSceneJSON.Rating = int(scene.Rating.Int64)
		}
		if scene.Details.Valid {
			newSceneJSON.Details = scene.Details.String
		}
		if galleryChecksum != "" {
			newSceneJSON.Gallery = galleryChecksum
		}

		newSceneJSON.Performers = t.getPerformerNames(performers)
		newSceneJSON.Tags = t.getTagNames(tags)

		// Serialize the scene's markers; markers with unresolvable tags are
		// skipped, otherwise-invalid markers are logged but still exported.
		for _, sceneMarker := range sceneMarkers {
			var primaryTagID int
			if sceneMarker.PrimaryTagID.Valid {
				primaryTagID = int(sceneMarker.PrimaryTagID.Int64)
			}
			primaryTag, err := tagQB.Find(primaryTagID, tx)
			if err != nil {
				logger.Errorf("[scenes] <%s> invalid primary tag for scene marker: %s", scene.Checksum, err.Error())
				continue
			}
			sceneMarkerTags, err := tagQB.FindBySceneMarkerID(sceneMarker.ID, tx)
			if err != nil {
				logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", scene.Checksum, err.Error())
				continue
			}
			if sceneMarker.Title == "" || sceneMarker.Seconds == 0 || primaryTag.Name == "" {
				logger.Errorf("[scenes] invalid scene marker: %v", sceneMarker)
			}

			sceneMarkerJSON := jsonschema.SceneMarker{
				Title:      sceneMarker.Title,
				Seconds:    t.getDecimalString(sceneMarker.Seconds),
				PrimaryTag: primaryTag.Name,
				Tags:       t.getTagNames(sceneMarkerTags),
			}

			newSceneJSON.Markers = append(newSceneJSON.Markers, sceneMarkerJSON)
		}

		// File/container metadata.
		newSceneJSON.File = &jsonschema.SceneFile{}
		if scene.Size.Valid {
			newSceneJSON.File.Size = scene.Size.String
		}
		if scene.Duration.Valid {
			newSceneJSON.File.Duration = t.getDecimalString(scene.Duration.Float64)
		}
		if scene.VideoCodec.Valid {
			newSceneJSON.File.VideoCodec = scene.VideoCodec.String
		}
		if scene.AudioCodec.Valid {
			newSceneJSON.File.AudioCodec = scene.AudioCodec.String
		}
		if scene.Width.Valid {
			newSceneJSON.File.Width = int(scene.Width.Int64)
		}
		if scene.Height.Valid {
			newSceneJSON.File.Height = int(scene.Height.Int64)
		}
		if scene.Framerate.Valid {
			newSceneJSON.File.Framerate = t.getDecimalString(scene.Framerate.Float64)
		}
		if scene.Bitrate.Valid {
			newSceneJSON.File.Bitrate = int(scene.Bitrate.Int64)
		}

		// Skip the write when an existing file already has identical content.
		sceneJSON, err := instance.JSON.getScene(scene.Checksum)
		if err != nil {
			logger.Debugf("[scenes] error reading scene json: %s", err.Error())
		} else if jsonschema.CompareJSON(*sceneJSON, newSceneJSON) {
			continue
		}

		if err := instance.JSON.saveScene(scene.Checksum, &newSceneJSON); err != nil {
			logger.Errorf("[scenes] <%s> failed to save json: %s", scene.Checksum, err.Error())
		}
	}

	logger.Infof("[scenes] export complete")
}
|
||||
|
||||
func (t *ExportTask) ExportGalleries(ctx context.Context) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
galleries, err := qb.All()
|
||||
if err != nil {
|
||||
logger.Errorf("[galleries] failed to fetch all galleries: %s", err.Error())
|
||||
}
|
||||
|
||||
logger.Info("[galleries] exporting")
|
||||
|
||||
for i, gallery := range galleries {
|
||||
index := i + 1
|
||||
logger.Progressf("[galleries] %d of %d", index, len(galleries))
|
||||
t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{Path: gallery.Path, Checksum: gallery.Checksum})
|
||||
}
|
||||
|
||||
logger.Infof("[galleries] export complete")
|
||||
}
|
||||
|
||||
// ExportPerformers writes one JSON file per performer (skipping files whose
// content is unchanged) and records a name/checksum mapping for each.
func (t *ExportTask) ExportPerformers(ctx context.Context) {
	qb := models.NewPerformerQueryBuilder()
	performers, err := qb.All()
	if err != nil {
		// Logged only; the loop below simply iterates an empty slice.
		logger.Errorf("[performers] failed to fetch all performers: %s", err.Error())
	}

	logger.Info("[performers] exporting")

	for i, performer := range performers {
		index := i + 1
		logger.Progressf("[performers] %d of %d", index, len(performers))

		t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{Name: performer.Name.String, Checksum: performer.Checksum})

		newPerformerJSON := jsonschema.Performer{}

		// Copy only valid (non-NULL) database fields so empty values are
		// omitted from the JSON output.
		if performer.Name.Valid {
			newPerformerJSON.Name = performer.Name.String
		}
		if performer.URL.Valid {
			newPerformerJSON.URL = performer.URL.String
		}
		if performer.Birthdate.Valid {
			newPerformerJSON.Birthdate = utils.GetYMDFromDatabaseDate(performer.Birthdate.String)
		}
		if performer.Ethnicity.Valid {
			newPerformerJSON.Ethnicity = performer.Ethnicity.String
		}
		if performer.Country.Valid {
			newPerformerJSON.Country = performer.Country.String
		}
		if performer.EyeColor.Valid {
			newPerformerJSON.EyeColor = performer.EyeColor.String
		}
		if performer.Height.Valid {
			newPerformerJSON.Height = performer.Height.String
		}
		if performer.Measurements.Valid {
			newPerformerJSON.Measurements = performer.Measurements.String
		}
		if performer.FakeTits.Valid {
			newPerformerJSON.FakeTits = performer.FakeTits.String
		}
		if performer.CareerLength.Valid {
			newPerformerJSON.CareerLength = performer.CareerLength.String
		}
		if performer.Tattoos.Valid {
			newPerformerJSON.Tattoos = performer.Tattoos.String
		}
		if performer.Piercings.Valid {
			newPerformerJSON.Piercings = performer.Piercings.String
		}
		if performer.Aliases.Valid {
			newPerformerJSON.Aliases = performer.Aliases.String
		}
		if performer.Twitter.Valid {
			newPerformerJSON.Twitter = performer.Twitter.String
		}
		if performer.Instagram.Valid {
			newPerformerJSON.Instagram = performer.Instagram.String
		}
		if performer.Favorite.Valid {
			newPerformerJSON.Favorite = performer.Favorite.Bool
		}

		newPerformerJSON.Image = utils.GetBase64StringFromData(performer.Image)

		// Skip the write when an existing file already has identical content.
		performerJSON, err := instance.JSON.getPerformer(performer.Checksum)
		if err != nil {
			logger.Debugf("[performers] error reading performer json: %s", err.Error())
		} else if jsonschema.CompareJSON(*performerJSON, newPerformerJSON) {
			continue
		}

		if err := instance.JSON.savePerformer(performer.Checksum, &newPerformerJSON); err != nil {
			logger.Errorf("[performers] <%s> failed to save json: %s", performer.Checksum, err.Error())
		}
	}

	logger.Infof("[performers] export complete")
}
|
||||
|
||||
// ExportStudios writes one JSON file per studio (skipping files whose
// content is unchanged) and records a name/checksum mapping for each.
func (t *ExportTask) ExportStudios(ctx context.Context) {
	qb := models.NewStudioQueryBuilder()
	studios, err := qb.All()
	if err != nil {
		// Logged only; the loop below simply iterates an empty slice.
		logger.Errorf("[studios] failed to fetch all studios: %s", err.Error())
	}

	logger.Info("[studios] exporting")

	for i, studio := range studios {
		index := i + 1
		logger.Progressf("[studios] %d of %d", index, len(studios))

		t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{Name: studio.Name.String, Checksum: studio.Checksum})

		newStudioJSON := jsonschema.Studio{}

		// Copy only valid (non-NULL) database fields.
		if studio.Name.Valid {
			newStudioJSON.Name = studio.Name.String
		}
		if studio.URL.Valid {
			newStudioJSON.URL = studio.URL.String
		}

		newStudioJSON.Image = utils.GetBase64StringFromData(studio.Image)

		// Skip the write when an existing file already has identical content.
		studioJSON, err := instance.JSON.getStudio(studio.Checksum)
		if err != nil {
			logger.Debugf("[studios] error reading studio json: %s", err.Error())
		} else if jsonschema.CompareJSON(*studioJSON, newStudioJSON) {
			continue
		}

		if err := instance.JSON.saveStudio(studio.Checksum, &newStudioJSON); err != nil {
			logger.Errorf("[studios] <%s> failed to save json: %s", studio.Checksum, err.Error())
		}
	}

	logger.Infof("[studios] export complete")
}
|
||||
|
||||
// ExportScrapedItems collects every scraped-site item into t.Scraped and
// writes the single scraped-sites JSON file when its content changed.
// Studio lookups run inside a read transaction committed on return.
func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	defer tx.Commit()
	qb := models.NewScrapedItemQueryBuilder()
	sqb := models.NewStudioQueryBuilder()
	scrapedItems, err := qb.All()
	if err != nil {
		// Logged only; the loop below simply iterates an empty slice.
		logger.Errorf("[scraped sites] failed to fetch all items: %s", err.Error())
	}

	logger.Info("[scraped sites] exporting")

	for i, scrapedItem := range scrapedItems {
		index := i + 1
		logger.Progressf("[scraped sites] %d of %d", index, len(scrapedItems))

		// Resolve the studio name, if the item references one.
		var studioName string
		if scrapedItem.StudioID.Valid {
			studio, _ := sqb.Find(int(scrapedItem.StudioID.Int64), tx)
			if studio != nil {
				studioName = studio.Name.String
			}
		}

		newScrapedItemJSON := jsonschema.ScrapedItem{}

		// Copy only valid (non-NULL) database fields.
		if scrapedItem.Title.Valid {
			newScrapedItemJSON.Title = scrapedItem.Title.String
		}
		if scrapedItem.Description.Valid {
			newScrapedItemJSON.Description = scrapedItem.Description.String
		}
		if scrapedItem.URL.Valid {
			newScrapedItemJSON.URL = scrapedItem.URL.String
		}
		if scrapedItem.Date.Valid {
			newScrapedItemJSON.Date = utils.GetYMDFromDatabaseDate(scrapedItem.Date.String)
		}
		if scrapedItem.Rating.Valid {
			newScrapedItemJSON.Rating = scrapedItem.Rating.String
		}
		if scrapedItem.Tags.Valid {
			newScrapedItemJSON.Tags = scrapedItem.Tags.String
		}
		if scrapedItem.Models.Valid {
			newScrapedItemJSON.Models = scrapedItem.Models.String
		}
		if scrapedItem.Episode.Valid {
			newScrapedItemJSON.Episode = int(scrapedItem.Episode.Int64)
		}
		if scrapedItem.GalleryFilename.Valid {
			newScrapedItemJSON.GalleryFilename = scrapedItem.GalleryFilename.String
		}
		if scrapedItem.GalleryURL.Valid {
			newScrapedItemJSON.GalleryURL = scrapedItem.GalleryURL.String
		}
		if scrapedItem.VideoFilename.Valid {
			newScrapedItemJSON.VideoFilename = scrapedItem.VideoFilename.String
		}
		if scrapedItem.VideoURL.Valid {
			newScrapedItemJSON.VideoURL = scrapedItem.VideoURL.String
		}

		newScrapedItemJSON.Studio = studioName
		updatedAt := jsonschema.RailsTime{Time: scrapedItem.UpdatedAt.Timestamp} // TODO keeping ruby format
		newScrapedItemJSON.UpdatedAt = updatedAt

		t.Scraped = append(t.Scraped, newScrapedItemJSON)
	}

	// Write the aggregated file only when its content actually changed.
	scrapedJSON, err := instance.JSON.getScraped()
	if err != nil {
		logger.Debugf("[scraped sites] error reading json: %s", err.Error())
	}
	if !jsonschema.CompareJSON(scrapedJSON, t.Scraped) {
		if err := instance.JSON.saveScaped(t.Scraped); err != nil {
			logger.Errorf("[scraped sites] failed to save json: %s", err.Error())
		}
	}

	logger.Infof("[scraped sites] export complete")
}
|
||||
|
||||
func (t *ExportTask) getPerformerNames(performers []models.Performer) []string {
|
||||
if len(performers) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var results []string
|
||||
for _, performer := range performers {
|
||||
if performer.Name.Valid {
|
||||
results = append(results, performer.Name.String)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func (t *ExportTask) getTagNames(tags []models.Tag) []string {
|
||||
if len(tags) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var results []string
|
||||
for _, tag := range tags {
|
||||
if tag.Name != "" {
|
||||
results = append(results, tag.Name)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func (t *ExportTask) getDecimalString(num float64) string {
|
||||
if num == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
precision := getPrecision(num)
|
||||
if precision == 0 {
|
||||
precision = 1
|
||||
}
|
||||
return fmt.Sprintf("%."+strconv.Itoa(precision)+"f", num)
|
||||
}
|
||||
|
||||
// getPrecision returns the number of decimal digits needed to round-trip num
// exactly through math.Round(num*e)/e, capped at maxDecimalPrecision.
//
// The cap fixes a potential infinite loop: some float64 values (e.g. 1.0/3.0)
// never satisfy the round-trip test, so the original unbounded loop would grow
// e past the float64 range to +Inf, after which the comparison can never
// become equal and the loop spins forever.
func getPrecision(num float64) int {
	// float64 carries at most ~17 significant decimal digits, so there is
	// never a reason to report more precision than that.
	const maxDecimalPrecision = 17

	if num == 0 {
		return 0
	}

	e := 1.0
	p := 0
	for p < maxDecimalPrecision && (math.Round(num*e)/e) != num {
		e *= 10
		p++
	}
	return p
}
|
||||
77
pkg/manager/task_generate_markers.go
Normal file
77
pkg/manager/task_generate_markers.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// GenerateMarkersTask generates the preview video and preview image files for
// every scene marker attached to Scene.
type GenerateMarkersTask struct {
	// Scene whose markers will have preview assets generated.
	Scene models.Scene
}
|
||||
|
||||
func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) {
|
||||
defer wg.Done()
|
||||
|
||||
instance.Paths.Generated.EmptyTmpDir()
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, _ := qb.FindBySceneID(t.Scene.ID, nil)
|
||||
if len(sceneMarkers) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
|
||||
if err != nil {
|
||||
logger.Errorf("error reading video file: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Make the folder for the scenes markers
|
||||
markersFolder := filepath.Join(instance.Paths.Generated.Markers, t.Scene.Checksum)
|
||||
_ = utils.EnsureDir(markersFolder)
|
||||
|
||||
encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
|
||||
for i, sceneMarker := range sceneMarkers {
|
||||
index := i + 1
|
||||
logger.Progressf("[generator] <%s> scene marker %d of %d", t.Scene.Checksum, index, len(sceneMarkers))
|
||||
|
||||
seconds := int(sceneMarker.Seconds)
|
||||
baseFilename := strconv.Itoa(seconds)
|
||||
videoFilename := baseFilename + ".mp4"
|
||||
imageFilename := baseFilename + ".webp"
|
||||
videoPath := instance.Paths.SceneMarkers.GetStreamPath(t.Scene.Checksum, seconds)
|
||||
imagePath := instance.Paths.SceneMarkers.GetStreamPreviewImagePath(t.Scene.Checksum, seconds)
|
||||
videoExists, _ := utils.FileExists(videoPath)
|
||||
imageExists, _ := utils.FileExists(imagePath)
|
||||
|
||||
options := ffmpeg.SceneMarkerOptions{
|
||||
ScenePath: t.Scene.Path,
|
||||
Seconds: seconds,
|
||||
Width: 640,
|
||||
}
|
||||
if !videoExists {
|
||||
options.OutputPath = instance.Paths.Generated.GetTmpPath(videoFilename) // tmp output in case the process ends abruptly
|
||||
if err := encoder.SceneMarkerVideo(*videoFile, options); err != nil {
|
||||
logger.Errorf("[generator] failed to generate marker video: %s", err)
|
||||
} else {
|
||||
_ = os.Rename(options.OutputPath, videoPath)
|
||||
logger.Debug("created marker video: ", videoPath)
|
||||
}
|
||||
}
|
||||
|
||||
if !imageExists {
|
||||
options.OutputPath = instance.Paths.Generated.GetTmpPath(imageFilename) // tmp output in case the process ends abruptly
|
||||
if err := encoder.SceneMarkerImage(*videoFile, options); err != nil {
|
||||
logger.Errorf("[generator] failed to generate marker image: %s", err)
|
||||
} else {
|
||||
_ = os.Rename(options.OutputPath, imagePath)
|
||||
logger.Debug("created marker image: ", videoPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
54
pkg/manager/task_generate_preview.go
Normal file
54
pkg/manager/task_generate_preview.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// GeneratePreviewTask generates the stream preview video and image for Scene.
type GeneratePreviewTask struct {
	// Scene to generate preview assets for.
	Scene models.Scene
}
|
||||
|
||||
func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
|
||||
defer wg.Done()
|
||||
|
||||
videoFilename := t.videoFilename()
|
||||
imageFilename := t.imageFilename()
|
||||
if t.doesPreviewExist(t.Scene.Checksum) {
|
||||
return
|
||||
}
|
||||
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
|
||||
if err != nil {
|
||||
logger.Errorf("error reading video file: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
generator, err := NewPreviewGenerator(*videoFile, videoFilename, imageFilename, instance.Paths.Generated.Screenshots)
|
||||
if err != nil {
|
||||
logger.Errorf("error creating preview generator: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if err := generator.Generate(); err != nil {
|
||||
logger.Errorf("error generating preview: %s", err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (t *GeneratePreviewTask) doesPreviewExist(sceneChecksum string) bool {
|
||||
videoExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewPath(sceneChecksum))
|
||||
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewImagePath(sceneChecksum))
|
||||
return videoExists && imageExists
|
||||
}
|
||||
|
||||
func (t *GeneratePreviewTask) videoFilename() string {
|
||||
return t.Scene.Checksum + ".mp4"
|
||||
}
|
||||
|
||||
func (t *GeneratePreviewTask) imageFilename() string {
|
||||
return t.Scene.Checksum + ".webp"
|
||||
}
|
||||
46
pkg/manager/task_generate_sprite.go
Normal file
46
pkg/manager/task_generate_sprite.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// GenerateSpriteTask generates the sprite sheet image and VTT file used for
// scrubber thumbnails of Scene.
type GenerateSpriteTask struct {
	// Scene to generate sprite assets for.
	Scene models.Scene
}
|
||||
|
||||
func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
|
||||
defer wg.Done()
|
||||
|
||||
if t.doesSpriteExist(t.Scene.Checksum) {
|
||||
return
|
||||
}
|
||||
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
|
||||
if err != nil {
|
||||
logger.Errorf("error reading video file: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
imagePath := instance.Paths.Scene.GetSpriteImageFilePath(t.Scene.Checksum)
|
||||
vttPath := instance.Paths.Scene.GetSpriteVttFilePath(t.Scene.Checksum)
|
||||
generator, err := NewSpriteGenerator(*videoFile, imagePath, vttPath, 9, 9)
|
||||
if err != nil {
|
||||
logger.Errorf("error creating sprite generator: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if err := generator.Generate(); err != nil {
|
||||
logger.Errorf("error generating sprite: %s", err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (t *GenerateSpriteTask) doesSpriteExist(sceneChecksum string) bool {
|
||||
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetSpriteImageFilePath(sceneChecksum))
|
||||
vttExists, _ := utils.FileExists(instance.Paths.Scene.GetSpriteVttFilePath(sceneChecksum))
|
||||
return imageExists && vttExists
|
||||
}
|
||||
637
pkg/manager/task_import.go
Normal file
637
pkg/manager/task_import.go
Normal file
@@ -0,0 +1,637 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ImportTask restores a database from the exported JSON metadata. It resets
// the database and re-imports performers, studios, galleries, tags, scraped
// items and scenes from the JSON files on disk.
type ImportTask struct {
	// Mappings holds the checksum-to-name/path index read from mappings json.
	Mappings *jsonschema.Mappings
	// Scraped holds the scraped-site entries read from scraped json.
	Scraped []jsonschema.ScrapedItem
}
|
||||
|
||||
func (t *ImportTask) Start(wg *sync.WaitGroup) {
|
||||
defer wg.Done()
|
||||
|
||||
t.Mappings, _ = instance.JSON.getMappings()
|
||||
if t.Mappings == nil {
|
||||
logger.Error("missing mappings json")
|
||||
return
|
||||
}
|
||||
scraped, _ := instance.JSON.getScraped()
|
||||
if scraped == nil {
|
||||
logger.Warn("missing scraped json")
|
||||
}
|
||||
t.Scraped = scraped
|
||||
|
||||
database.Reset(instance.StaticPaths.DatabaseFile)
|
||||
|
||||
ctx := context.TODO()
|
||||
|
||||
t.ImportPerformers(ctx)
|
||||
t.ImportStudios(ctx)
|
||||
t.ImportGalleries(ctx)
|
||||
t.ImportTags(ctx)
|
||||
|
||||
t.ImportScrapedItems(ctx)
|
||||
t.ImportScenes(ctx)
|
||||
}
|
||||
|
||||
// ImportPerformers imports every performer listed in the mappings json into
// the database in a single transaction, including the base64-encoded image.
// Any creation failure rolls back the whole transaction and aborts.
func (t *ImportTask) ImportPerformers(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewPerformerQueryBuilder()

	for i, mappingJSON := range t.Mappings.Performers {
		index := i + 1
		performerJSON, err := instance.JSON.getPerformer(mappingJSON.Checksum)
		if err != nil {
			logger.Errorf("[performers] failed to read json: %s", err.Error())
			continue
		}
		// NOTE(review): this bare return aborts the whole import on the first
		// incomplete mapping without rolling back tx (leaving the transaction
		// open) — `continue` plus a rollback seems more likely intended; verify.
		if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil {
			return
		}

		logger.Progressf("[performers] %d of %d", index, len(t.Mappings.Performers))

		// Process the base 64 encoded image string
		checksum, imageData, err := utils.ProcessBase64Image(performerJSON.Image)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[performers] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
			return
		}

		// Populate a new performer from the input
		currentTime := time.Now()
		newPerformer := models.Performer{
			Image:     imageData,
			Checksum:  checksum,
			Favorite:  sql.NullBool{Bool: performerJSON.Favorite, Valid: true},
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}

		// Optional string fields: only mark the sql.NullString valid when the
		// json actually supplied a value.
		if performerJSON.Name != "" {
			newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true}
		}
		if performerJSON.URL != "" {
			newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true}
		}
		if performerJSON.Birthdate != "" {
			newPerformer.Birthdate = sql.NullString{String: performerJSON.Birthdate, Valid: true}
		}
		if performerJSON.Ethnicity != "" {
			newPerformer.Ethnicity = sql.NullString{String: performerJSON.Ethnicity, Valid: true}
		}
		if performerJSON.Country != "" {
			newPerformer.Country = sql.NullString{String: performerJSON.Country, Valid: true}
		}
		if performerJSON.EyeColor != "" {
			newPerformer.EyeColor = sql.NullString{String: performerJSON.EyeColor, Valid: true}
		}
		if performerJSON.Height != "" {
			newPerformer.Height = sql.NullString{String: performerJSON.Height, Valid: true}
		}
		if performerJSON.Measurements != "" {
			newPerformer.Measurements = sql.NullString{String: performerJSON.Measurements, Valid: true}
		}
		if performerJSON.FakeTits != "" {
			newPerformer.FakeTits = sql.NullString{String: performerJSON.FakeTits, Valid: true}
		}
		if performerJSON.CareerLength != "" {
			newPerformer.CareerLength = sql.NullString{String: performerJSON.CareerLength, Valid: true}
		}
		if performerJSON.Tattoos != "" {
			newPerformer.Tattoos = sql.NullString{String: performerJSON.Tattoos, Valid: true}
		}
		if performerJSON.Piercings != "" {
			newPerformer.Piercings = sql.NullString{String: performerJSON.Piercings, Valid: true}
		}
		if performerJSON.Aliases != "" {
			newPerformer.Aliases = sql.NullString{String: performerJSON.Aliases, Valid: true}
		}
		if performerJSON.Twitter != "" {
			newPerformer.Twitter = sql.NullString{String: performerJSON.Twitter, Valid: true}
		}
		if performerJSON.Instagram != "" {
			newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true}
		}

		_, err = qb.Create(newPerformer, tx)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[performers] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
			return
		}
	}

	logger.Info("[performers] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[performers] import failed to commit: %s", err.Error())
	}
	logger.Info("[performers] import complete")
}
|
||||
|
||||
// ImportStudios imports every studio listed in the mappings json into the
// database in a single transaction, including the base64-encoded image.
// Any creation failure rolls back the whole transaction and aborts.
func (t *ImportTask) ImportStudios(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewStudioQueryBuilder()

	for i, mappingJSON := range t.Mappings.Studios {
		index := i + 1
		studioJSON, err := instance.JSON.getStudio(mappingJSON.Checksum)
		if err != nil {
			logger.Errorf("[studios] failed to read json: %s", err.Error())
			continue
		}
		// NOTE(review): bare return on an incomplete mapping aborts the whole
		// import without rolling back tx — likely should be continue/rollback.
		if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil {
			return
		}

		logger.Progressf("[studios] %d of %d", index, len(t.Mappings.Studios))

		// Process the base 64 encoded image string
		checksum, imageData, err := utils.ProcessBase64Image(studioJSON.Image)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[studios] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
			return
		}

		// Populate a new studio from the input
		currentTime := time.Now()
		newStudio := models.Studio{
			Image:     imageData,
			Checksum:  checksum,
			Name:      sql.NullString{String: studioJSON.Name, Valid: true},
			URL:       sql.NullString{String: studioJSON.URL, Valid: true},
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}

		_, err = qb.Create(newStudio, tx)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
			return
		}
	}

	logger.Info("[studios] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[studios] import failed to commit: %s", err.Error())
	}
	logger.Info("[studios] import complete")
}
|
||||
|
||||
// ImportGalleries imports every gallery listed in the mappings json into the
// database in a single transaction. Galleries carry only checksum and path;
// there is no per-gallery json file to read.
func (t *ImportTask) ImportGalleries(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewGalleryQueryBuilder()

	for i, mappingJSON := range t.Mappings.Galleries {
		index := i + 1
		// NOTE(review): bare return on an incomplete mapping aborts the whole
		// import without rolling back tx — likely should be continue/rollback.
		if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
			return
		}

		logger.Progressf("[galleries] %d of %d", index, len(t.Mappings.Galleries))

		// Populate a new gallery from the input
		currentTime := time.Now()
		newGallery := models.Gallery{
			Checksum:  mappingJSON.Checksum,
			Path:      mappingJSON.Path,
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}

		_, err := qb.Create(newGallery, tx)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[galleries] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
			return
		}
	}

	logger.Info("[galleries] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[galleries] import failed to commit: %s", err.Error())
	}
	logger.Info("[galleries] import complete")
}
|
||||
|
||||
// ImportTags scans every scene json for tag names (scene tags plus marker
// primary tags and marker tags), deduplicates them, and creates each unique
// tag in the database in a single transaction.
func (t *ImportTask) ImportTags(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewTagQueryBuilder()

	// Accumulates every tag name seen across all scenes; deduplicated below.
	var tagNames []string

	for i, mappingJSON := range t.Mappings.Scenes {
		index := i + 1
		if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
			_ = tx.Rollback()
			logger.Warn("[tags] scene mapping without checksum or path: ", mappingJSON)
			return
		}

		logger.Progressf("[tags] %d of %d scenes", index, len(t.Mappings.Scenes))

		sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
		if err != nil {
			logger.Infof("[tags] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
		}
		// Return early if we are missing a json file.
		if sceneJSON == nil {
			continue
		}

		// Get the tags from the tags json if we have it
		if len(sceneJSON.Tags) > 0 {
			tagNames = append(tagNames, sceneJSON.Tags...)
		}

		// Get the tags from the markers if we have marker json
		if len(sceneJSON.Markers) == 0 {
			continue
		}
		for _, markerJSON := range sceneJSON.Markers {
			if markerJSON.PrimaryTag != "" {
				tagNames = append(tagNames, markerJSON.PrimaryTag)
			}
			if len(markerJSON.Tags) > 0 {
				tagNames = append(tagNames, markerJSON.Tags...)
			}
		}
	}

	// Create one Tag row per unique name.
	uniqueTagNames := t.getUnique(tagNames)
	for _, tagName := range uniqueTagNames {
		currentTime := time.Now()
		newTag := models.Tag{
			Name:      tagName,
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}

		_, err := qb.Create(newTag, tx)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[tags] <%s> failed to create: %s", tagName, err.Error())
			return
		}
	}

	logger.Info("[tags] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[tags] import failed to commit: %s", err.Error())
	}
	logger.Info("[tags] import complete")
}
|
||||
|
||||
func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewScrapedItemQueryBuilder()
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
currentTime := time.Now()
|
||||
|
||||
for i, mappingJSON := range t.Scraped {
|
||||
index := i + 1
|
||||
logger.Progressf("[scraped sites] %d of %d", index, len(t.Mappings.Scenes))
|
||||
|
||||
var updatedAt time.Time
|
||||
if currentTime.Location() != nil {
|
||||
updatedAt = mappingJSON.UpdatedAt.Time.In(currentTime.Location())
|
||||
} else {
|
||||
updatedAt = mappingJSON.UpdatedAt.Time
|
||||
}
|
||||
newScrapedItem := models.ScrapedItem{
|
||||
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
|
||||
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
|
||||
URL: sql.NullString{String: mappingJSON.URL, Valid: true},
|
||||
Date: sql.NullString{String: mappingJSON.Date, Valid: true},
|
||||
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
|
||||
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
|
||||
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
|
||||
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
|
||||
GalleryFilename: sql.NullString{String: mappingJSON.GalleryFilename, Valid: true},
|
||||
GalleryURL: sql.NullString{String: mappingJSON.GalleryURL, Valid: true},
|
||||
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
|
||||
VideoURL: sql.NullString{String: mappingJSON.VideoURL, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedAt},
|
||||
}
|
||||
|
||||
studio, err := sqb.FindByName(mappingJSON.Studio, tx)
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
|
||||
}
|
||||
if studio != nil {
|
||||
newScrapedItem.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
||||
}
|
||||
|
||||
_, err = qb.Create(newScrapedItem, tx)
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title.String, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[scraped sites] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[scraped sites] import failed to commit: %s", err.Error())
|
||||
}
|
||||
logger.Info("[scraped sites] import complete")
|
||||
}
|
||||
|
||||
// ImportScenes imports every scene listed in the mappings json into the
// database in a single transaction, then wires up each scene's studio,
// gallery, performers, tags and scene markers from the per-scene json file.
// Creation failures roll back the transaction and abort; relational failures
// are logged and the import continues.
func (t *ImportTask) ImportScenes(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewSceneQueryBuilder()
	jqb := models.NewJoinsQueryBuilder()
	currentTime := time.Now()

	for i, mappingJSON := range t.Mappings.Scenes {
		index := i + 1
		if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
			_ = tx.Rollback()
			logger.Warn("[scenes] scene mapping without checksum or path: ", mappingJSON)
			return
		}

		logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes))

		// Base scene row; optional fields are filled in from the json below.
		newScene := models.Scene{
			Checksum:  mappingJSON.Checksum,
			Path:      mappingJSON.Path,
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}

		sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
		if err != nil {
			logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
			continue
		}

		// Populate scene fields
		if sceneJSON != nil {
			if sceneJSON.Title != "" {
				newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true}
			}
			if sceneJSON.Details != "" {
				newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true}
			}
			if sceneJSON.URL != "" {
				newScene.URL = sql.NullString{String: sceneJSON.URL, Valid: true}
			}
			if sceneJSON.Date != "" {
				newScene.Date = sql.NullString{String: sceneJSON.Date, Valid: true}
			}
			if sceneJSON.Rating != 0 {
				newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true}
			}

			// File metadata (duration/framerate arrive as strings in the json
			// and are parsed; parse errors silently yield 0).
			if sceneJSON.File != nil {
				if sceneJSON.File.Size != "" {
					newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true}
				}
				if sceneJSON.File.Duration != "" {
					duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
					newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true}
				}
				if sceneJSON.File.VideoCodec != "" {
					newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true}
				}
				if sceneJSON.File.AudioCodec != "" {
					newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true}
				}
				if sceneJSON.File.Width != 0 {
					newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true}
				}
				if sceneJSON.File.Height != 0 {
					newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true}
				}
				if sceneJSON.File.Framerate != "" {
					framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
					newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true}
				}
				if sceneJSON.File.Bitrate != 0 {
					newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true}
				}
			} else {
				// TODO: Get FFMPEG data?
			}
		}

		// Populate the studio ID
		// NOTE(review): sceneJSON is dereferenced here and in the sections
		// below without a nil check; ImportTags treats a nil sceneJSON (with
		// nil error) as possible, so this would panic in that case — verify.
		if sceneJSON.Studio != "" {
			sqb := models.NewStudioQueryBuilder()
			studio, err := sqb.FindByName(sceneJSON.Studio, tx)
			if err != nil {
				logger.Warnf("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err.Error())
			} else {
				newScene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
			}
		}

		// Create the scene in the DB
		scene, err := qb.Create(newScene, tx)
		if err != nil {
			_ = tx.Rollback()
			// NOTE(review): on error this logs scene.Checksum from the Create
			// result rather than mappingJSON.Checksum — confirm the result is
			// usable when err != nil.
			logger.Errorf("[scenes] <%s> failed to create: %s", scene.Checksum, err.Error())
			return
		}
		if scene.ID == 0 {
			_ = tx.Rollback()
			logger.Errorf("[scenes] <%s> invalid id after scene creation", mappingJSON.Checksum)
			return
		}

		// Relate the scene to the gallery
		if sceneJSON.Gallery != "" {
			gqb := models.NewGalleryQueryBuilder()
			gallery, err := gqb.FindByChecksum(sceneJSON.Gallery, tx)
			if err != nil {
				logger.Warnf("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err.Error())
			} else {
				gallery.SceneID = sql.NullInt64{Int64: int64(scene.ID), Valid: true}
				_, err := gqb.Update(*gallery, tx)
				if err != nil {
					logger.Errorf("[scenes] <%s> failed to update gallery: %s", scene.Checksum, err.Error())
				}
			}
		}

		// Relate the scene to the performers
		if len(sceneJSON.Performers) > 0 {
			performers, err := t.getPerformers(sceneJSON.Performers, tx)
			if err != nil {
				logger.Warnf("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err.Error())
			} else {
				var performerJoins []models.PerformersScenes
				for _, performer := range performers {
					join := models.PerformersScenes{
						PerformerID: performer.ID,
						SceneID:     scene.ID,
					}
					performerJoins = append(performerJoins, join)
				}
				if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil {
					logger.Errorf("[scenes] <%s> failed to associate performers: %s", scene.Checksum, err.Error())
				}
			}
		}

		// Relate the scene to the tags
		if len(sceneJSON.Tags) > 0 {
			tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx)
			if err != nil {
				logger.Warnf("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err.Error())
			} else {
				var tagJoins []models.ScenesTags
				for _, tag := range tags {
					join := models.ScenesTags{
						SceneID: scene.ID,
						TagID:   tag.ID,
					}
					tagJoins = append(tagJoins, join)
				}
				if err := jqb.CreateScenesTags(tagJoins, tx); err != nil {
					logger.Errorf("[scenes] <%s> failed to associate tags: %s", scene.Checksum, err.Error())
				}
			}
		}

		// Relate the scene to the scene markers
		if len(sceneJSON.Markers) > 0 {
			smqb := models.NewSceneMarkerQueryBuilder()
			tqb := models.NewTagQueryBuilder()
			for _, marker := range sceneJSON.Markers {
				// Marker offsets are serialized as strings; parse errors yield 0.
				seconds, _ := strconv.ParseFloat(marker.Seconds, 64)
				newSceneMarker := models.SceneMarker{
					Title:     marker.Title,
					Seconds:   seconds,
					SceneID:   sql.NullInt64{Int64: int64(scene.ID), Valid: true},
					CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
					UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
				}

				primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx)
				if err != nil {
					logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", scene.Checksum, err.Error())
				} else {
					newSceneMarker.PrimaryTagID = sql.NullInt64{Int64: int64(primaryTag.ID), Valid: true}
				}

				// Create the scene marker in the DB
				sceneMarker, err := smqb.Create(newSceneMarker, tx)
				if err != nil {
					logger.Warnf("[scenes] <%s> failed to create scene marker: %s", scene.Checksum, err.Error())
					continue
				}
				if sceneMarker.ID == 0 {
					logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", scene.Checksum)
					continue
				}

				// Get the scene marker tags and create the joins
				tags, err := t.getTags(scene.Checksum, marker.Tags, tx)
				if err != nil {
					logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err.Error())
				} else {
					var tagJoins []models.SceneMarkersTags
					for _, tag := range tags {
						join := models.SceneMarkersTags{
							SceneMarkerID: sceneMarker.ID,
							TagID:         tag.ID,
						}
						tagJoins = append(tagJoins, join)
					}
					if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil {
						logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", scene.Checksum, err.Error())
					}
				}
			}
		}
	}

	logger.Info("[scenes] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[scenes] import failed to commit: %s", err.Error())
	}
	logger.Info("[scenes] import complete")
}
|
||||
|
||||
func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]models.Performer, error) {
|
||||
pqb := models.NewPerformerQueryBuilder()
|
||||
performers, err := pqb.FindByNames(names, tx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pluckedNames []string
|
||||
for _, performer := range performers {
|
||||
if !performer.Name.Valid {
|
||||
continue
|
||||
}
|
||||
pluckedNames = append(pluckedNames, performer.Name.String)
|
||||
}
|
||||
|
||||
missingPerformers := utils.StrFilter(names, func(name string) bool {
|
||||
return !utils.StrInclude(pluckedNames, name)
|
||||
})
|
||||
|
||||
for _, missingPerformer := range missingPerformers {
|
||||
logger.Warnf("[scenes] performer %s does not exist", missingPerformer)
|
||||
}
|
||||
|
||||
return performers, nil
|
||||
}
|
||||
|
||||
func (t *ImportTask) getTags(sceneChecksum string, names []string, tx *sqlx.Tx) ([]models.Tag, error) {
|
||||
tqb := models.NewTagQueryBuilder()
|
||||
tags, err := tqb.FindByNames(names, tx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pluckedNames []string
|
||||
for _, tag := range tags {
|
||||
if tag.Name == "" {
|
||||
continue
|
||||
}
|
||||
pluckedNames = append(pluckedNames, tag.Name)
|
||||
}
|
||||
|
||||
missingTags := utils.StrFilter(names, func(name string) bool {
|
||||
return !utils.StrInclude(pluckedNames, name)
|
||||
})
|
||||
|
||||
for _, missingTag := range missingTags {
|
||||
logger.Warnf("[scenes] <%s> tag %s does not exist", sceneChecksum, missingTag)
|
||||
}
|
||||
|
||||
return tags, nil
|
||||
}
|
||||
|
||||
// https://www.reddit.com/r/golang/comments/5ia523/idiomatic_way_to_remove_duplicates_in_a_slice/db6qa2e
|
||||
func (t *ImportTask) getUnique(s []string) []string {
|
||||
seen := make(map[string]struct{}, len(s))
|
||||
j := 0
|
||||
for _, v := range s {
|
||||
if _, ok := seen[v]; ok {
|
||||
continue
|
||||
}
|
||||
seen[v] = struct{}{}
|
||||
s[j] = v
|
||||
j++
|
||||
}
|
||||
return s[:j]
|
||||
}
|
||||
163
pkg/manager/task_scan.go
Normal file
163
pkg/manager/task_scan.go
Normal file
@@ -0,0 +1,163 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ScanTask scans a single file path into the database: zip files are treated
// as galleries, everything else as scene video files.
type ScanTask struct {
	// FilePath is the absolute path of the file to scan.
	FilePath string
}
|
||||
|
||||
func (t *ScanTask) Start(wg *sync.WaitGroup) {
|
||||
if filepath.Ext(t.FilePath) == ".zip" {
|
||||
t.scanGallery()
|
||||
} else {
|
||||
t.scanScene()
|
||||
}
|
||||
|
||||
wg.Done()
|
||||
}
|
||||
|
||||
func (t *ScanTask) scanGallery() {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
gallery, _ := qb.FindByPath(t.FilePath)
|
||||
if gallery != nil {
|
||||
// We already have this item in the database, keep going
|
||||
return
|
||||
}
|
||||
|
||||
checksum, err := t.calculateChecksum()
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
gallery, _ = qb.FindByChecksum(checksum, tx)
|
||||
if gallery != nil {
|
||||
logger.Infof("%s already exists. Updating path...", t.FilePath)
|
||||
gallery.Path = t.FilePath
|
||||
_, err = qb.Update(*gallery, tx)
|
||||
} else {
|
||||
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
|
||||
currentTime := time.Now()
|
||||
newGallery := models.Gallery{
|
||||
Checksum: checksum,
|
||||
Path: t.FilePath,
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
_, err = qb.Create(newGallery, tx)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
_ = tx.Rollback()
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ScanTask) scanScene() {
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.FilePath)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
scene, _ := qb.FindByPath(t.FilePath)
|
||||
if scene != nil {
|
||||
// We already have this item in the database, keep going
|
||||
return
|
||||
}
|
||||
|
||||
checksum, err := t.calculateChecksum()
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
t.makeScreenshots(*videoFile, checksum)
|
||||
|
||||
scene, _ = qb.FindByChecksum(checksum)
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if scene != nil {
|
||||
logger.Infof("%s already exists. Updating path...", t.FilePath)
|
||||
scene.Path = t.FilePath
|
||||
_, err = qb.Update(*scene, tx)
|
||||
} else {
|
||||
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
|
||||
currentTime := time.Now()
|
||||
newScene := models.Scene{
|
||||
Checksum: checksum,
|
||||
Path: t.FilePath,
|
||||
Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
|
||||
VideoCodec: sql.NullString{String: videoFile.VideoCodec, Valid: true},
|
||||
AudioCodec: sql.NullString{String: videoFile.AudioCodec, Valid: true},
|
||||
Width: sql.NullInt64{Int64: int64(videoFile.Width), Valid: true},
|
||||
Height: sql.NullInt64{Int64: int64(videoFile.Height), Valid: true},
|
||||
Framerate: sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
|
||||
Bitrate: sql.NullInt64{Int64: videoFile.Bitrate, Valid: true},
|
||||
Size: sql.NullString{String: strconv.Itoa(int(videoFile.Size)), Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
_, err = qb.Create(newScene, tx)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
_ = tx.Rollback()
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ScanTask) makeScreenshots(probeResult ffmpeg.VideoFile, checksum string) {
|
||||
thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum)
|
||||
normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)
|
||||
|
||||
thumbExists, _ := utils.FileExists(thumbPath)
|
||||
normalExists, _ := utils.FileExists(normalPath)
|
||||
if thumbExists && normalExists {
|
||||
logger.Debug("Screenshots already exist for this path... skipping")
|
||||
return
|
||||
}
|
||||
|
||||
t.makeScreenshot(probeResult, thumbPath, 5, 320)
|
||||
t.makeScreenshot(probeResult, normalPath, 2, probeResult.Width)
|
||||
}
|
||||
|
||||
func (t *ScanTask) makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int) {
|
||||
encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
|
||||
options := ffmpeg.ScreenshotOptions{
|
||||
OutputPath: outputPath,
|
||||
Quality: quality,
|
||||
Time: float64(probeResult.Duration) * 0.2,
|
||||
Width: width,
|
||||
}
|
||||
encoder.Screenshot(probeResult, options)
|
||||
}
|
||||
|
||||
func (t *ScanTask) calculateChecksum() (string, error) {
|
||||
logger.Infof("%s not found. Calculating checksum...", t.FilePath)
|
||||
checksum, err := utils.MD5FromFilePath(t.FilePath)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
logger.Debugf("Checksum calculated: %s", checksum)
|
||||
return checksum, nil
|
||||
}
|
||||
47
pkg/manager/task_transcode.go
Normal file
47
pkg/manager/task_transcode.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"os"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type GenerateTranscodeTask struct {
|
||||
Scene models.Scene
|
||||
}
|
||||
|
||||
func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
|
||||
defer wg.Done()
|
||||
videoCodec := t.Scene.VideoCodec.String
|
||||
if ffmpeg.IsValidCodec(videoCodec) {
|
||||
return
|
||||
}
|
||||
|
||||
hasTranscode, _ := HasTranscode(&t.Scene)
|
||||
if hasTranscode {
|
||||
return
|
||||
}
|
||||
|
||||
logger.Infof("[transcode] <%s> scene has codec %s", t.Scene.Checksum, t.Scene.VideoCodec.String)
|
||||
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
|
||||
if err != nil {
|
||||
logger.Errorf("[transcode] error reading video file: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
outputPath := instance.Paths.Generated.GetTmpPath(t.Scene.Checksum + ".mp4")
|
||||
options := ffmpeg.TranscodeOptions{
|
||||
OutputPath: outputPath,
|
||||
}
|
||||
encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
|
||||
encoder.Transcode(*videoFile, options)
|
||||
if err := os.Rename(outputPath, instance.Paths.Scene.GetTranscodePath(t.Scene.Checksum)); err != nil {
|
||||
logger.Errorf("[transcode] error generating transcode: %s", err.Error())
|
||||
return
|
||||
}
|
||||
logger.Debugf("[transcode] <%s> created transcode: %s", t.Scene.Checksum, outputPath)
|
||||
return
|
||||
}
|
||||
32
pkg/manager/utils.go
Normal file
32
pkg/manager/utils.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func IsStreamable(scene *models.Scene) (bool, error) {
|
||||
if scene == nil {
|
||||
return false, fmt.Errorf("nil scene")
|
||||
}
|
||||
fileType, err := utils.FileType(scene.Path)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
switch fileType.MIME.Value {
|
||||
case "video/quicktime", "video/mp4", "video/webm", "video/x-m4v":
|
||||
return true, nil
|
||||
default:
|
||||
return HasTranscode(scene)
|
||||
}
|
||||
}
|
||||
|
||||
func HasTranscode(scene *models.Scene) (bool, error) {
|
||||
if scene == nil {
|
||||
return false, fmt.Errorf("nil scene")
|
||||
}
|
||||
transcodePath := instance.Paths.Scene.GetTranscodePath(scene.Checksum)
|
||||
return utils.FileExists(transcodePath)
|
||||
}
|
||||
25
pkg/models/extension_find_filter.go
Normal file
25
pkg/models/extension_find_filter.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package models
|
||||
|
||||
func (ff FindFilterType) GetSort(defaultSort string) string {
|
||||
var sort string
|
||||
if ff.Sort == nil {
|
||||
sort = defaultSort
|
||||
} else {
|
||||
sort = *ff.Sort
|
||||
}
|
||||
return sort
|
||||
}
|
||||
|
||||
func (ff FindFilterType) GetDirection() string {
|
||||
var direction string
|
||||
if directionFilter := ff.Direction; directionFilter != nil {
|
||||
if dir := directionFilter.String(); directionFilter.IsValid() {
|
||||
direction = dir
|
||||
} else {
|
||||
direction = "ASC"
|
||||
}
|
||||
} else {
|
||||
direction = "ASC"
|
||||
}
|
||||
return direction
|
||||
}
|
||||
11893
pkg/models/generated_exec.go
Normal file
11893
pkg/models/generated_exec.go
Normal file
File diff suppressed because it is too large
Load Diff
334
pkg/models/generated_models.go
Normal file
334
pkg/models/generated_models.go
Normal file
@@ -0,0 +1,334 @@
|
||||
// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type FindFilterType struct {
|
||||
Q *string `json:"q"`
|
||||
Page *int `json:"page"`
|
||||
PerPage *int `json:"per_page"`
|
||||
Sort *string `json:"sort"`
|
||||
Direction *SortDirectionEnum `json:"direction"`
|
||||
}
|
||||
|
||||
type FindGalleriesResultType struct {
|
||||
Count int `json:"count"`
|
||||
Galleries []Gallery `json:"galleries"`
|
||||
}
|
||||
|
||||
type FindPerformersResultType struct {
|
||||
Count int `json:"count"`
|
||||
Performers []Performer `json:"performers"`
|
||||
}
|
||||
|
||||
type FindSceneMarkersResultType struct {
|
||||
Count int `json:"count"`
|
||||
SceneMarkers []SceneMarker `json:"scene_markers"`
|
||||
}
|
||||
|
||||
type FindScenesResultType struct {
|
||||
Count int `json:"count"`
|
||||
Scenes []Scene `json:"scenes"`
|
||||
}
|
||||
|
||||
type FindStudiosResultType struct {
|
||||
Count int `json:"count"`
|
||||
Studios []Studio `json:"studios"`
|
||||
}
|
||||
|
||||
type GalleryFilesType struct {
|
||||
Index int `json:"index"`
|
||||
Name *string `json:"name"`
|
||||
Path *string `json:"path"`
|
||||
}
|
||||
|
||||
type MarkerStringsResultType struct {
|
||||
Count int `json:"count"`
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
}
|
||||
|
||||
type PerformerCreateInput struct {
|
||||
Name *string `json:"name"`
|
||||
URL *string `json:"url"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
EyeColor *string `json:"eye_color"`
|
||||
Height *string `json:"height"`
|
||||
Measurements *string `json:"measurements"`
|
||||
FakeTits *string `json:"fake_tits"`
|
||||
CareerLength *string `json:"career_length"`
|
||||
Tattoos *string `json:"tattoos"`
|
||||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Favorite *bool `json:"favorite"`
|
||||
// This should be base64 encoded
|
||||
Image string `json:"image"`
|
||||
}
|
||||
|
||||
type PerformerFilterType struct {
|
||||
// Filter by favorite
|
||||
FilterFavorites *bool `json:"filter_favorites"`
|
||||
}
|
||||
|
||||
type PerformerUpdateInput struct {
|
||||
ID string `json:"id"`
|
||||
Name *string `json:"name"`
|
||||
URL *string `json:"url"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
EyeColor *string `json:"eye_color"`
|
||||
Height *string `json:"height"`
|
||||
Measurements *string `json:"measurements"`
|
||||
FakeTits *string `json:"fake_tits"`
|
||||
CareerLength *string `json:"career_length"`
|
||||
Tattoos *string `json:"tattoos"`
|
||||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Favorite *bool `json:"favorite"`
|
||||
// This should be base64 encoded
|
||||
Image *string `json:"image"`
|
||||
}
|
||||
|
||||
type SceneFileType struct {
|
||||
Size *string `json:"size"`
|
||||
Duration *float64 `json:"duration"`
|
||||
VideoCodec *string `json:"video_codec"`
|
||||
AudioCodec *string `json:"audio_codec"`
|
||||
Width *int `json:"width"`
|
||||
Height *int `json:"height"`
|
||||
Framerate *float64 `json:"framerate"`
|
||||
Bitrate *int `json:"bitrate"`
|
||||
}
|
||||
|
||||
type SceneFilterType struct {
|
||||
// Filter by rating
|
||||
Rating *int `json:"rating"`
|
||||
// Filter by resolution
|
||||
Resolution *ResolutionEnum `json:"resolution"`
|
||||
// Filter to only include scenes which have markers. `true` or `false`
|
||||
HasMarkers *string `json:"has_markers"`
|
||||
// Filter to only include scenes missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter to only include scenes with this studio
|
||||
StudioID *string `json:"studio_id"`
|
||||
// Filter to only include scenes with these tags
|
||||
Tags []string `json:"tags"`
|
||||
// Filter to only include scenes with this performer
|
||||
PerformerID *string `json:"performer_id"`
|
||||
}
|
||||
|
||||
type SceneMarkerCreateInput struct {
|
||||
Title string `json:"title"`
|
||||
Seconds float64 `json:"seconds"`
|
||||
SceneID string `json:"scene_id"`
|
||||
PrimaryTagID string `json:"primary_tag_id"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
}
|
||||
|
||||
type SceneMarkerFilterType struct {
|
||||
// Filter to only include scene markers with this tag
|
||||
TagID *string `json:"tag_id"`
|
||||
// Filter to only include scene markers with these tags
|
||||
Tags []string `json:"tags"`
|
||||
// Filter to only include scene markers attached to a scene with these tags
|
||||
SceneTags []string `json:"scene_tags"`
|
||||
// Filter to only include scene markers with these performers
|
||||
Performers []string `json:"performers"`
|
||||
}
|
||||
|
||||
type SceneMarkerTag struct {
|
||||
Tag Tag `json:"tag"`
|
||||
SceneMarkers []SceneMarker `json:"scene_markers"`
|
||||
}
|
||||
|
||||
type SceneMarkerUpdateInput struct {
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
Seconds float64 `json:"seconds"`
|
||||
SceneID string `json:"scene_id"`
|
||||
PrimaryTagID string `json:"primary_tag_id"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
}
|
||||
|
||||
type ScenePathsType struct {
|
||||
Screenshot *string `json:"screenshot"`
|
||||
Preview *string `json:"preview"`
|
||||
Stream *string `json:"stream"`
|
||||
Webp *string `json:"webp"`
|
||||
Vtt *string `json:"vtt"`
|
||||
ChaptersVtt *string `json:"chapters_vtt"`
|
||||
}
|
||||
|
||||
type SceneUpdateInput struct {
|
||||
ClientMutationID *string `json:"clientMutationId"`
|
||||
ID string `json:"id"`
|
||||
Title *string `json:"title"`
|
||||
Details *string `json:"details"`
|
||||
URL *string `json:"url"`
|
||||
Date *string `json:"date"`
|
||||
Rating *int `json:"rating"`
|
||||
StudioID *string `json:"studio_id"`
|
||||
GalleryID *string `json:"gallery_id"`
|
||||
PerformerIds []string `json:"performer_ids"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
}
|
||||
|
||||
// A performer from a scraping operation...
|
||||
type ScrapedPerformer struct {
|
||||
Name *string `json:"name"`
|
||||
URL *string `json:"url"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
EyeColor *string `json:"eye_color"`
|
||||
Height *string `json:"height"`
|
||||
Measurements *string `json:"measurements"`
|
||||
FakeTits *string `json:"fake_tits"`
|
||||
CareerLength *string `json:"career_length"`
|
||||
Tattoos *string `json:"tattoos"`
|
||||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
}
|
||||
|
||||
type StatsResultType struct {
|
||||
SceneCount int `json:"scene_count"`
|
||||
GalleryCount int `json:"gallery_count"`
|
||||
PerformerCount int `json:"performer_count"`
|
||||
StudioCount int `json:"studio_count"`
|
||||
TagCount int `json:"tag_count"`
|
||||
}
|
||||
|
||||
type StudioCreateInput struct {
|
||||
Name string `json:"name"`
|
||||
URL *string `json:"url"`
|
||||
// This should be base64 encoded
|
||||
Image string `json:"image"`
|
||||
}
|
||||
|
||||
type StudioUpdateInput struct {
|
||||
ID string `json:"id"`
|
||||
Name *string `json:"name"`
|
||||
URL *string `json:"url"`
|
||||
// This should be base64 encoded
|
||||
Image *string `json:"image"`
|
||||
}
|
||||
|
||||
type TagCreateInput struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type TagDestroyInput struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
|
||||
type TagUpdateInput struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type ResolutionEnum string
|
||||
|
||||
const (
|
||||
// 240p
|
||||
ResolutionEnumLow ResolutionEnum = "LOW"
|
||||
// 480p
|
||||
ResolutionEnumStandard ResolutionEnum = "STANDARD"
|
||||
// 720p
|
||||
ResolutionEnumStandardHd ResolutionEnum = "STANDARD_HD"
|
||||
// 1080p
|
||||
ResolutionEnumFullHd ResolutionEnum = "FULL_HD"
|
||||
// 4k
|
||||
ResolutionEnumFourK ResolutionEnum = "FOUR_K"
|
||||
)
|
||||
|
||||
var AllResolutionEnum = []ResolutionEnum{
|
||||
ResolutionEnumLow,
|
||||
ResolutionEnumStandard,
|
||||
ResolutionEnumStandardHd,
|
||||
ResolutionEnumFullHd,
|
||||
ResolutionEnumFourK,
|
||||
}
|
||||
|
||||
func (e ResolutionEnum) IsValid() bool {
|
||||
switch e {
|
||||
case ResolutionEnumLow, ResolutionEnumStandard, ResolutionEnumStandardHd, ResolutionEnumFullHd, ResolutionEnumFourK:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e ResolutionEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *ResolutionEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = ResolutionEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid ResolutionEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e ResolutionEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type SortDirectionEnum string
|
||||
|
||||
const (
|
||||
SortDirectionEnumAsc SortDirectionEnum = "ASC"
|
||||
SortDirectionEnumDesc SortDirectionEnum = "DESC"
|
||||
)
|
||||
|
||||
var AllSortDirectionEnum = []SortDirectionEnum{
|
||||
SortDirectionEnumAsc,
|
||||
SortDirectionEnumDesc,
|
||||
}
|
||||
|
||||
func (e SortDirectionEnum) IsValid() bool {
|
||||
switch e {
|
||||
case SortDirectionEnumAsc, SortDirectionEnumDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e SortDirectionEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *SortDirectionEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = SortDirectionEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid SortDirectionEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e SortDirectionEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
116
pkg/models/model_gallery.go
Normal file
116
pkg/models/model_gallery.go
Normal file
@@ -0,0 +1,116 @@
|
||||
package models
|
||||
|
||||
import (
	"archive/zip"
	"bytes"
	"database/sql"
	"fmt"
	"image"
	"image/jpeg"
	"io/ioutil"
	"path/filepath"
	"sort"
	"strings"

	"github.com/disintegration/imaging"
	"github.com/stashapp/stash/pkg/api/urlbuilders"
	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/utils"
)
|
||||
|
||||
type Gallery struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Path string `db:"path" json:"path"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
SceneID sql.NullInt64 `db:"scene_id,omitempty" json:"scene_id"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
|
||||
func (g *Gallery) GetFiles(baseURL string) []GalleryFilesType {
|
||||
var galleryFiles []GalleryFilesType
|
||||
filteredFiles, readCloser, err := g.listZipContents()
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
defer readCloser.Close()
|
||||
|
||||
builder := urlbuilders.NewGalleryURLBuilder(baseURL, g.ID)
|
||||
for i, file := range filteredFiles {
|
||||
galleryURL := builder.GetGalleryImageURL(i)
|
||||
galleryFile := GalleryFilesType{
|
||||
Index: i,
|
||||
Name: &file.Name,
|
||||
Path: &galleryURL,
|
||||
}
|
||||
galleryFiles = append(galleryFiles, galleryFile)
|
||||
}
|
||||
|
||||
return galleryFiles
|
||||
}
|
||||
|
||||
func (g *Gallery) GetImage(index int) []byte {
|
||||
data, _ := g.readZipFile(index)
|
||||
return data
|
||||
}
|
||||
|
||||
func (g *Gallery) GetThumbnail(index int) []byte {
|
||||
data, _ := g.readZipFile(index)
|
||||
srcImage, _, err := image.Decode(bytes.NewReader(data))
|
||||
if err != nil {
|
||||
return data
|
||||
}
|
||||
resizedImage := imaging.Resize(srcImage, 512, 0, imaging.Lanczos)
|
||||
buf := new(bytes.Buffer)
|
||||
err = jpeg.Encode(buf, resizedImage, nil)
|
||||
if err != nil {
|
||||
return data
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
func (g *Gallery) readZipFile(index int) ([]byte, error) {
|
||||
filteredFiles, readCloser, err := g.listZipContents()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer readCloser.Close()
|
||||
|
||||
zipFile := filteredFiles[index]
|
||||
zipFileReadCloser, err := zipFile.Open()
|
||||
if err != nil {
|
||||
logger.Warn("failed to read file inside zip file")
|
||||
return nil, err
|
||||
}
|
||||
defer zipFileReadCloser.Close()
|
||||
|
||||
return ioutil.ReadAll(zipFileReadCloser)
|
||||
}
|
||||
|
||||
func (g *Gallery) listZipContents() ([]*zip.File, *zip.ReadCloser, error) {
|
||||
readCloser, err := zip.OpenReader(g.Path)
|
||||
if err != nil {
|
||||
logger.Warn("failed to read zip file")
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
filteredFiles := make([]*zip.File, 0)
|
||||
for _, file := range readCloser.File {
|
||||
if file.FileInfo().IsDir() {
|
||||
continue
|
||||
}
|
||||
ext := filepath.Ext(file.Name)
|
||||
if ext != ".jpg" && ext != ".png" && ext != ".gif" {
|
||||
continue
|
||||
}
|
||||
if strings.Contains(file.Name, "__MACOSX") {
|
||||
continue
|
||||
}
|
||||
filteredFiles = append(filteredFiles, file)
|
||||
}
|
||||
sort.Slice(filteredFiles, func(i, j int) bool {
|
||||
a := filteredFiles[i]
|
||||
b := filteredFiles[j]
|
||||
return utils.NaturalCompare(a.Name, b.Name)
|
||||
})
|
||||
|
||||
return filteredFiles, readCloser, nil
|
||||
}
|
||||
16
pkg/models/model_joins.go
Normal file
16
pkg/models/model_joins.go
Normal file
@@ -0,0 +1,16 @@
|
||||
package models
|
||||
|
||||
// PerformersScenes is a row in the performers<->scenes join table.
type PerformersScenes struct {
	PerformerID int `db:"performer_id" json:"performer_id"`
	SceneID     int `db:"scene_id" json:"scene_id"`
}

// ScenesTags is a row in the scenes<->tags join table.
type ScenesTags struct {
	SceneID int `db:"scene_id" json:"scene_id"`
	TagID   int `db:"tag_id" json:"tag_id"`
}

// SceneMarkersTags is a row in the scene_markers<->tags join table.
type SceneMarkersTags struct {
	SceneMarkerID int `db:"scene_marker_id" json:"scene_marker_id"`
	TagID         int `db:"tag_id" json:"tag_id"`
}
|
||||
29
pkg/models/model_performer.go
Normal file
29
pkg/models/model_performer.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type Performer struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Image []byte `db:"image" json:"image"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Name sql.NullString `db:"name" json:"name"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
Twitter sql.NullString `db:"twitter" json:"twitter"`
|
||||
Instagram sql.NullString `db:"instagram" json:"instagram"`
|
||||
Birthdate sql.NullString `db:"birthdate" json:"birthdate"` // TODO dates?
|
||||
Ethnicity sql.NullString `db:"ethnicity" json:"ethnicity"`
|
||||
Country sql.NullString `db:"country" json:"country"`
|
||||
EyeColor sql.NullString `db:"eye_color" json:"eye_color"`
|
||||
Height sql.NullString `db:"height" json:"height"`
|
||||
Measurements sql.NullString `db:"measurements" json:"measurements"`
|
||||
FakeTits sql.NullString `db:"fake_tits" json:"fake_tits"`
|
||||
CareerLength sql.NullString `db:"career_length" json:"career_length"`
|
||||
Tattoos sql.NullString `db:"tattoos" json:"tattoos"`
|
||||
Piercings sql.NullString `db:"piercings" json:"piercings"`
|
||||
Aliases sql.NullString `db:"aliases" json:"aliases"`
|
||||
Favorite sql.NullBool `db:"favorite" json:"favorite"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
27
pkg/models/model_scene.go
Normal file
27
pkg/models/model_scene.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type Scene struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Path string `db:"path" json:"path"`
|
||||
Title sql.NullString `db:"title" json:"title"`
|
||||
Details sql.NullString `db:"details" json:"details"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
Date sql.NullString `db:"date" json:"date"` // TODO dates?
|
||||
Rating sql.NullInt64 `db:"rating" json:"rating"`
|
||||
Size sql.NullString `db:"size" json:"size"`
|
||||
Duration sql.NullFloat64 `db:"duration" json:"duration"`
|
||||
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
|
||||
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
|
||||
Width sql.NullInt64 `db:"width" json:"width"`
|
||||
Height sql.NullInt64 `db:"height" json:"height"`
|
||||
Framerate sql.NullFloat64 `db:"framerate" json:"framerate"`
|
||||
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
|
||||
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
15
pkg/models/model_scene_marker.go
Normal file
15
pkg/models/model_scene_marker.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type SceneMarker struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Title string `db:"title" json:"title"`
|
||||
Seconds float64 `db:"seconds" json:"seconds"`
|
||||
PrimaryTagID sql.NullInt64 `db:"primary_tag_id,omitempty" json:"primary_tag_id"`
|
||||
SceneID sql.NullInt64 `db:"scene_id,omitempty" json:"scene_id"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
24
pkg/models/model_scraped_item.go
Normal file
24
pkg/models/model_scraped_item.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type ScrapedItem struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Title sql.NullString `db:"title" json:"title"`
|
||||
Description sql.NullString `db:"description" json:"description"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
Date sql.NullString `db:"date" json:"date"` // TODO dates?
|
||||
Rating sql.NullString `db:"rating" json:"rating"`
|
||||
Tags sql.NullString `db:"tags" json:"tags"`
|
||||
Models sql.NullString `db:"models" json:"models"`
|
||||
Episode sql.NullInt64 `db:"episode" json:"episode"`
|
||||
GalleryFilename sql.NullString `db:"gallery_filename" json:"gallery_filename"`
|
||||
GalleryURL sql.NullString `db:"gallery_url" json:"gallery_url"`
|
||||
VideoFilename sql.NullString `db:"video_filename" json:"video_filename"`
|
||||
VideoURL sql.NullString `db:"video_url" json:"video_url"`
|
||||
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
15
pkg/models/model_studio.go
Normal file
15
pkg/models/model_studio.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type Studio struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Image []byte `db:"image" json:"image"`
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
Name sql.NullString `db:"name" json:"name"`
|
||||
URL sql.NullString `db:"url" json:"url"`
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
8
pkg/models/model_tag.go
Normal file
8
pkg/models/model_tag.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package models
|
||||
|
||||
type Tag struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Name string `db:"name" json:"name"` // TODO make schema not null
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
167
pkg/models/querybuilder_gallery.go
Normal file
167
pkg/models/querybuilder_gallery.go
Normal file
@@ -0,0 +1,167 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// GalleryQueryBuilder provides CRUD and query operations for galleries.
type GalleryQueryBuilder struct{}

// NewGalleryQueryBuilder returns a ready-to-use GalleryQueryBuilder.
func NewGalleryQueryBuilder() GalleryQueryBuilder {
	return GalleryQueryBuilder{}
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
|
||||
ensureTx(tx)
|
||||
result, err := tx.NamedExec(
|
||||
`INSERT INTO galleries (path, checksum, scene_id, created_at, updated_at)
|
||||
VALUES (:path, :checksum, :scene_id, :created_at, :updated_at)
|
||||
`,
|
||||
newGallery,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
galleryID, err := result.LastInsertId()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := tx.Get(&newGallery, `SELECT * FROM galleries WHERE id = ? LIMIT 1`, galleryID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &newGallery, nil
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE galleries SET `+SQLGenKeys(updatedGallery)+` WHERE galleries.id = :id`,
|
||||
updatedGallery,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := tx.Get(&updatedGallery, `SELECT * FROM galleries WHERE id = ? LIMIT 1`, updatedGallery.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &updatedGallery, nil
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) Find(id int) (*Gallery, error) {
|
||||
query := "SELECT * FROM galleries WHERE id = ? LIMIT 1"
|
||||
args := []interface{}{id}
|
||||
return qb.queryGallery(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) FindByChecksum(checksum string, tx *sqlx.Tx) (*Gallery, error) {
|
||||
query := "SELECT * FROM galleries WHERE checksum = ? LIMIT 1"
|
||||
args := []interface{}{checksum}
|
||||
return qb.queryGallery(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) FindByPath(path string) (*Gallery, error) {
|
||||
query := "SELECT * FROM galleries WHERE path = ? LIMIT 1"
|
||||
args := []interface{}{path}
|
||||
return qb.queryGallery(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) (*Gallery, error) {
|
||||
query := "SELECT galleries.* FROM galleries JOIN scenes ON scenes.id = galleries.scene_id WHERE scenes.id = ? LIMIT 1"
|
||||
args := []interface{}{sceneID}
|
||||
return qb.queryGallery(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) ValidGalleriesForScenePath(scenePath string) ([]Gallery, error) {
|
||||
sceneDirPath := filepath.Dir(scenePath)
|
||||
query := "SELECT galleries.* FROM galleries WHERE galleries.scene_id IS NULL AND galleries.path LIKE '" + sceneDirPath + "%' ORDER BY path ASC"
|
||||
return qb.queryGalleries(query, nil, nil)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) Count() (int, error) {
|
||||
return runCountQuery(buildCountQuery("SELECT galleries.id FROM galleries"), nil)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) All() ([]Gallery, error) {
|
||||
return qb.queryGalleries(selectAll("galleries")+qb.getGallerySort(nil), nil, nil)
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) Query(findFilter *FindFilterType) ([]Gallery, int) {
|
||||
if findFilter == nil {
|
||||
findFilter = &FindFilterType{}
|
||||
}
|
||||
|
||||
whereClauses := []string{}
|
||||
havingClauses := []string{}
|
||||
args := []interface{}{}
|
||||
body := selectDistinctIDs("galleries")
|
||||
|
||||
if q := findFilter.Q; q != nil && *q != "" {
|
||||
searchColumns := []string{"galleries.path", "galleries.checksum"}
|
||||
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||
}
|
||||
|
||||
sortAndPagination := qb.getGallerySort(findFilter) + getPagination(findFilter)
|
||||
idsResult, countResult := executeFindQuery("galleries", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||
|
||||
var galleries []Gallery
|
||||
for _, id := range idsResult {
|
||||
gallery, _ := qb.Find(id)
|
||||
galleries = append(galleries, *gallery)
|
||||
}
|
||||
|
||||
return galleries, countResult
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) getGallerySort(findFilter *FindFilterType) string {
|
||||
var sort string
|
||||
var direction string
|
||||
//if findFilter == nil { // TODO temp until title is removed from schema and UI
|
||||
sort = "path"
|
||||
direction = "ASC"
|
||||
//} else {
|
||||
// sort = findFilter.getSort("path")
|
||||
// direction = findFilter.getDirection()
|
||||
//}
|
||||
return getSort(sort, direction, "galleries")
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) queryGallery(query string, args []interface{}, tx *sqlx.Tx) (*Gallery, error) {
|
||||
results, err := qb.queryGalleries(query, args, tx)
|
||||
if err != nil || len(results) < 1 {
|
||||
return nil, err
|
||||
}
|
||||
return &results[0], nil
|
||||
}
|
||||
|
||||
func (qb *GalleryQueryBuilder) queryGalleries(query string, args []interface{}, tx *sqlx.Tx) ([]Gallery, error) {
|
||||
var rows *sqlx.Rows
|
||||
var err error
|
||||
if tx != nil {
|
||||
rows, err = tx.Queryx(query, args...)
|
||||
} else {
|
||||
rows, err = database.DB.Queryx(query, args...)
|
||||
}
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
galleries := make([]Gallery, 0)
|
||||
gallery := Gallery{}
|
||||
for rows.Next() {
|
||||
if err := rows.StructScan(&gallery); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
galleries = append(galleries, gallery)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return galleries, nil
|
||||
}
|
||||
84
pkg/models/querybuilder_joins.go
Normal file
84
pkg/models/querybuilder_joins.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package models
|
||||
|
||||
import "github.com/jmoiron/sqlx"
|
||||
|
||||
type JoinsQueryBuilder struct{}
|
||||
|
||||
func NewJoinsQueryBuilder() JoinsQueryBuilder {
|
||||
return JoinsQueryBuilder{}
|
||||
}
|
||||
|
||||
func (qb *JoinsQueryBuilder) CreatePerformersScenes(newJoins []PerformersScenes, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
for _, join := range newJoins {
|
||||
_, err := tx.NamedExec(
|
||||
`INSERT INTO performers_scenes (performer_id, scene_id) VALUES (:performer_id, :scene_id)`,
|
||||
join,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (qb *JoinsQueryBuilder) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing joins and then create new ones
|
||||
_, err := tx.Exec("DELETE FROM performers_scenes WHERE scene_id = ?", sceneID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return qb.CreatePerformersScenes(updatedJoins, tx)
|
||||
}
|
||||
|
||||
func (qb *JoinsQueryBuilder) CreateScenesTags(newJoins []ScenesTags, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
for _, join := range newJoins {
|
||||
_, err := tx.NamedExec(
|
||||
`INSERT INTO scenes_tags (scene_id, tag_id) VALUES (:scene_id, :tag_id)`,
|
||||
join,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (qb *JoinsQueryBuilder) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing joins and then create new ones
|
||||
_, err := tx.Exec("DELETE FROM scenes_tags WHERE scene_id = ?", sceneID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return qb.CreateScenesTags(updatedJoins, tx)
|
||||
}
|
||||
|
||||
func (qb *JoinsQueryBuilder) CreateSceneMarkersTags(newJoins []SceneMarkersTags, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
for _, join := range newJoins {
|
||||
_, err := tx.NamedExec(
|
||||
`INSERT INTO scene_markers_tags (scene_marker_id, tag_id) VALUES (:scene_marker_id, :tag_id)`,
|
||||
join,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (qb *JoinsQueryBuilder) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing joins and then create new ones
|
||||
_, err := tx.Exec("DELETE FROM scene_markers_tags WHERE scene_marker_id = ?", sceneMarkerID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return qb.CreateSceneMarkersTags(updatedJoins, tx)
|
||||
}
|
||||
179
pkg/models/querybuilder_performer.go
Normal file
179
pkg/models/querybuilder_performer.go
Normal file
@@ -0,0 +1,179 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
)
|
||||
|
||||
type PerformerQueryBuilder struct{}
|
||||
|
||||
func NewPerformerQueryBuilder() PerformerQueryBuilder {
|
||||
return PerformerQueryBuilder{}
|
||||
}
|
||||
|
||||
// Create inserts newPerformer and returns the row re-read from the
// database (so auto-populated columns such as id are filled in).
// Must be called inside the supplied transaction.
func (qb *PerformerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO performers (image, checksum, name, url, twitter, instagram, birthdate, ethnicity, country,
                        eye_color, height, measurements, fake_tits, career_length, tattoos, piercings,
                        aliases, favorite, created_at, updated_at)
				VALUES (:image, :checksum, :name, :url, :twitter, :instagram, :birthdate, :ethnicity, :country,
                        :eye_color, :height, :measurements, :fake_tits, :career_length, :tattoos, :piercings,
                        :aliases, :favorite, :created_at, :updated_at)
		`,
		newPerformer,
	)
	if err != nil {
		return nil, err
	}
	// Fetch the generated primary key of the inserted row.
	performerID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	// Re-read the stored row so the caller gets the persisted state.
	if err := tx.Get(&newPerformer, `SELECT * FROM performers WHERE id = ? LIMIT 1`, performerID); err != nil {
		return nil, err
	}
	return &newPerformer, nil
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) Update(updatedPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE performers SET `+SQLGenKeys(updatedPerformer)+` WHERE performers.id = :id`,
|
||||
updatedPerformer,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := tx.Get(&updatedPerformer, `SELECT * FROM performers WHERE id = ? LIMIT 1`, updatedPerformer.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &updatedPerformer, nil
|
||||
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) Find(id int) (*Performer, error) {
|
||||
query := "SELECT * FROM performers WHERE id = ? LIMIT 1"
|
||||
args := []interface{}{id}
|
||||
results, err := qb.queryPerformers(query, args, nil)
|
||||
if err != nil || len(results) < 1 {
|
||||
return nil, err
|
||||
}
|
||||
return &results[0], nil
|
||||
}
|
||||
|
||||
// FindBySceneID returns every performer attached to the given scene via
// the performers_scenes join table.
func (qb *PerformerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Performer, error) {
	query := `
		SELECT performers.* FROM performers
		LEFT JOIN performers_scenes as scenes_join on scenes_join.performer_id = performers.id
		LEFT JOIN scenes on scenes_join.scene_id = scenes.id
		WHERE scenes.id = ?
		GROUP BY performers.id
	`
	args := []interface{}{sceneID}
	return qb.queryPerformers(query, args, tx)
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Performer, error) {
|
||||
query := "SELECT * FROM performers WHERE name IN " + getInBinding(len(names))
|
||||
var args []interface{}
|
||||
for _, name := range names {
|
||||
args = append(args, name)
|
||||
}
|
||||
return qb.queryPerformers(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) Count() (int, error) {
|
||||
return runCountQuery(buildCountQuery("SELECT performers.id FROM performers"), nil)
|
||||
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) All() ([]Performer, error) {
|
||||
return qb.queryPerformers(selectAll("performers")+qb.getPerformerSort(nil), nil, nil)
|
||||
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]Performer, int) {
|
||||
if performerFilter == nil {
|
||||
performerFilter = &PerformerFilterType{}
|
||||
}
|
||||
if findFilter == nil {
|
||||
findFilter = &FindFilterType{}
|
||||
}
|
||||
|
||||
whereClauses := []string{}
|
||||
havingClauses := []string{}
|
||||
args := []interface{}{}
|
||||
body := selectDistinctIDs("performers")
|
||||
body += `
|
||||
left join performers_scenes as scenes_join on scenes_join.performer_id = performers.id
|
||||
left join scenes on scenes_join.scene_id = scenes.id
|
||||
`
|
||||
|
||||
if q := findFilter.Q; q != nil && *q != "" {
|
||||
searchColumns := []string{"performers.name", "performers.checksum", "performers.birthdate", "performers.ethnicity"}
|
||||
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||
}
|
||||
|
||||
if favoritesFilter := performerFilter.FilterFavorites; favoritesFilter != nil {
|
||||
if *favoritesFilter == true {
|
||||
whereClauses = append(whereClauses, "performers.favorite = 1")
|
||||
} else {
|
||||
whereClauses = append(whereClauses, "performers.favorite = 0")
|
||||
}
|
||||
}
|
||||
|
||||
sortAndPagination := qb.getPerformerSort(findFilter) + getPagination(findFilter)
|
||||
idsResult, countResult := executeFindQuery("performers", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||
|
||||
var performers []Performer
|
||||
for _, id := range idsResult {
|
||||
performer, _ := qb.Find(id)
|
||||
performers = append(performers, *performer)
|
||||
}
|
||||
|
||||
return performers, countResult
|
||||
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) getPerformerSort(findFilter *FindFilterType) string {
|
||||
var sort string
|
||||
var direction string
|
||||
if findFilter == nil {
|
||||
sort = "name"
|
||||
direction = "ASC"
|
||||
} else {
|
||||
sort = findFilter.GetSort("name")
|
||||
direction = findFilter.GetDirection()
|
||||
}
|
||||
return getSort(sort, direction, "performers")
|
||||
}
|
||||
|
||||
func (qb *PerformerQueryBuilder) queryPerformers(query string, args []interface{}, tx *sqlx.Tx) ([]Performer, error) {
|
||||
var rows *sqlx.Rows
|
||||
var err error
|
||||
if tx != nil {
|
||||
rows, err = tx.Queryx(query, args...)
|
||||
} else {
|
||||
rows, err = database.DB.Queryx(query, args...)
|
||||
}
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
performers := make([]Performer, 0)
|
||||
performer := Performer{}
|
||||
for rows.Next() {
|
||||
if err := rows.StructScan(&performer); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
performers = append(performers, performer)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return performers, nil
|
||||
}
|
||||
287
pkg/models/querybuilder_scene.go
Normal file
287
pkg/models/querybuilder_scene.go
Normal file
@@ -0,0 +1,287 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// scenesForPerformerQuery selects every scene joined to a given performer
// (bound parameter: performer id).
const scenesForPerformerQuery = `
SELECT scenes.* FROM scenes
LEFT JOIN performers_scenes as performers_join on performers_join.scene_id = scenes.id
LEFT JOIN performers on performers_join.performer_id = performers.id
WHERE performers.id = ?
GROUP BY scenes.id
`

// scenesForStudioQuery selects every scene belonging to a given studio
// (bound parameter: studio id).
const scenesForStudioQuery = `
SELECT scenes.* FROM scenes
JOIN studios ON studios.id = scenes.studio_id
WHERE studios.id = ?
GROUP BY scenes.id
`

// scenesForTagQuery selects every scene joined to a given tag
// (bound parameter: tag id).
const scenesForTagQuery = `
SELECT scenes.* FROM scenes
LEFT JOIN scenes_tags as tags_join on tags_join.scene_id = scenes.id
LEFT JOIN tags on tags_join.tag_id = tags.id
WHERE tags.id = ?
GROUP BY scenes.id
`
|
||||
|
||||
type SceneQueryBuilder struct{}
|
||||
|
||||
func NewSceneQueryBuilder() SceneQueryBuilder {
|
||||
return SceneQueryBuilder{}
|
||||
}
|
||||
|
||||
// Create inserts newScene and returns the row re-read from the database
// (so auto-populated columns such as id are filled in).
// Must be called inside the supplied transaction.
func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO scenes (checksum, path, title, details, url, date, rating, size, duration, video_codec,
                    			    audio_codec, width, height, framerate, bitrate, studio_id, created_at, updated_at)
				VALUES (:checksum, :path, :title, :details, :url, :date, :rating, :size, :duration, :video_codec,
						:audio_codec, :width, :height, :framerate, :bitrate, :studio_id, :created_at, :updated_at)
		`,
		newScene,
	)
	if err != nil {
		return nil, err
	}
	// Fetch the generated primary key of the inserted row.
	sceneID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}
	// Re-read the stored row so the caller gets the persisted state.
	if err := tx.Get(&newScene, `SELECT * FROM scenes WHERE id = ? LIMIT 1`, sceneID); err != nil {
		return nil, err
	}
	return &newScene, nil
}
|
||||
|
||||
func (qb *SceneQueryBuilder) Update(updatedScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE scenes SET `+SQLGenKeys(updatedScene)+` WHERE scenes.id = :id`,
|
||||
updatedScene,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := tx.Get(&updatedScene, `SELECT * FROM scenes WHERE id = ? LIMIT 1`, updatedScene.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &updatedScene, nil
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) Find(id int) (*Scene, error) {
|
||||
query := "SELECT * FROM scenes WHERE id = ? LIMIT 1"
|
||||
args := []interface{}{id}
|
||||
return qb.queryScene(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) FindByChecksum(checksum string) (*Scene, error) {
|
||||
query := "SELECT * FROM scenes WHERE checksum = ? LIMIT 1"
|
||||
args := []interface{}{checksum}
|
||||
return qb.queryScene(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) FindByPath(path string) (*Scene, error) {
|
||||
query := "SELECT * FROM scenes WHERE path = ? LIMIT 1"
|
||||
args := []interface{}{path}
|
||||
return qb.queryScene(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) FindByPerformerID(performerID int) ([]Scene, error) {
|
||||
args := []interface{}{performerID}
|
||||
return qb.queryScenes(scenesForPerformerQuery, args, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) CountByPerformerID(performerID int) (int, error) {
|
||||
args := []interface{}{performerID}
|
||||
return runCountQuery(buildCountQuery(scenesForPerformerQuery), args)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) FindByStudioID(studioID int) ([]Scene, error) {
|
||||
args := []interface{}{studioID}
|
||||
return qb.queryScenes(scenesForStudioQuery, args, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) Count() (int, error) {
|
||||
return runCountQuery(buildCountQuery("SELECT scenes.id FROM scenes"), nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) CountByStudioID(studioID int) (int, error) {
|
||||
args := []interface{}{studioID}
|
||||
return runCountQuery(buildCountQuery(scenesForStudioQuery), args)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) CountByTagID(tagID int) (int, error) {
|
||||
args := []interface{}{tagID}
|
||||
return runCountQuery(buildCountQuery(scenesForTagQuery), args)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) Wall(q *string) ([]Scene, error) {
|
||||
s := ""
|
||||
if q != nil {
|
||||
s = *q
|
||||
}
|
||||
query := "SELECT scenes.* FROM scenes WHERE scenes.details LIKE '%" + s + "%' ORDER BY RANDOM() LIMIT 80"
|
||||
return qb.queryScenes(query, nil, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) All() ([]Scene, error) {
|
||||
return qb.queryScenes(selectAll("scenes")+qb.getSceneSort(nil), nil, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]Scene, int) {
|
||||
if sceneFilter == nil {
|
||||
sceneFilter = &SceneFilterType{}
|
||||
}
|
||||
if findFilter == nil {
|
||||
findFilter = &FindFilterType{}
|
||||
}
|
||||
|
||||
whereClauses := []string{}
|
||||
havingClauses := []string{}
|
||||
args := []interface{}{}
|
||||
body := selectDistinctIDs("scenes")
|
||||
body = body + `
|
||||
left join scene_markers on scene_markers.scene_id = scenes.id
|
||||
left join performers_scenes as performers_join on performers_join.scene_id = scenes.id
|
||||
left join performers on performers_join.performer_id = performers.id
|
||||
left join studios as studio on studio.id = scenes.studio_id
|
||||
left join galleries as gallery on gallery.scene_id = scenes.id
|
||||
left join scenes_tags as tags_join on tags_join.scene_id = scenes.id
|
||||
left join tags on tags_join.tag_id = tags.id
|
||||
`
|
||||
|
||||
if q := findFilter.Q; q != nil && *q != "" {
|
||||
searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.checksum", "scene_markers.title"}
|
||||
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||
}
|
||||
|
||||
if rating := sceneFilter.Rating; rating != nil {
|
||||
whereClauses = append(whereClauses, "rating = ?")
|
||||
args = append(args, *sceneFilter.Rating)
|
||||
}
|
||||
|
||||
if resolutionFilter := sceneFilter.Resolution; resolutionFilter != nil {
|
||||
if resolution := resolutionFilter.String(); resolutionFilter.IsValid() {
|
||||
switch resolution {
|
||||
case "LOW":
|
||||
whereClauses = append(whereClauses, "(scenes.height >= 240 AND scenes.height < 480)")
|
||||
case "STANDARD":
|
||||
whereClauses = append(whereClauses, "(scenes.height >= 480 AND scenes.height < 720)")
|
||||
case "STANDARD_HD":
|
||||
whereClauses = append(whereClauses, "(scenes.height >= 720 AND scenes.height < 1080)")
|
||||
case "FULL_HD":
|
||||
whereClauses = append(whereClauses, "(scenes.height >= 1080 AND scenes.height < 2160)")
|
||||
case "FOUR_K":
|
||||
whereClauses = append(whereClauses, "scenes.height >= 2160")
|
||||
default:
|
||||
whereClauses = append(whereClauses, "scenes.height < 240")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if hasMarkersFilter := sceneFilter.HasMarkers; hasMarkersFilter != nil {
|
||||
if strings.Compare(*hasMarkersFilter, "true") == 0 {
|
||||
havingClauses = append(havingClauses, "count(scene_markers.scene_id) > 0")
|
||||
} else {
|
||||
whereClauses = append(whereClauses, "scene_markers.id IS NULL")
|
||||
}
|
||||
}
|
||||
|
||||
if isMissingFilter := sceneFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
|
||||
switch *isMissingFilter {
|
||||
case "gallery":
|
||||
whereClauses = append(whereClauses, "gallery.scene_id IS NULL")
|
||||
case "studio":
|
||||
whereClauses = append(whereClauses, "scenes.studio_id IS NULL")
|
||||
case "performers":
|
||||
whereClauses = append(whereClauses, "performers_join.scene_id IS NULL")
|
||||
default:
|
||||
whereClauses = append(whereClauses, "scenes."+*isMissingFilter+" IS NULL")
|
||||
}
|
||||
}
|
||||
|
||||
if tagsFilter := sceneFilter.Tags; len(tagsFilter) > 0 {
|
||||
for _, tagID := range tagsFilter {
|
||||
args = append(args, tagID)
|
||||
}
|
||||
|
||||
whereClauses = append(whereClauses, "tags.id IN "+getInBinding(len(tagsFilter)))
|
||||
havingClauses = append(havingClauses, "count(distinct tags.id) IS "+strconv.Itoa(len(tagsFilter)))
|
||||
}
|
||||
|
||||
if performerID := sceneFilter.PerformerID; performerID != nil {
|
||||
whereClauses = append(whereClauses, "performers.id = ?")
|
||||
args = append(args, *performerID)
|
||||
}
|
||||
|
||||
if studioID := sceneFilter.StudioID; studioID != nil {
|
||||
whereClauses = append(whereClauses, "studio.id = ?")
|
||||
args = append(args, *studioID)
|
||||
}
|
||||
|
||||
sortAndPagination := qb.getSceneSort(findFilter) + getPagination(findFilter)
|
||||
idsResult, countResult := executeFindQuery("scenes", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||
|
||||
var scenes []Scene
|
||||
for _, id := range idsResult {
|
||||
scene, _ := qb.Find(id)
|
||||
scenes = append(scenes, *scene)
|
||||
}
|
||||
|
||||
return scenes, countResult
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) getSceneSort(findFilter *FindFilterType) string {
|
||||
if findFilter == nil {
|
||||
return " ORDER BY scenes.path, scenes.date ASC "
|
||||
}
|
||||
sort := findFilter.GetSort("title")
|
||||
direction := findFilter.GetDirection()
|
||||
return getSort(sort, direction, "scenes")
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) queryScene(query string, args []interface{}, tx *sqlx.Tx) (*Scene, error) {
|
||||
results, err := qb.queryScenes(query, args, tx)
|
||||
if err != nil || len(results) < 1 {
|
||||
return nil, err
|
||||
}
|
||||
return &results[0], nil
|
||||
}
|
||||
|
||||
func (qb *SceneQueryBuilder) queryScenes(query string, args []interface{}, tx *sqlx.Tx) ([]Scene, error) {
|
||||
var rows *sqlx.Rows
|
||||
var err error
|
||||
if tx != nil {
|
||||
rows, err = tx.Queryx(query, args...)
|
||||
} else {
|
||||
rows, err = database.DB.Queryx(query, args...)
|
||||
}
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
scenes := make([]Scene, 0)
|
||||
scene := Scene{}
|
||||
for rows.Next() {
|
||||
if err := rows.StructScan(&scene); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
scenes = append(scenes, scene)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return scenes, nil
|
||||
}
|
||||
255
pkg/models/querybuilder_scene_marker.go
Normal file
255
pkg/models/querybuilder_scene_marker.go
Normal file
@@ -0,0 +1,255 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// sceneMarkersForTagQuery selects every scene marker joined to a given tag
// (bound parameter: tag id).
const sceneMarkersForTagQuery = `
SELECT scene_markers.* FROM scene_markers
LEFT JOIN scene_markers_tags as tags_join on tags_join.scene_marker_id = scene_markers.id
LEFT JOIN tags on tags_join.tag_id = tags.id
WHERE tags.id = ?
GROUP BY scene_markers.id
`
|
||||
|
||||
type SceneMarkerQueryBuilder struct{}
|
||||
|
||||
func NewSceneMarkerQueryBuilder() SceneMarkerQueryBuilder {
|
||||
return SceneMarkerQueryBuilder{}
|
||||
}
|
||||
|
||||
// Create inserts newSceneMarker and returns the row re-read from the
// database (so auto-populated columns such as id are filled in).
// Must be called inside the supplied transaction.
func (qb *SceneMarkerQueryBuilder) Create(newSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO scene_markers (title, seconds, primary_tag_id, scene_id, created_at, updated_at)
				VALUES (:title, :seconds, :primary_tag_id, :scene_id, :created_at, :updated_at)
		`,
		newSceneMarker,
	)
	if err != nil {
		return nil, err
	}
	// Fetch the generated primary key of the inserted row.
	sceneMarkerID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	// Re-read the stored row so the caller gets the persisted state.
	if err := tx.Get(&newSceneMarker, `SELECT * FROM scene_markers WHERE id = ? LIMIT 1`, sceneMarkerID); err != nil {
		return nil, err
	}
	return &newSceneMarker, nil
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) Update(updatedSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE scene_markers SET `+SQLGenKeys(updatedSceneMarker)+` WHERE scene_markers.id = :id`,
|
||||
updatedSceneMarker,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := tx.Get(&updatedSceneMarker, `SELECT * FROM scene_markers WHERE id = ? LIMIT 1`, updatedSceneMarker.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &updatedSceneMarker, nil
|
||||
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
|
||||
return executeDeleteQuery("scene_markers", id, tx)
|
||||
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) Find(id int) (*SceneMarker, error) {
|
||||
query := "SELECT * FROM scene_markers WHERE id = ? LIMIT 1"
|
||||
args := []interface{}{id}
|
||||
results, err := qb.querySceneMarkers(query, args, nil)
|
||||
if err != nil || len(results) < 1 {
|
||||
return nil, err
|
||||
}
|
||||
return &results[0], nil
|
||||
}
|
||||
|
||||
// FindBySceneID returns every marker belonging to the given scene, ordered
// by marker position (seconds) ascending.
func (qb *SceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]SceneMarker, error) {
	query := `
		SELECT scene_markers.* FROM scene_markers
		JOIN scenes ON scenes.id = scene_markers.scene_id
		WHERE scenes.id = ?
		GROUP BY scene_markers.id
		ORDER BY scene_markers.seconds ASC
	`
	args := []interface{}{sceneID}
	return qb.querySceneMarkers(query, args, tx)
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) CountByTagID(tagID int) (int, error) {
|
||||
args := []interface{}{tagID}
|
||||
return runCountQuery(buildCountQuery(sceneMarkersForTagQuery), args)
|
||||
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]*MarkerStringsResultType, error) {
|
||||
query := "SELECT count(*) as `count`, scene_markers.id as id, scene_markers.title as title FROM scene_markers"
|
||||
if q != nil {
|
||||
query = query + " WHERE title LIKE '%" + *q + "%'"
|
||||
}
|
||||
query = query + " GROUP BY title"
|
||||
if sort != nil && *sort == "count" {
|
||||
query = query + " ORDER BY `count` DESC"
|
||||
} else {
|
||||
query = query + " ORDER BY title ASC"
|
||||
}
|
||||
args := []interface{}{}
|
||||
return qb.queryMarkerStringsResultType(query, args)
|
||||
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) Wall(q *string) ([]SceneMarker, error) {
|
||||
s := ""
|
||||
if q != nil {
|
||||
s = *q
|
||||
}
|
||||
query := "SELECT scene_markers.* FROM scene_markers WHERE scene_markers.title LIKE '%" + s + "%' ORDER BY RANDOM() LIMIT 80"
|
||||
return qb.querySceneMarkers(query, nil, nil)
|
||||
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]SceneMarker, int) {
|
||||
if sceneMarkerFilter == nil {
|
||||
sceneMarkerFilter = &SceneMarkerFilterType{}
|
||||
}
|
||||
if findFilter == nil {
|
||||
findFilter = &FindFilterType{}
|
||||
}
|
||||
|
||||
whereClauses := []string{}
|
||||
havingClauses := []string{}
|
||||
args := []interface{}{}
|
||||
body := selectDistinctIDs("scene_markers")
|
||||
body = body + `
|
||||
left join tags as primary_tag on primary_tag.id = scene_markers.primary_tag_id
|
||||
left join scenes as scene on scene.id = scene_markers.scene_id
|
||||
left join scene_markers_tags as tags_join on tags_join.scene_marker_id = scene_markers.id
|
||||
left join tags on tags_join.tag_id = tags.id
|
||||
`
|
||||
|
||||
if tagIDs := sceneMarkerFilter.Tags; tagIDs != nil {
|
||||
//select `scene_markers`.* from `scene_markers`
|
||||
//left join `tags` as `primary_tags_join`
|
||||
// on `primary_tags_join`.`id` = `scene_markers`.`primary_tag_id`
|
||||
// and `primary_tags_join`.`id` in ('3', '37', '9', '89')
|
||||
//left join `scene_markers_tags` as `tags_join`
|
||||
// on `tags_join`.`scene_marker_id` = `scene_markers`.`id`
|
||||
// and `tags_join`.`tag_id` in ('3', '37', '9', '89')
|
||||
//group by `scene_markers`.`id`
|
||||
//having ((count(distinct `primary_tags_join`.`id`) + count(distinct `tags_join`.`tag_id`)) = 4)
|
||||
|
||||
length := len(tagIDs)
|
||||
body += " LEFT JOIN tags AS ptj ON ptj.id = scene_markers.primary_tag_id AND ptj.id IN " + getInBinding(length)
|
||||
body += " LEFT JOIN scene_markers_tags AS tj ON tj.scene_marker_id = scene_markers.id AND tj.tag_id IN " + getInBinding(length)
|
||||
havingClauses = append(havingClauses, "((COUNT(DISTINCT ptj.id) + COUNT(DISTINCT tj.tag_id)) = "+strconv.Itoa(length)+")")
|
||||
for _, tagID := range tagIDs {
|
||||
args = append(args, tagID)
|
||||
}
|
||||
for _, tagID := range tagIDs {
|
||||
args = append(args, tagID)
|
||||
}
|
||||
}
|
||||
|
||||
if sceneTagIDs := sceneMarkerFilter.SceneTags; sceneTagIDs != nil {
|
||||
length := len(sceneTagIDs)
|
||||
body += " LEFT JOIN scenes_tags AS scene_tags_join ON scene_tags_join.scene_id = scene.id AND scene_tags_join.tag_id IN " + getInBinding(length)
|
||||
havingClauses = append(havingClauses, "COUNT(DISTINCT scene_tags_join.tag_id) = "+strconv.Itoa(length))
|
||||
for _, tagID := range sceneTagIDs {
|
||||
args = append(args, tagID)
|
||||
}
|
||||
}
|
||||
|
||||
if performerIDs := sceneMarkerFilter.Performers; performerIDs != nil {
|
||||
length := len(performerIDs)
|
||||
body += " LEFT JOIN performers_scenes as scene_performers ON scene.id = scene_performers.scene_id"
|
||||
whereClauses = append(whereClauses, "scene_performers.performer_id IN "+getInBinding(length))
|
||||
for _, performerID := range performerIDs {
|
||||
args = append(args, performerID)
|
||||
}
|
||||
}
|
||||
|
||||
if q := findFilter.Q; q != nil && *q != "" {
|
||||
searchColumns := []string{"scene_markers.title", "scene.title"}
|
||||
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||
}
|
||||
|
||||
if tagID := sceneMarkerFilter.TagID; tagID != nil {
|
||||
whereClauses = append(whereClauses, "(scene_markers.primary_tag_id = "+*tagID+" OR tags.id = "+*tagID+")")
|
||||
}
|
||||
|
||||
sortAndPagination := qb.getSceneMarkerSort(findFilter) + getPagination(findFilter)
|
||||
idsResult, countResult := executeFindQuery("scene_markers", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||
|
||||
var sceneMarkers []SceneMarker
|
||||
for _, id := range idsResult {
|
||||
sceneMarker, _ := qb.Find(id)
|
||||
sceneMarkers = append(sceneMarkers, *sceneMarker)
|
||||
}
|
||||
|
||||
return sceneMarkers, countResult
|
||||
}
|
||||
|
||||
func (qb *SceneMarkerQueryBuilder) getSceneMarkerSort(findFilter *FindFilterType) string {
|
||||
sort := findFilter.GetSort("title")
|
||||
direction := findFilter.GetDirection()
|
||||
return getSort(sort, direction, "scene_markers")
|
||||
}
|
||||
|
||||
// querySceneMarkers runs a query expected to return scene marker rows and
// scans them into a slice. When tx is nil the shared database handle is
// used instead of the transaction.
func (qb *SceneMarkerQueryBuilder) querySceneMarkers(query string, args []interface{}, tx *sqlx.Tx) ([]SceneMarker, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	// sql.ErrNoRows is treated as an empty result rather than a failure.
	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	sceneMarkers := make([]SceneMarker, 0)
	sceneMarker := SceneMarker{}
	for rows.Next() {
		if err := rows.StructScan(&sceneMarker); err != nil {
			return nil, err
		}
		// SceneMarker is a value type, so append copies it; reusing one
		// scan target across iterations is safe.
		sceneMarkers = append(sceneMarkers, sceneMarker)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return sceneMarkers, nil
}

// queryMarkerStringsResultType runs an aggregate marker query and returns
// one result per row. Always uses the shared database handle.
func (qb *SceneMarkerQueryBuilder) queryMarkerStringsResultType(query string, args []interface{}) ([]*MarkerStringsResultType, error) {
	rows, err := database.DB.Queryx(query, args...)
	// sql.ErrNoRows is treated as an empty result rather than a failure.
	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	markerStrings := make([]*MarkerStringsResultType, 0)
	for rows.Next() {
		// A fresh value per iteration: the slice stores pointers, so the
		// scan target must not be shared between rows.
		markerString := MarkerStringsResultType{}
		if err := rows.StructScan(&markerString); err != nil {
			return nil, err
		}
		markerStrings = append(markerStrings, &markerString)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return markerStrings, nil
}
|
||||
113
pkg/models/querybuilder_scraped_item.go
Normal file
113
pkg/models/querybuilder_scraped_item.go
Normal file
@@ -0,0 +1,113 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
)
|
||||
|
||||
// ScrapedItemQueryBuilder provides database operations for scraped_items rows.
type ScrapedItemQueryBuilder struct{}

// NewScrapedItemQueryBuilder returns a zero-value builder.
func NewScrapedItemQueryBuilder() ScrapedItemQueryBuilder {
	return ScrapedItemQueryBuilder{}
}

// Create inserts newScrapedItem and returns the row as stored, including
// its generated id and any database defaults. Requires a transaction
// (panics via ensureTx if tx is nil).
func (qb *ScrapedItemQueryBuilder) Create(newScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO scraped_items (title, description, url, date, rating, tags, models, episode, gallery_filename,
			gallery_url, video_filename, video_url, studio_id, created_at, updated_at)
		VALUES (:title, :description, :url, :date, :rating, :tags, :models, :episode, :gallery_filename,
			:gallery_url, :video_filename, :video_url, :studio_id, :created_at, :updated_at)
		`,
		newScrapedItem,
	)
	if err != nil {
		return nil, err
	}
	scrapedItemID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}
	// Re-read the row so the returned struct reflects what was stored.
	if err := tx.Get(&newScrapedItem, `SELECT * FROM scraped_items WHERE id = ? LIMIT 1`, scrapedItemID); err != nil {
		return nil, err
	}
	return &newScrapedItem, nil
}

// Update writes the non-empty fields of updatedScrapedItem (as selected by
// SQLGenKeys) to the row with its id, then returns the row re-read from the
// database. Requires a transaction.
func (qb *ScrapedItemQueryBuilder) Update(updatedScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE scraped_items SET `+SQLGenKeys(updatedScrapedItem)+` WHERE scraped_items.id = :id`,
		updatedScrapedItem,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedScrapedItem, `SELECT * FROM scraped_items WHERE id = ? LIMIT 1`, updatedScrapedItem.ID); err != nil {
		return nil, err
	}
	return &updatedScrapedItem, nil
}
|
||||
|
||||
// Find returns the scraped item with the given id, or nil if not found.
func (qb *ScrapedItemQueryBuilder) Find(id int) (*ScrapedItem, error) {
	query := "SELECT * FROM scraped_items WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryScrapedItem(query, args, nil)
}

// All returns every scraped item using the default (nil-filter) sort.
func (qb *ScrapedItemQueryBuilder) All() ([]ScrapedItem, error) {
	return qb.queryScrapedItems(selectAll("scraped_items")+qb.getScrapedItemsSort(nil), nil, nil)
}

// getScrapedItemsSort builds the ORDER BY fragment; a nil filter sorts by
// id ascending.
func (qb *ScrapedItemQueryBuilder) getScrapedItemsSort(findFilter *FindFilterType) string {
	var sort string
	var direction string
	if findFilter == nil {
		sort = "id" // TODO studio_id and title
		direction = "ASC"
	} else {
		sort = findFilter.GetSort("id")
		direction = findFilter.GetDirection()
	}
	return getSort(sort, direction, "scraped_items")
}
|
||||
|
||||
// queryScrapedItem runs a query and returns the first scraped item, or nil
// when no rows matched.
func (qb *ScrapedItemQueryBuilder) queryScrapedItem(query string, args []interface{}, tx *sqlx.Tx) (*ScrapedItem, error) {
	results, err := qb.queryScrapedItems(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return &results[0], nil
}

// queryScrapedItems runs a query expected to return scraped item rows and
// scans them into a slice. When tx is nil the shared database handle is
// used.
func (qb *ScrapedItemQueryBuilder) queryScrapedItems(query string, args []interface{}, tx *sqlx.Tx) ([]ScrapedItem, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	// sql.ErrNoRows is treated as an empty result rather than a failure.
	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	scrapedItems := make([]ScrapedItem, 0)
	scrapedItem := ScrapedItem{}
	for rows.Next() {
		if err := rows.StructScan(&scrapedItem); err != nil {
			return nil, err
		}
		// Value type: append copies, so reusing the scan target is safe.
		scrapedItems = append(scrapedItems, scrapedItem)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return scrapedItems, nil
}
|
||||
234
pkg/models/querybuilder_sql.go
Normal file
234
pkg/models/querybuilder_sql.go
Normal file
@@ -0,0 +1,234 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// selectAll returns the prefix of a query selecting every column of
// tableName, with a trailing space so clauses can be appended.
func selectAll(tableName string) string {
	return "SELECT " + getColumn(tableName, "*") + " FROM " + tableName + " "
}

// selectDistinctIDs returns the prefix of a query selecting the distinct
// ids of tableName, with a trailing space so clauses can be appended.
func selectDistinctIDs(tableName string) string {
	return "SELECT DISTINCT " + getColumn(tableName, "id") + " FROM " + tableName + " "
}

// buildCountQuery wraps query so it yields one row with a "count" column.
func buildCountQuery(query string) string {
	return "SELECT COUNT(*) as count FROM (" + query + ") as temp"
}

// getColumn qualifies columnName with its table name.
func getColumn(tableName string, columnName string) string {
	return tableName + "." + columnName
}
|
||||
|
||||
func getPagination(findFilter *FindFilterType) string {
|
||||
if findFilter == nil {
|
||||
panic("nil find filter for pagination")
|
||||
}
|
||||
|
||||
var page int
|
||||
if findFilter.Page == nil || *findFilter.Page < 1 {
|
||||
page = 1
|
||||
} else {
|
||||
page = *findFilter.Page
|
||||
}
|
||||
|
||||
var perPage int
|
||||
if findFilter.PerPage == nil {
|
||||
perPage = 25
|
||||
} else {
|
||||
perPage = *findFilter.PerPage
|
||||
}
|
||||
if perPage > 120 {
|
||||
perPage = 120
|
||||
} else if perPage < 1 {
|
||||
perPage = 1
|
||||
}
|
||||
|
||||
page = (page - 1) * perPage
|
||||
return " LIMIT " + strconv.Itoa(perPage) + " OFFSET " + strconv.Itoa(page) + " "
|
||||
}
|
||||
|
||||
// getSort builds the ORDER BY fragment for a sort field and direction.
// Unknown directions fall back to ASC. "<relation>_count" sorts by a
// distinct count over the relation table, "filesize" casts the size
// column to an integer, and "random" shuffles.
func getSort(sort string, direction string, tableName string) string {
	if direction != "ASC" && direction != "DESC" {
		direction = "ASC"
	}

	switch {
	case strings.Contains(sort, "_count"):
		relationTable := strings.Split(sort, "_")[0] // TODO: pluralize?
		return " ORDER BY COUNT(distinct " + relationTable + ".id) " + direction
	case sort == "filesize":
		return " ORDER BY cast(" + tableName + ".size as integer) " + direction
	case sort == "random":
		return " ORDER BY RANDOM() "
	default:
		return " ORDER BY " + tableName + "." + sort + " " + direction
	}
}
|
||||
|
||||
// getSearch builds a parenthesized WHERE fragment matching q against the
// given columns.
//
// An unquoted query is split on spaces and any word may match any column
// (clauses ORed together); a query wrapped in double quotes is matched as
// a single exact phrase. Single quotes in the input are doubled so user
// text cannot terminate the SQL string literal — previously the raw input
// was concatenated in, allowing syntax errors/injection via a quote.
func getSearch(columns []string, q string) string {
	// Double single quotes per SQL string-literal escaping rules.
	escape := func(s string) string {
		return strings.Replace(s, "'", "''", -1)
	}

	var likeClauses []string
	queryWords := strings.Split(q, " ")
	trimmedQuery := strings.Trim(q, "\"")
	if trimmedQuery == q {
		// Search for any word
		for _, word := range queryWords {
			for _, column := range columns {
				likeClauses = append(likeClauses, column+" LIKE '%"+escape(word)+"%'")
			}
		}
	} else {
		// Search the exact query
		for _, column := range columns {
			likeClauses = append(likeClauses, column+" LIKE '%"+escape(trimmedQuery)+"%'")
		}
	}
	likes := strings.Join(likeClauses, " OR ")

	return "(" + likes + ")"
}
|
||||
|
||||
// getInBinding returns a parenthesized list of `length` SQL placeholders,
// e.g. "(?, ?, ?)" for an IN clause.
func getInBinding(length int) string {
	placeholders := make([]string, length)
	for i := range placeholders {
		placeholders[i] = "?"
	}
	return "(" + strings.Join(placeholders, ", ") + ")"
}
|
||||
|
||||
// runIdsQuery executes a query whose rows each contain a single "id"
// column and returns the ids. sql.ErrNoRows yields an empty slice.
func runIdsQuery(query string, args []interface{}) ([]int, error) {
	var result []struct {
		Int int `db:"id"`
	}
	if err := database.DB.Select(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return []int{}, err
	}

	vsm := make([]int, len(result))
	for i, v := range result {
		vsm[i] = v.Int
	}
	return vsm, nil
}

// runCountQuery executes a query returning a single "count" column (see
// buildCountQuery) and returns its value. sql.ErrNoRows yields 0.
func runCountQuery(query string, args []interface{}) (int, error) {
	// Perform query and fetch result
	result := struct {
		Int int `db:"count"`
	}{0}
	if err := database.DB.Get(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return 0, err
	}

	return result.Int, nil
}
|
||||
|
||||
// executeFindQuery assembles a filtered find query from its parts and runs
// it twice: once wrapped in COUNT(*) for the unpaginated total, and once
// with sorting/pagination appended for the requested page of ids.
// NOTE(review): query errors panic here instead of being returned; callers
// depend on the two-value return shape, so this is preserved.
func executeFindQuery(tableName string, body string, args []interface{}, sortAndPagination string, whereClauses []string, havingClauses []string) ([]int, int) {
	if len(whereClauses) > 0 {
		body = body + " WHERE " + strings.Join(whereClauses, " AND ") // TODO handle AND or OR
	}
	// Group by id so joined filter tables don't duplicate rows.
	body = body + " GROUP BY " + tableName + ".id "
	if len(havingClauses) > 0 {
		body = body + " HAVING " + strings.Join(havingClauses, " AND ") // TODO handle AND or OR
	}

	countQuery := buildCountQuery(body)
	countResult, countErr := runCountQuery(countQuery, args)

	idsQuery := body + sortAndPagination
	idsResult, idsErr := runIdsQuery(idsQuery, args)

	if countErr != nil {
		panic(countErr)
	}
	if idsErr != nil {
		panic(idsErr)
	}

	return idsResult, countResult
}
|
||||
|
||||
// executeDeleteQuery deletes the row of tableName with the given id.
// Requires a transaction (panics if tx is nil).
func executeDeleteQuery(tableName string, id string, tx *sqlx.Tx) error {
	if tx == nil {
		panic("must use a transaction")
	}
	idColumnName := getColumn(tableName, "id")
	_, err := tx.Exec(
		`DELETE FROM `+tableName+` WHERE `+idColumnName+` = ?`,
		id,
	)
	return err
}

// ensureTx panics unless a transaction was supplied; used by mutating
// query-builder methods to enforce transactional writes.
func ensureTx(tx *sqlx.Tx) {
	if tx == nil {
		panic("must use a transaction")
	}
}
|
||||
|
||||
// https://github.com/jmoiron/sqlx/issues/410
|
||||
// sqlGenKeys is used for passing a struct and returning a string
|
||||
// of keys for non empty key:values. These keys are formated
|
||||
// keyname=:keyname with a comma seperating them
|
||||
func SQLGenKeys(i interface{}) string {
|
||||
var query []string
|
||||
v := reflect.ValueOf(i)
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
//get key for struct tag
|
||||
rawKey := v.Type().Field(i).Tag.Get("db")
|
||||
key := strings.Split(rawKey, ",")[0]
|
||||
if key == "id" {
|
||||
continue
|
||||
}
|
||||
switch t := v.Field(i).Interface().(type) {
|
||||
case string:
|
||||
if t != "" {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case int:
|
||||
if t != 0 {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case float64:
|
||||
if t != 0 {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case SQLiteTimestamp:
|
||||
if !t.Timestamp.IsZero() {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case sql.NullString:
|
||||
if t.Valid {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case sql.NullBool:
|
||||
if t.Valid {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case sql.NullInt64:
|
||||
if t.Valid {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
case sql.NullFloat64:
|
||||
if t.Valid {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
default:
|
||||
reflectValue := reflect.ValueOf(t)
|
||||
kind := reflectValue.Kind()
|
||||
isNil := reflectValue.IsNil()
|
||||
if kind != reflect.Ptr && !isNil {
|
||||
query = append(query, fmt.Sprintf("%s=:%s", key, key))
|
||||
}
|
||||
}
|
||||
}
|
||||
return strings.Join(query, ", ")
|
||||
}
|
||||
155
pkg/models/querybuilder_studio.go
Normal file
155
pkg/models/querybuilder_studio.go
Normal file
@@ -0,0 +1,155 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
)
|
||||
|
||||
// StudioQueryBuilder provides database operations for studio rows.
type StudioQueryBuilder struct{}

// NewStudioQueryBuilder returns a zero-value builder.
func NewStudioQueryBuilder() StudioQueryBuilder {
	return StudioQueryBuilder{}
}

// Create inserts newStudio and returns the row as stored, including its
// generated id. Requires a transaction (panics via ensureTx if tx is nil).
func (qb *StudioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO studios (image, checksum, name, url, created_at, updated_at)
		VALUES (:image, :checksum, :name, :url, :created_at, :updated_at)
		`,
		newStudio,
	)
	if err != nil {
		return nil, err
	}
	studioID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	// Re-read the row so the returned struct reflects what was stored.
	if err := tx.Get(&newStudio, `SELECT * FROM studios WHERE id = ? LIMIT 1`, studioID); err != nil {
		return nil, err
	}
	return &newStudio, nil
}

// Update writes the non-empty fields of updatedStudio (as selected by
// SQLGenKeys) to the row with its id, then returns the row re-read from
// the database. Requires a transaction.
func (qb *StudioQueryBuilder) Update(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE studios SET `+SQLGenKeys(updatedStudio)+` WHERE studios.id = :id`,
		updatedStudio,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedStudio, `SELECT * FROM studios WHERE id = ? LIMIT 1`, updatedStudio.ID); err != nil {
		return nil, err
	}
	return &updatedStudio, nil
}
|
||||
|
||||
// Find returns the studio with the given id, or nil if not found.
func (qb *StudioQueryBuilder) Find(id int, tx *sqlx.Tx) (*Studio, error) {
	query := "SELECT * FROM studios WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryStudio(query, args, tx)
}

// FindBySceneID returns the studio attached to the given scene, or nil.
func (qb *StudioQueryBuilder) FindBySceneID(sceneID int) (*Studio, error) {
	query := "SELECT studios.* FROM studios JOIN scenes ON studios.id = scenes.studio_id WHERE scenes.id = ? LIMIT 1"
	args := []interface{}{sceneID}
	return qb.queryStudio(query, args, nil)
}

// FindByName returns the studio with an exact name match, or nil.
func (qb *StudioQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Studio, error) {
	query := "SELECT * FROM studios WHERE name = ? LIMIT 1"
	args := []interface{}{name}
	return qb.queryStudio(query, args, tx)
}

// Count returns the total number of studios.
func (qb *StudioQueryBuilder) Count() (int, error) {
	return runCountQuery(buildCountQuery("SELECT studios.id FROM studios"), nil)
}

// All returns every studio using the default (nil-filter) sort.
func (qb *StudioQueryBuilder) All() ([]Studio, error) {
	return qb.queryStudios(selectAll("studios")+qb.getStudioSort(nil), nil, nil)
}
|
||||
|
||||
func (qb *StudioQueryBuilder) Query(findFilter *FindFilterType) ([]Studio, int) {
|
||||
if findFilter == nil {
|
||||
findFilter = &FindFilterType{}
|
||||
}
|
||||
|
||||
whereClauses := []string{}
|
||||
havingClauses := []string{}
|
||||
args := []interface{}{}
|
||||
body := selectDistinctIDs("studios")
|
||||
|
||||
if q := findFilter.Q; q != nil && *q != "" {
|
||||
searchColumns := []string{"studios.name"}
|
||||
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||
}
|
||||
|
||||
sortAndPagination := qb.getStudioSort(findFilter) + getPagination(findFilter)
|
||||
idsResult, countResult := executeFindQuery("studios", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||
|
||||
var studios []Studio
|
||||
for _, id := range idsResult {
|
||||
studio, _ := qb.Find(id, nil)
|
||||
studios = append(studios, *studio)
|
||||
}
|
||||
|
||||
return studios, countResult
|
||||
}
|
||||
|
||||
func (qb *StudioQueryBuilder) getStudioSort(findFilter *FindFilterType) string {
|
||||
var sort string
|
||||
var direction string
|
||||
if findFilter == nil {
|
||||
sort = "name"
|
||||
direction = "ASC"
|
||||
} else {
|
||||
sort = findFilter.GetSort("name")
|
||||
direction = findFilter.GetDirection()
|
||||
}
|
||||
return getSort(sort, direction, "studios")
|
||||
}
|
||||
|
||||
// queryStudio runs a query and returns the first studio, or nil when no
// rows matched.
func (qb *StudioQueryBuilder) queryStudio(query string, args []interface{}, tx *sqlx.Tx) (*Studio, error) {
	results, err := qb.queryStudios(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return &results[0], nil
}

// queryStudios runs a query expected to return studio rows and scans them
// into a slice. When tx is nil the shared database handle is used.
func (qb *StudioQueryBuilder) queryStudios(query string, args []interface{}, tx *sqlx.Tx) ([]Studio, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	// sql.ErrNoRows is treated as an empty result rather than a failure.
	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	studios := make([]Studio, 0)
	studio := Studio{}
	for rows.Next() {
		if err := rows.StructScan(&studio); err != nil {
			return nil, err
		}
		// Value type: append copies, so reusing the scan target is safe.
		studios = append(studios, studio)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return studios, nil
}
|
||||
162
pkg/models/querybuilder_tag.go
Normal file
162
pkg/models/querybuilder_tag.go
Normal file
@@ -0,0 +1,162 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
)
|
||||
|
||||
// TagQueryBuilder provides database operations for tag rows.
type TagQueryBuilder struct{}

// NewTagQueryBuilder returns a zero-value builder.
func NewTagQueryBuilder() TagQueryBuilder {
	return TagQueryBuilder{}
}

// Create inserts newTag and returns the row as stored, including its
// generated id. Requires a transaction (panics via ensureTx if tx is nil).
func (qb *TagQueryBuilder) Create(newTag Tag, tx *sqlx.Tx) (*Tag, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO tags (name, created_at, updated_at)
		VALUES (:name, :created_at, :updated_at)
		`,
		newTag,
	)
	if err != nil {
		return nil, err
	}
	// NOTE(review): "studioID" is a copy/paste leftover name; it holds the
	// new tag's generated id.
	studioID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	// Re-read the row so the returned struct reflects what was stored.
	if err := tx.Get(&newTag, `SELECT * FROM tags WHERE id = ? LIMIT 1`, studioID); err != nil {
		return nil, err
	}
	return &newTag, nil
}

// Update writes the non-empty fields of updatedTag (as selected by
// SQLGenKeys) to the row with its id, then returns the row re-read from
// the database. Requires a transaction.
func (qb *TagQueryBuilder) Update(updatedTag Tag, tx *sqlx.Tx) (*Tag, error) {
	ensureTx(tx)
	query := `UPDATE tags SET ` + SQLGenKeys(updatedTag) + ` WHERE tags.id = :id`
	_, err := tx.NamedExec(
		query,
		updatedTag,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedTag, `SELECT * FROM tags WHERE id = ? LIMIT 1`, updatedTag.ID); err != nil {
		return nil, err
	}
	return &updatedTag, nil
}

// Destroy deletes the tag with the given id. Requires a transaction.
func (qb *TagQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
	return executeDeleteQuery("tags", id, tx)
}
|
||||
|
||||
// Find returns the tag with the given id, or nil if not found.
func (qb *TagQueryBuilder) Find(id int, tx *sqlx.Tx) (*Tag, error) {
	query := "SELECT * FROM tags WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryTag(query, args, tx)
}

// FindBySceneID returns all tags attached to the given scene, in the
// default tag sort order.
func (qb *TagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Tag, error) {
	query := `
		SELECT tags.* FROM tags
		LEFT JOIN scenes_tags as scenes_join on scenes_join.tag_id = tags.id
		LEFT JOIN scenes on scenes_join.scene_id = scenes.id
		WHERE scenes.id = ?
		GROUP BY tags.id
	`
	query += qb.getTagSort(nil)
	args := []interface{}{sceneID}
	return qb.queryTags(query, args, tx)
}

// FindBySceneMarkerID returns all tags attached to the given scene marker,
// in the default tag sort order.
func (qb *TagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) ([]Tag, error) {
	query := `
		SELECT tags.* FROM tags
		LEFT JOIN scene_markers_tags as scene_markers_join on scene_markers_join.tag_id = tags.id
		LEFT JOIN scene_markers on scene_markers_join.scene_marker_id = scene_markers.id
		WHERE scene_markers.id = ?
		GROUP BY tags.id
	`
	query += qb.getTagSort(nil)
	args := []interface{}{sceneMarkerID}
	return qb.queryTags(query, args, tx)
}

// FindByName returns the tag with an exact name match, or nil.
func (qb *TagQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Tag, error) {
	query := "SELECT * FROM tags WHERE name = ? LIMIT 1"
	args := []interface{}{name}
	return qb.queryTag(query, args, tx)
}

// FindByNames returns the tags whose names appear in the given list.
func (qb *TagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Tag, error) {
	query := "SELECT * FROM tags WHERE name IN " + getInBinding(len(names))
	var args []interface{}
	for _, name := range names {
		args = append(args, name)
	}
	return qb.queryTags(query, args, tx)
}

// Count returns the total number of tags.
func (qb *TagQueryBuilder) Count() (int, error) {
	return runCountQuery(buildCountQuery("SELECT tags.id FROM tags"), nil)
}

// All returns every tag using the default (nil-filter) sort.
func (qb *TagQueryBuilder) All() ([]Tag, error) {
	return qb.queryTags(selectAll("tags")+qb.getTagSort(nil), nil, nil)
}
|
||||
|
||||
func (qb *TagQueryBuilder) getTagSort(findFilter *FindFilterType) string {
|
||||
var sort string
|
||||
var direction string
|
||||
if findFilter == nil {
|
||||
sort = "name"
|
||||
direction = "ASC"
|
||||
} else {
|
||||
sort = findFilter.GetSort("name")
|
||||
direction = findFilter.GetDirection()
|
||||
}
|
||||
return getSort(sort, direction, "tags")
|
||||
}
|
||||
|
||||
// queryTag runs a query and returns the first tag, or nil when no rows
// matched.
func (qb *TagQueryBuilder) queryTag(query string, args []interface{}, tx *sqlx.Tx) (*Tag, error) {
	results, err := qb.queryTags(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return &results[0], nil
}

// queryTags runs a query expected to return tag rows and scans them into
// a slice. When tx is nil the shared database handle is used.
func (qb *TagQueryBuilder) queryTags(query string, args []interface{}, tx *sqlx.Tx) ([]Tag, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	// sql.ErrNoRows is treated as an empty result rather than a failure.
	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	tags := make([]Tag, 0)
	tag := Tag{}
	for rows.Next() {
		if err := rows.StructScan(&tag); err != nil {
			return nil, err
		}
		// Value type: append copies, so reusing the scan target is safe.
		tags = append(tags, tag)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return tags, nil
}
|
||||
21
pkg/models/sqlite_timestamp.go
Normal file
21
pkg/models/sqlite_timestamp.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package models
|
||||
|
||||
import (
	"database/sql/driver"
	"fmt"
	"time"
)
|
||||
|
||||
type SQLiteTimestamp struct {
|
||||
Timestamp time.Time
|
||||
}
|
||||
|
||||
// Scan implements the Scanner interface.
|
||||
func (t *SQLiteTimestamp) Scan(value interface{}) error {
|
||||
t.Timestamp = value.(time.Time)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value implements the driver Valuer interface.
|
||||
func (t SQLiteTimestamp) Value() (driver.Value, error) {
|
||||
return t.Timestamp.Format(time.RFC3339), nil
|
||||
}
|
||||
238
pkg/scraper/freeones.go
Normal file
238
pkg/scraper/freeones.go
Normal file
@@ -0,0 +1,238 @@
|
||||
package scraper
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func GetPerformerNames(q string) ([]string, error) {
|
||||
// Request the HTML page.
|
||||
queryURL := "https://www.freeones.com/suggestions.php?q=" + url.PathEscape(q) + "&t=1"
|
||||
res, err := http.Get(queryURL)
|
||||
if err != nil {
|
||||
logger.Fatal(err)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
|
||||
}
|
||||
|
||||
// Load the HTML document
|
||||
doc, err := goquery.NewDocumentFromReader(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find the performers
|
||||
var performerNames []string
|
||||
doc.Find(".suggestion").Each(func(i int, s *goquery.Selection) {
|
||||
name := strings.Trim(s.Text(), " ")
|
||||
performerNames = append(performerNames, name)
|
||||
})
|
||||
|
||||
return performerNames, nil
|
||||
}
|
||||
|
||||
func GetPerformer(performerName string) (*models.ScrapedPerformer, error) {
|
||||
queryURL := "https://www.freeones.com/search/?t=1&q=" + url.PathEscape(performerName) + "&view=thumbs"
|
||||
res, err := http.Get(queryURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
|
||||
}
|
||||
|
||||
// Load the HTML document
|
||||
doc, err := goquery.NewDocumentFromReader(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
performerLink := doc.Find("div.Block3 a").FilterFunction(func(i int, s *goquery.Selection) bool {
|
||||
href, _ := s.Attr("href")
|
||||
if href == "/html/j_links/Jenna_Leigh_c/" || href == "/html/a_links/Alexa_Grace_c/" {
|
||||
return false
|
||||
}
|
||||
if strings.ToLower(s.Text()) == strings.ToLower(performerName) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
href, _ := performerLink.Attr("href")
|
||||
href = strings.TrimSuffix(href, "/")
|
||||
regex := regexp.MustCompile(`.+_links\/(.+)`)
|
||||
matches := regex.FindStringSubmatch(href)
|
||||
href = strings.Replace(href, matches[1], "bio_"+matches[1]+".php", -1)
|
||||
href = "https://www.freeones.com" + href
|
||||
|
||||
bioRes, err := http.Get(href)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer bioRes.Body.Close()
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
|
||||
}
|
||||
|
||||
// Load the HTML document
|
||||
bioDoc, err := goquery.NewDocumentFromReader(bioRes.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
params := bioDoc.Find(".paramvalue")
|
||||
paramIndexes := getIndexes(bioDoc)
|
||||
|
||||
result := models.ScrapedPerformer{}
|
||||
|
||||
performerURL := bioRes.Request.URL.String()
|
||||
result.URL = &performerURL
|
||||
|
||||
name := paramValue(params, paramIndexes["name"])
|
||||
result.Name = &name
|
||||
|
||||
ethnicity := getEthnicity(paramValue(params, paramIndexes["ethnicity"]))
|
||||
result.Ethnicity = ðnicity
|
||||
|
||||
country := paramValue(params, paramIndexes["country"])
|
||||
result.Country = &country
|
||||
|
||||
eyeColor := paramValue(params, paramIndexes["eye_color"])
|
||||
result.EyeColor = &eyeColor
|
||||
|
||||
measurements := paramValue(params, paramIndexes["measurements"])
|
||||
result.Measurements = &measurements
|
||||
|
||||
fakeTits := paramValue(params, paramIndexes["fake_tits"])
|
||||
result.FakeTits = &fakeTits
|
||||
|
||||
careerLength := paramValue(params, paramIndexes["career_length"])
|
||||
careerRegex := regexp.MustCompile(`\([\s\S]*`)
|
||||
careerLength = careerRegex.ReplaceAllString(careerLength, "")
|
||||
careerLength = trim(careerLength)
|
||||
result.CareerLength = &careerLength
|
||||
|
||||
tattoos := paramValue(params, paramIndexes["tattoos"])
|
||||
result.Tattoos = &tattoos
|
||||
|
||||
piercings := paramValue(params, paramIndexes["piercings"])
|
||||
result.Piercings = &piercings
|
||||
|
||||
aliases := paramValue(params, paramIndexes["aliases"])
|
||||
result.Aliases = &aliases
|
||||
|
||||
birthdate := paramValue(params, paramIndexes["birthdate"])
|
||||
birthdateRegex := regexp.MustCompile(` \(\d* years old\)`)
|
||||
birthdate = birthdateRegex.ReplaceAllString(birthdate, "")
|
||||
birthdate = trim(birthdate)
|
||||
if birthdate != "Unknown" && len(birthdate) > 0 {
|
||||
t, _ := time.Parse("January _2, 2006", birthdate) // TODO
|
||||
formattedBirthdate := t.Format("2006-01-02")
|
||||
result.Birthdate = &formattedBirthdate
|
||||
}
|
||||
|
||||
height := paramValue(params, paramIndexes["height"])
|
||||
heightRegex := regexp.MustCompile(`heightcm = "(.*)"\;`)
|
||||
heightMatches := heightRegex.FindStringSubmatch(height)
|
||||
if len(heightMatches) > 1 {
|
||||
result.Height = &heightMatches[1]
|
||||
}
|
||||
|
||||
twitterElement := bioDoc.Find(".twitter a")
|
||||
twitterHref, _ := twitterElement.Attr("href")
|
||||
if twitterHref != "" {
|
||||
twitterURL, _ := url.Parse(twitterHref)
|
||||
twitterHandle := strings.Replace(twitterURL.Path, "/", "", -1)
|
||||
result.Twitter = &twitterHandle
|
||||
}
|
||||
|
||||
instaElement := bioDoc.Find(".instagram a")
|
||||
instaHref, _ := instaElement.Attr("href")
|
||||
if instaHref != "" {
|
||||
instaURL, _ := url.Parse(instaHref)
|
||||
instaHandle := strings.Replace(instaURL.Path, "/", "", -1)
|
||||
result.Instagram = &instaHandle
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
// getIndexes maps known bio-page parameter labels (".paramname" elements)
// to their 1-based position in the document, so the matching ".paramvalue"
// element can later be fetched by index. Labels absent from the page are
// simply missing from the map (a lookup yields 0, which paramValue treats
// as "no value").
func getIndexes(doc *goquery.Document) map[string]int {
	var indexes = make(map[string]int)
	doc.Find(".paramname").Each(func(i int, s *goquery.Selection) {
		index := i + 1
		paramName := trim(s.Text())
		switch paramName {
		case "Babe Name:":
			indexes["name"] = index
		case "Ethnicity:":
			indexes["ethnicity"] = index
		case "Country of Origin:":
			indexes["country"] = index
		case "Date of Birth:":
			indexes["birthdate"] = index
		case "Eye Color:":
			indexes["eye_color"] = index
		case "Height:":
			indexes["height"] = index
		case "Measurements:":
			indexes["measurements"] = index
		case "Fake boobs:":
			indexes["fake_tits"] = index
		case "Career Start And End":
			indexes["career_length"] = index
		case "Tattoos:":
			indexes["tattoos"] = index
		case "Piercings:":
			indexes["piercings"] = index
		case "Aliases:":
			indexes["aliases"] = index
		}
	})
	return indexes
}
|
||||
|
||||
// getEthnicity maps a freeones ethnicity label to the value stash stores.
// Unrecognized labels panic, matching the original behavior.
func getEthnicity(ethnicity string) string {
	mapping := map[string]string{
		"Caucasian": "white",
		"Black":     "black",
		"Latin":     "hispanic",
		"Asian":     "asian",
	}
	value, ok := mapping[ethnicity]
	if !ok {
		panic("unknown ethnicity")
	}
	return value
}
|
||||
|
||||
// paramValue returns the text of the paramIndex-th (1-based) ".paramvalue"
// element. An index of 0 means the label was not found on the page and
// yields "". When the element's first child has no text of its own, the
// value is read from the next sibling's first child instead.
// NOTE(review): assumes the index is in range and the sibling/child nodes
// exist; a malformed page would panic here — confirm against callers.
func paramValue(params *goquery.Selection, paramIndex int) string {
	i := paramIndex - 1
	if paramIndex == 0 {
		return ""
	}
	node := params.Get(i).FirstChild
	content := trim(node.Data)
	if content != "" {
		return content
	}
	node = node.NextSibling
	return trim(node.FirstChild.Data)
}
|
||||
|
||||
// https://stackoverflow.com/questions/20305966/why-does-strip-not-remove-the-leading-whitespace
// trim removes leading and trailing Unicode whitespace from text.
func trim(text string) string {
	// return text.replace(/\A\p{Space}*|\p{Space}*\z/, "");
	return strings.TrimSpace(text)
}
|
||||
8
pkg/utils/boolean.go
Normal file
8
pkg/utils/boolean.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package utils
|
||||
|
||||
// Btoi converts a boolean to an integer: 1 for true, 0 for false.
func Btoi(b bool) int {
	if !b {
		return 0
	}
	return 1
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user