Mirror of https://github.com/stashapp/stash.git, synced 2025-12-17 04:14:39 +03:00
Ran formatter and fixed some lint issues
@@ -12,7 +12,7 @@ before_install:
- yarn install
- ./node_modules/@angular/cli/bin/ng build --prod
- cd ../..
- go get -v golang.org/x/lint/golint
- go get -v github.com/mgechev/revive
script:
- make lint
- make vet
2 Makefile
@@ -22,4 +22,4 @@ vet:

.PHONY: lint
lint:
go list ./... | grep -v vendor | xargs golint -set_exit_status
revive -config revive.toml -exclude ./vendor/... ./...
16 README.md
@@ -34,6 +34,10 @@ Not yet, but this will come in the future.

# Development

## Install

* [Revive](https://github.com/mgechev/revive) - Configurable linter `go get github.com/mgechev/revive`

## Environment

### macOS
@@ -53,14 +57,18 @@ TODO

* `make build` - Builds the binary
* `make gqlgen` - Regenerate Go GraphQL files
* `make vet` - Run `go vet`
* `make lint` - Run the linter

## Building a release

1. cd into the UI directory and run `ng build --prod`
2. cd back to the root directory and run `make build` to build the executable
1. cd into the UI directory and run `ng build --prod` to compile the frontend
2. cd back to the root directory and run `make build` to build the executable for your current platform

#### Notes for the dev
## Cross compiling

https://blog.filippo.io/easy-windows-and-linux-cross-compilers-for-macos/
This project makes use of [this](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment
where the app can be cross compiled. This process is kicked off by CI via the `scripts/cross-compile.sh` script. Run the following
command to open a bash shell to the container to poke around:

`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t bepsays/ci-goreleaser:1.11-2 /bin/bash`
12 api/context_keys.go Normal file
@@ -0,0 +1,12 @@
package api

// https://stackoverflow.com/questions/40891345/fix-should-not-use-basic-type-string-as-key-in-context-withvalue-golint

type key int

const (
galleryKey key = 0
performerKey key = 1
sceneKey key = 2
studioKey key = 3
)
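For context, a minimal sketch (not part of the diff) of how an unexported key type is meant to be used with `context.WithValue`, which is what the linked golint advice is about. The `exampleGalleryCtx`/`exampleGalleryHandler` names and the placeholder string value are illustrative only; the real `GalleryCtx` middleware further down in this commit stores a `*models.Gallery` the same way.

```go
package api

import (
	"context"
	"net/http"
)

// exampleGalleryCtx stores a value under the unexported galleryKey. Because the
// key type is private to this package, no other package can collide with it,
// which is the collision the golint rule guards against.
func exampleGalleryCtx(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		gallery := "gallery loaded from the database" // stand-in for a *models.Gallery
		ctx := context.WithValue(r.Context(), galleryKey, gallery)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

// exampleGalleryHandler reads the value back using the same typed key.
func exampleGalleryHandler(w http.ResponseWriter, r *http.Request) {
	gallery, ok := r.Context().Value(galleryKey).(string)
	if !ok {
		http.Error(w, "gallery not set on request context", http.StatusInternalServerError)
		return
	}
	_, _ = w.Write([]byte(gallery))
}
```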
@@ -96,11 +96,11 @@ func (r *queryResolver) Stats(ctx context.Context) (models.StatsResultType, erro
tagsQB := models.NewTagQueryBuilder()
tagsCount, _ := tagsQB.Count()
return models.StatsResultType{
SceneCount: scenesCount,
GalleryCount: galleryCount,
SceneCount: scenesCount,
GalleryCount: galleryCount,
PerformerCount: performersCount,
StudioCount: studiosCount,
TagCount: tagsCount,
StudioCount: studiosCount,
TagCount: tagsCount,
}, nil
}

@@ -127,7 +127,7 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
_, hasKey := tags[markerPrimaryTag.ID]
var sceneMarkerTag *models.SceneMarkerTag
if !hasKey {
sceneMarkerTag = &models.SceneMarkerTag{ Tag: *markerPrimaryTag }
sceneMarkerTag = &models.SceneMarkerTag{Tag: *markerPrimaryTag}
tags[markerPrimaryTag.ID] = sceneMarkerTag
keys = append(keys, markerPrimaryTag.ID)
} else {

@@ -19,8 +19,8 @@ func (r *performerResolver) Name(ctx context.Context, obj *models.Performer) (*s
}

func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Url.Valid {
return &obj.Url.String, nil
if obj.URL.Valid {
return &obj.URL.String, nil
}
return nil, nil
}

@@ -125,7 +125,7 @@ func (r *performerResolver) Favorite(ctx context.Context, obj *models.Performer)

func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj.ID).GetPerformerImageUrl()
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj.ID).GetPerformerImageURL()
return &imagePath, nil
}

@@ -28,8 +28,8 @@ func (r *sceneResolver) Details(ctx context.Context, obj *models.Scene) (*string
}

func (r *sceneResolver) URL(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Url.Valid {
return &obj.Url.String, nil
if obj.URL.Valid {
return &obj.URL.String, nil
}
return nil, nil
}

@@ -55,34 +55,33 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (models.Sce
height := int(obj.Height.Int64)
bitrate := int(obj.Bitrate.Int64)
return models.SceneFileType{
Size: &obj.Size.String,
Duration: &obj.Duration.Float64,
Size: &obj.Size.String,
Duration: &obj.Duration.Float64,
VideoCodec: &obj.VideoCodec.String,
AudioCodec: &obj.AudioCodec.String,
Width: &width,
Height: &height,
Framerate: &obj.Framerate.Float64,
Bitrate: &bitrate,
Width: &width,
Height: &height,
Framerate: &obj.Framerate.Float64,
Bitrate: &bitrate,
}, nil
}

func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (models.ScenePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
screenshotPath := builder.GetScreenshotUrl()
previewPath := builder.GetStreamPreviewUrl()
streamPath := builder.GetStreamUrl()
webpPath := builder.GetStreamPreviewImageUrl()
vttPath := builder.GetSpriteVttUrl()
chaptersVttPath := builder.GetChaptersVttUrl()
screenshotPath := builder.GetScreenshotURL()
previewPath := builder.GetStreamPreviewURL()
streamPath := builder.GetStreamURL()
webpPath := builder.GetStreamPreviewImageURL()
vttPath := builder.GetSpriteVTTURL()
chaptersVttPath := builder.GetChaptersVTTURL()
return models.ScenePathsType{
Screenshot: &screenshotPath,
Preview: &previewPath,
Stream: &streamPath,
Webp: &webpPath,
Vtt: &vttPath,
Screenshot: &screenshotPath,
Preview: &previewPath,
Stream: &streamPath,
Webp: &webpPath,
Vtt: &vttPath,
ChaptersVtt: &chaptersVttPath,
}, nil
}

@@ -38,11 +38,11 @@ func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker)
func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
sceneID := int(obj.SceneID.Int64)
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamUrl(obj.ID), nil
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamURL(obj.ID), nil
}

func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMarker) (string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
sceneID := int(obj.SceneID.Int64)
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewUrl(obj.ID), nil
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewURL(obj.ID), nil
}

@@ -19,15 +19,15 @@ func (r *studioResolver) Name(ctx context.Context, obj *models.Studio) (string,
}

func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, error) {
if obj.Url.Valid {
return &obj.Url.String, nil
if obj.URL.Valid {
return &obj.URL.String, nil
}
return nil, nil
}

func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageUrl()
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageURL()
return &imagePath, nil
}
@@ -20,60 +20,60 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
// Populate a new performer from the input
currentTime := time.Now()
newPerformer := models.Performer{
Image: imageData,
Checksum: checksum,
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Image: imageData,
Checksum: checksum,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
if input.Name != nil {
newPerformer.Name = sql.NullString{ String: *input.Name, Valid: true }
newPerformer.Name = sql.NullString{String: *input.Name, Valid: true}
}
if input.URL != nil {
newPerformer.Url = sql.NullString{ String: *input.URL, Valid: true }
newPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Birthdate != nil {
newPerformer.Birthdate = sql.NullString{ String: *input.Birthdate, Valid: true }
newPerformer.Birthdate = sql.NullString{String: *input.Birthdate, Valid: true}
}
if input.Ethnicity != nil {
newPerformer.Ethnicity = sql.NullString{ String: *input.Ethnicity, Valid: true }
newPerformer.Ethnicity = sql.NullString{String: *input.Ethnicity, Valid: true}
}
if input.Country != nil {
newPerformer.Country = sql.NullString{ String: *input.Country, Valid: true }
newPerformer.Country = sql.NullString{String: *input.Country, Valid: true}
}
if input.EyeColor != nil {
newPerformer.EyeColor = sql.NullString{ String: *input.EyeColor, Valid: true }
newPerformer.EyeColor = sql.NullString{String: *input.EyeColor, Valid: true}
}
if input.Height != nil {
newPerformer.Height = sql.NullString{ String: *input.Height, Valid: true }
newPerformer.Height = sql.NullString{String: *input.Height, Valid: true}
}
if input.Measurements != nil {
newPerformer.Measurements = sql.NullString{ String: *input.Measurements, Valid: true }
newPerformer.Measurements = sql.NullString{String: *input.Measurements, Valid: true}
}
if input.FakeTits != nil {
newPerformer.FakeTits = sql.NullString{ String: *input.FakeTits, Valid: true }
newPerformer.FakeTits = sql.NullString{String: *input.FakeTits, Valid: true}
}
if input.CareerLength != nil {
newPerformer.CareerLength = sql.NullString{ String: *input.CareerLength, Valid: true }
newPerformer.CareerLength = sql.NullString{String: *input.CareerLength, Valid: true}
}
if input.Tattoos != nil {
newPerformer.Tattoos = sql.NullString{ String: *input.Tattoos, Valid: true }
newPerformer.Tattoos = sql.NullString{String: *input.Tattoos, Valid: true}
}
if input.Piercings != nil {
newPerformer.Piercings = sql.NullString{ String: *input.Piercings, Valid: true }
newPerformer.Piercings = sql.NullString{String: *input.Piercings, Valid: true}
}
if input.Aliases != nil {
newPerformer.Aliases = sql.NullString{ String: *input.Aliases, Valid: true }
newPerformer.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
}
if input.Twitter != nil {
newPerformer.Twitter = sql.NullString{ String: *input.Twitter, Valid: true }
newPerformer.Twitter = sql.NullString{String: *input.Twitter, Valid: true}
}
if input.Instagram != nil {
newPerformer.Instagram = sql.NullString{ String: *input.Instagram, Valid: true }
newPerformer.Instagram = sql.NullString{String: *input.Instagram, Valid: true}
}
if input.Favorite != nil {
newPerformer.Favorite = sql.NullBool{ Bool: *input.Favorite, Valid: true }
newPerformer.Favorite = sql.NullBool{Bool: *input.Favorite, Valid: true}
} else {
newPerformer.Favorite = sql.NullBool{ Bool: false, Valid: true }
newPerformer.Favorite = sql.NullBool{Bool: false, Valid: true}
}

// Start the transaction and save the performer

@@ -97,8 +97,8 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
// Populate performer from the input
performerID, _ := strconv.Atoi(input.ID)
updatedPerformer := models.Performer{
ID: performerID,
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
ID: performerID,
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}
if input.Image != nil {
checksum, imageData, err := utils.ProcessBase64Image(*input.Image)

@@ -109,54 +109,54 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.Checksum = checksum
}
if input.Name != nil {
updatedPerformer.Name = sql.NullString{ String: *input.Name, Valid: true }
updatedPerformer.Name = sql.NullString{String: *input.Name, Valid: true}
}
if input.URL != nil {
updatedPerformer.Url = sql.NullString{ String: *input.URL, Valid: true }
updatedPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Birthdate != nil {
updatedPerformer.Birthdate = sql.NullString{ String: *input.Birthdate, Valid: true }
updatedPerformer.Birthdate = sql.NullString{String: *input.Birthdate, Valid: true}
}
if input.Ethnicity != nil {
updatedPerformer.Ethnicity = sql.NullString{ String: *input.Ethnicity, Valid: true }
updatedPerformer.Ethnicity = sql.NullString{String: *input.Ethnicity, Valid: true}
}
if input.Country != nil {
updatedPerformer.Country = sql.NullString{ String: *input.Country, Valid: true }
updatedPerformer.Country = sql.NullString{String: *input.Country, Valid: true}
}
if input.EyeColor != nil {
updatedPerformer.EyeColor = sql.NullString{ String: *input.EyeColor, Valid: true }
updatedPerformer.EyeColor = sql.NullString{String: *input.EyeColor, Valid: true}
}
if input.Height != nil {
updatedPerformer.Height = sql.NullString{ String: *input.Height, Valid: true }
updatedPerformer.Height = sql.NullString{String: *input.Height, Valid: true}
}
if input.Measurements != nil {
updatedPerformer.Measurements = sql.NullString{ String: *input.Measurements, Valid: true }
updatedPerformer.Measurements = sql.NullString{String: *input.Measurements, Valid: true}
}
if input.FakeTits != nil {
updatedPerformer.FakeTits = sql.NullString{ String: *input.FakeTits, Valid: true }
updatedPerformer.FakeTits = sql.NullString{String: *input.FakeTits, Valid: true}
}
if input.CareerLength != nil {
updatedPerformer.CareerLength = sql.NullString{ String: *input.CareerLength, Valid: true }
updatedPerformer.CareerLength = sql.NullString{String: *input.CareerLength, Valid: true}
}
if input.Tattoos != nil {
updatedPerformer.Tattoos = sql.NullString{ String: *input.Tattoos, Valid: true }
updatedPerformer.Tattoos = sql.NullString{String: *input.Tattoos, Valid: true}
}
if input.Piercings != nil {
updatedPerformer.Piercings = sql.NullString{ String: *input.Piercings, Valid: true }
updatedPerformer.Piercings = sql.NullString{String: *input.Piercings, Valid: true}
}
if input.Aliases != nil {
updatedPerformer.Aliases = sql.NullString{ String: *input.Aliases, Valid: true }
updatedPerformer.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
}
if input.Twitter != nil {
updatedPerformer.Twitter = sql.NullString{ String: *input.Twitter, Valid: true }
updatedPerformer.Twitter = sql.NullString{String: *input.Twitter, Valid: true}
}
if input.Instagram != nil {
updatedPerformer.Instagram = sql.NullString{ String: *input.Instagram, Valid: true }
updatedPerformer.Instagram = sql.NullString{String: *input.Instagram, Valid: true}
}
if input.Favorite != nil {
updatedPerformer.Favorite = sql.NullBool{ Bool: *input.Favorite, Valid: true }
updatedPerformer.Favorite = sql.NullBool{Bool: *input.Favorite, Valid: true}
} else {
updatedPerformer.Favorite = sql.NullBool{ Bool: false, Valid: true }
updatedPerformer.Favorite = sql.NullBool{Bool: false, Valid: true}
}

// Start the transaction and save the performer
@@ -14,27 +14,27 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp
sceneID, _ := strconv.Atoi(input.ID)
updatedTime := time.Now()
updatedScene := models.Scene{
ID: sceneID,
UpdatedAt: models.SQLiteTimestamp{ Timestamp: updatedTime },
ID: sceneID,
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil {
updatedScene.Title = sql.NullString{ String: *input.Title, Valid: true }
updatedScene.Title = sql.NullString{String: *input.Title, Valid: true}
}
if input.Details != nil {
updatedScene.Details = sql.NullString{ String: *input.Details, Valid: true }
updatedScene.Details = sql.NullString{String: *input.Details, Valid: true}
}
if input.URL != nil {
updatedScene.Url = sql.NullString{ String: *input.URL, Valid: true }
updatedScene.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
updatedScene.Date = sql.NullString{ String: *input.Date, Valid: true }
updatedScene.Date = sql.NullString{String: *input.Date, Valid: true}
}
if input.Rating != nil {
updatedScene.Rating = sql.NullInt64{ Int64: int64(*input.Rating), Valid: true }
updatedScene.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
}
if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedScene.StudioID = sql.NullInt64{ Int64: studioID, Valid: true }
updatedScene.StudioID = sql.NullInt64{Int64: studioID, Valid: true}
}

// Start the transaction and save the scene marker

@@ -51,8 +51,8 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp
// Save the gallery
galleryID, _ := strconv.Atoi(*input.GalleryID)
updatedGallery := models.Gallery{
ID: galleryID,
SceneID: sql.NullInt64{ Int64: int64(sceneID), Valid: true },
ID: galleryID,
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: true},
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
}
gqb := models.NewGalleryQueryBuilder()

@@ -69,7 +69,7 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersScenes{
PerformerID: performerID,
SceneID: sceneID,
SceneID: sceneID,
}
performerJoins = append(performerJoins, performerJoin)
}

@@ -84,7 +84,7 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp
tagID, _ := strconv.Atoi(tid)
tagJoin := models.ScenesTags{
SceneID: sceneID,
TagID: tagID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}

@@ -106,12 +106,12 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
sceneID, _ := strconv.Atoi(input.SceneID)
currentTime := time.Now()
newSceneMarker := models.SceneMarker{
Title: input.Title,
Seconds: input.Seconds,
PrimaryTagID: sql.NullInt64{ Int64: int64(primaryTagID), Valid: primaryTagID != 0 },
SceneID: sql.NullInt64{ Int64: int64(sceneID), Valid: sceneID != 0 },
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Title: input.Title,
Seconds: input.Seconds,
PrimaryTagID: sql.NullInt64{Int64: int64(primaryTagID), Valid: primaryTagID != 0},
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: sceneID != 0},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

// Start the transaction and save the scene marker

@@ -130,7 +130,7 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
tagID, _ := strconv.Atoi(tid)
markerTag := models.SceneMarkersTags{
SceneMarkerID: sceneMarker.ID,
TagID: tagID,
TagID: tagID,
}
markerTagJoins = append(markerTagJoins, markerTag)
}

@@ -153,12 +153,12 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
sceneID, _ := strconv.Atoi(input.SceneID)
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
updatedSceneMarker := models.SceneMarker{
ID: sceneMarkerID,
Title: input.Title,
Seconds: input.Seconds,
SceneID: sql.NullInt64{ Int64: int64(sceneID), Valid: sceneID != 0 },
PrimaryTagID: sql.NullInt64{ Int64: int64(primaryTagID), Valid: primaryTagID != 0 },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
ID: sceneMarkerID,
Title: input.Title,
Seconds: input.Seconds,
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: sceneID != 0},
PrimaryTagID: sql.NullInt64{Int64: int64(primaryTagID), Valid: primaryTagID != 0},
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}

// Start the transaction and save the scene marker

@@ -177,7 +177,7 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
tagID, _ := strconv.Atoi(tid)
markerTag := models.SceneMarkersTags{
SceneMarkerID: sceneMarkerID,
TagID: tagID,
TagID: tagID,
}
markerTagJoins = append(markerTagJoins, markerTag)
}

@@ -20,14 +20,14 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
// Populate a new studio from the input
currentTime := time.Now()
newStudio := models.Studio{
Image: imageData,
Checksum: checksum,
Name: sql.NullString{ String: input.Name, Valid: true },
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Image: imageData,
Checksum: checksum,
Name: sql.NullString{String: input.Name, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
if input.URL != nil {
newStudio.Url = sql.NullString{ String: *input.URL, Valid: true }
newStudio.URL = sql.NullString{String: *input.URL, Valid: true}
}

// Start the transaction and save the studio

@@ -51,8 +51,8 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
// Populate studio from the input
studioID, _ := strconv.Atoi(input.ID)
updatedStudio := models.Studio{
ID: studioID,
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
ID: studioID,
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}
if input.Image != nil {
checksum, imageData, err := utils.ProcessBase64Image(*input.Image)

@@ -63,10 +63,10 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
updatedStudio.Checksum = checksum
}
if input.Name != nil {
updatedStudio.Name = sql.NullString{ String: *input.Name, Valid: true }
updatedStudio.Name = sql.NullString{String: *input.Name, Valid: true}
}
if input.URL != nil {
updatedStudio.Url = sql.NullString{ String: *input.URL, Valid: true }
updatedStudio.URL = sql.NullString{String: *input.URL, Valid: true}
}

// Start the transaction and save the studio

@@ -12,9 +12,9 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
// Populate a new tag from the input
currentTime := time.Now()
newTag := models.Tag{
Name: input.Name,
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Name: input.Name,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

// Start the transaction and save the studio

@@ -38,9 +38,9 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
// Populate tag from the input
tagID, _ := strconv.Atoi(input.ID)
updatedTag := models.Tag{
ID: tagID,
Name: input.Name,
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
ID: tagID,
Name: input.Name,
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}

// Start the transaction and save the tag
@@ -16,7 +16,7 @@ func (r *queryResolver) FindGalleries(ctx context.Context, filter *models.FindFi
qb := models.NewGalleryQueryBuilder()
galleries, total := qb.Query(filter)
return models.FindGalleriesResultType{
Count: total,
Count: total,
Galleries: galleries,
}, nil
}

@@ -16,7 +16,7 @@ func (r *queryResolver) FindPerformers(ctx context.Context, performer_filter *mo
qb := models.NewPerformerQueryBuilder()
performers, total := qb.Query(performer_filter, filter)
return models.FindPerformersResultType{
Count: total,
Count: total,
Performers: performers,
}, nil
}

@@ -23,7 +23,7 @@ func (r *queryResolver) FindScenes(ctx context.Context, scene_filter *models.Sce
qb := models.NewSceneQueryBuilder()
scenes, total := qb.Query(scene_filter, filter)
return models.FindScenesResultType{
Count: total,
Count: total,
Scenes: scenes,
}, nil
}

@@ -9,7 +9,7 @@ func (r *queryResolver) FindSceneMarkers(ctx context.Context, scene_marker_filte
qb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, total := qb.Query(scene_marker_filter, filter)
return models.FindSceneMarkersResultType{
Count: total,
Count: total,
SceneMarkers: sceneMarkers,
}, nil
}

@@ -16,7 +16,7 @@ func (r *queryResolver) FindStudios(ctx context.Context, filter *models.FindFilt
qb := models.NewStudioQueryBuilder()
studios, total := qb.Query(filter)
return models.FindStudiosResultType{
Count: total,
Count: total,
Studios: studios,
}, nil
}

@@ -2,7 +2,6 @@ package api

import (
"context"
"github.com/stashapp/stash/logger"
"github.com/stashapp/stash/manager"
"time"
)

@@ -15,8 +14,7 @@ func (r *subscriptionResolver) MetadataUpdate(ctx context.Context) (<-chan strin
go func() {
for {
select {
case t := <-ticker.C:
logger.Trace("metadata subscription tick at %s", t)
case _ = <-ticker.C:
manager.GetInstance().HandleMetadataUpdateSubscriptionTick(msg)
case <-ctx.Done():
ticker.Stop()

@@ -22,7 +22,7 @@ func (rs galleryRoutes) Routes() chi.Router {
}

func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
gallery := r.Context().Value("gallery").(*models.Gallery)
gallery := r.Context().Value(galleryKey).(*models.Gallery)
fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex"))
thumb := r.URL.Query().Get("thumb")
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week

@@ -48,7 +48,7 @@ func GalleryCtx(next http.Handler) http.Handler {
return
}

ctx := context.WithValue(r.Context(), "gallery", gallery)
ctx := context.WithValue(r.Context(), galleryKey, gallery)
next.ServeHTTP(w, r.WithContext(ctx))
})
}

@@ -22,7 +22,7 @@ func (rs performerRoutes) Routes() chi.Router {
}

func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
performer := r.Context().Value("performer").(*models.Performer)
performer := r.Context().Value(performerKey).(*models.Performer)
_, _ = w.Write(performer.Image)
}

@@ -41,7 +41,7 @@ func PerformerCtx(next http.Handler) http.Handler {
return
}

ctx := context.WithValue(r.Context(), "performer", performer)
ctx := context.WithValue(r.Context(), performerKey, performer)
next.ServeHTTP(w, r.WithContext(ctx))
})
}

@@ -38,31 +38,31 @@ func (rs sceneRoutes) Routes() chi.Router {
// region Handlers

func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum)
http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum)
http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.Checksum)
http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.Checksum)
http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
qb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, err := qb.FindBySceneID(scene.ID, nil)
if err != nil {

@@ -72,7 +72,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
vttLines := []string{"WEBVTT", ""}
for _, marker := range sceneMarkers {
time := utils.GetVTTTime(marker.Seconds)
vttLines = append(vttLines, time + " --> " + time)
vttLines = append(vttLines, time+" --> "+time)
vttLines = append(vttLines, marker.Title)
vttLines = append(vttLines, "")
}

@@ -83,21 +83,21 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
}

func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "text/vtt")
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.Checksum)
http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "image/jpeg")
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.Checksum)
http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
qb := models.NewSceneMarkerQueryBuilder()
sceneMarker, err := qb.Find(sceneMarkerID)

@@ -111,7 +111,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
}

func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value("scene").(*models.Scene)
scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
qb := models.NewSceneMarkerQueryBuilder()
sceneMarker, err := qb.Find(sceneMarkerID)

@@ -145,7 +145,7 @@ func SceneCtx(next http.Handler) http.Handler {
return
}

ctx := context.WithValue(r.Context(), "scene", scene)
ctx := context.WithValue(r.Context(), sceneKey, scene)
next.ServeHTTP(w, r.WithContext(ctx))
})
}

@@ -22,7 +22,7 @@ func (rs studioRoutes) Routes() chi.Router {
}

func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
studio := r.Context().Value("studio").(*models.Studio)
studio := r.Context().Value(studioKey).(*models.Studio)
_, _ = w.Write(studio.Image)
}

@@ -41,7 +41,7 @@ func StudioCtx(next http.Handler) http.Handler {
return
}

ctx := context.WithValue(r.Context(), "studio", studio)
ctx := context.WithValue(r.Context(), studioKey, studio)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
@@ -83,7 +83,7 @@ func Start() {
// Serve the setup UI
r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
if ext == ".html" || ext == "" {
if ext == ".html" || ext == "" {
data := setupUIBox.Bytes("index.html")
_, _ = w.Write(data)
} else {

@@ -126,9 +126,9 @@ func Start() {
_ = os.Mkdir(downloads, 0755)

config := &jsonschema.Config{
Stash: stash,
Metadata: metadata,
Cache: cache,
Stash: stash,
Metadata: metadata,
Cache: cache,
Downloads: downloads,
}
if err := manager.GetInstance().SaveConfig(config); err != nil {

@@ -142,7 +142,7 @@ func Start() {
// Serve the angular app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
if ext == ".html" || ext == "" {
if ext == ".html" || ext == "" {
data := uiBox.Bytes("index.html")
_, _ = w.Write(data)
} else {

@@ -151,12 +151,12 @@ func Start() {
})

httpsServer := &http.Server{
Addr: ":"+httpsPort,
Handler: r,
Addr: ":" + httpsPort,
Handler: r,
TLSConfig: makeTLSConfig(),
}
server := &http.Server{
Addr: ":"+httpPort,
Addr: ":" + httpPort,
Handler: r,
}

@@ -187,13 +187,14 @@ func makeTLSConfig() *tls.Config {
return tlsConfig
}

type contextKey struct {
name string
}

var (
BaseURLCtxKey = &contextKey{"BaseURL"}
)

func BaseURLMiddleware(next http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
@@ -2,18 +2,18 @@ package urlbuilders

import "strconv"

type galleryURLBuilder struct {
BaseURL string
type GalleryURLBuilder struct {
BaseURL string
GalleryID string
}

func NewGalleryURLBuilder(baseURL string, galleryID int) galleryURLBuilder {
return galleryURLBuilder{
BaseURL: baseURL,
func NewGalleryURLBuilder(baseURL string, galleryID int) GalleryURLBuilder {
return GalleryURLBuilder{
BaseURL: baseURL,
GalleryID: strconv.Itoa(galleryID),
}
}

func (b galleryURLBuilder) GetGalleryImageUrl(fileIndex int) string {
func (b GalleryURLBuilder) GetGalleryImageURL(fileIndex int) string {
return b.BaseURL + "/gallery/" + b.GalleryID + "/" + strconv.Itoa(fileIndex)
}

@@ -2,18 +2,18 @@ package urlbuilders

import "strconv"

type performerURLBuilder struct {
BaseURL string
type PerformerURLBuilder struct {
BaseURL string
PerformerID string
}

func NewPerformerURLBuilder(baseURL string, performerID int) performerURLBuilder {
return performerURLBuilder{
BaseURL: baseURL,
func NewPerformerURLBuilder(baseURL string, performerID int) PerformerURLBuilder {
return PerformerURLBuilder{
BaseURL: baseURL,
PerformerID: strconv.Itoa(performerID),
}
}

func (b performerURLBuilder) GetPerformerImageUrl() string {
func (b PerformerURLBuilder) GetPerformerImageURL() string {
return b.BaseURL + "/performer/" + b.PerformerID + "/image"
}

@@ -2,46 +2,46 @@ package urlbuilders

import "strconv"

type sceneURLBuilder struct {
type SceneURLBuilder struct {
BaseURL string
SceneID string
}

func NewSceneURLBuilder(baseURL string, sceneID int) sceneURLBuilder {
return sceneURLBuilder{
func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder {
return SceneURLBuilder{
BaseURL: baseURL,
SceneID: strconv.Itoa(sceneID),
}
}

func (b sceneURLBuilder) GetStreamUrl() string {
func (b SceneURLBuilder) GetStreamURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/stream.mp4"
}

func (b sceneURLBuilder) GetStreamPreviewUrl() string {
func (b SceneURLBuilder) GetStreamPreviewURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/preview"
}

func (b sceneURLBuilder) GetStreamPreviewImageUrl() string {
func (b SceneURLBuilder) GetStreamPreviewImageURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/webp"
}

func (b sceneURLBuilder) GetSpriteVttUrl() string {
func (b SceneURLBuilder) GetSpriteVTTURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt"
}

func (b sceneURLBuilder) GetScreenshotUrl() string {
func (b SceneURLBuilder) GetScreenshotURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot"
}

func (b sceneURLBuilder) GetChaptersVttUrl() string {
func (b SceneURLBuilder) GetChaptersVTTURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/vtt/chapter"
}

func (b sceneURLBuilder) GetSceneMarkerStreamUrl(sceneMarkerId int) string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerId) + "/stream"
func (b SceneURLBuilder) GetSceneMarkerStreamURL(sceneMarkerID int) string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/stream"
}

func (b sceneURLBuilder) GetSceneMarkerStreamPreviewUrl(sceneMarkerId int) string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerId) + "/preview"
func (b SceneURLBuilder) GetSceneMarkerStreamPreviewURL(sceneMarkerID int) string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/preview"
}

@@ -2,18 +2,18 @@ package urlbuilders

import "strconv"

type studioURLBuilder struct {
BaseURL string
type StudioURLBuilder struct {
BaseURL string
StudioID string
}

func NewStudioURLBuilder(baseURL string, studioID int) studioURLBuilder {
return studioURLBuilder{
BaseURL: baseURL,
func NewStudioURLBuilder(baseURL string, studioID int) StudioURLBuilder {
return StudioURLBuilder{
BaseURL: baseURL,
StudioID: strconv.Itoa(studioID),
}
}

func (b studioURLBuilder) GetStudioImageUrl() string {
func (b StudioURLBuilder) GetStudioImageURL() string {
return b.BaseURL + "/studio/" + b.StudioID + "/image"
}
@@ -12,7 +12,7 @@ import (
)

type Packr2Source struct {
Box *packr.Box
Box *packr.Box
Migrations *source.Migrations
}

@@ -49,7 +49,7 @@ func Download(configDirectory string) error {
archivePath := filepath.Join(configDirectory, "ffmpeg"+urlExt)
_ = os.Remove(archivePath) // remove archive if it already exists
out, err := os.Create(archivePath)
if err != nil {
if err != nil {
return err
}
defer out.Close()

@@ -68,7 +68,7 @@ func Download(configDirectory string) error {
// Write the response to the archive file location
_, err = io.Copy(out, resp.Body)
if err != nil {
if err != nil {
return err
}

@@ -77,7 +77,7 @@ func Download(configDirectory string) error {
return err
}
} else {
return fmt.Errorf("FFMPeg was downloaded to %s. ")
return fmt.Errorf("ffmpeg was downloaded to %s", archivePath)
}

return nil

@@ -101,17 +101,15 @@ func getFFMPEGURL() string {
func getFFMPEGFilename() string {
if runtime.GOOS == "windows" {
return "ffmpeg.exe"
} else {
return "ffmpeg"
}
return "ffmpeg"
}

func getFFProbeFilename() string {
if runtime.GOOS == "windows" {
return "ffprobe.exe"
} else {
return "ffprobe"
}
return "ffprobe"
}

// Checks if FFMPEG in the path has the correct flags

@@ -150,7 +148,7 @@ func unzip(src, configDirectory string) error {
unzippedPath := filepath.Join(configDirectory, filename)
unzippedOutput, err := os.Create(unzippedPath)
if err != nil {
if err != nil {
return err
}

@@ -6,8 +6,8 @@ import (
)

type SceneMarkerOptions struct {
ScenePath string
Seconds int
ScenePath string
Seconds int
Width int
OutputPath string
}

@@ -4,10 +4,10 @@ import "fmt"

type ScreenshotOptions struct {
OutputPath string
Quality int
Time float64
Width int
Verbosity string
Quality int
Time float64
Width int
Verbosity string
}

func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) {

@@ -23,7 +23,7 @@ func IsValidCodec(codecName string) bool {
}

type VideoFile struct {
JSON FFProbeJSON
JSON FFProbeJSON
AudioStream *FFProbeStream
VideoStream *FFProbeStream

@@ -86,7 +86,7 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
result.Bitrate, _ = strconv.ParseInt(probeJSON.Format.BitRate, 10, 64)
result.Container = probeJSON.Format.FormatName
duration, _ := strconv.ParseFloat(probeJSON.Format.Duration, 64)
result.Duration = math.Round(duration*100)/100
result.Duration = math.Round(duration*100) / 100
fileStat, _ := os.Stat(filePath)
result.Size = fileStat.Size()
result.StartTime, _ = strconv.ParseFloat(probeJSON.Format.StartTime, 64)

@@ -112,7 +112,7 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
} else {
framerate, _ = strconv.ParseFloat(videoStream.AvgFrameRate, 64)
}
result.FrameRate = math.Round(framerate*100)/100
result.FrameRate = math.Round(framerate*100) / 100
if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 {
result.Width = videoStream.Height
result.Height = videoStream.Width

@@ -141,8 +141,8 @@ func (v *VideoFile) GetVideoStream() *FFProbeStream {
return nil
}

func (v *VideoFile) getStreamIndex(fileType string, probeJson FFProbeJSON) int {
for i, stream := range probeJson.Streams {
func (v *VideoFile) getStreamIndex(fileType string, probeJSON FFProbeJSON) int {
for i, stream := range probeJSON.Streams {
if stream.CodecType == fileType {
return i
}

@@ -25,7 +25,7 @@ type FFProbeJSON struct {
} `json:"tags"`
} `json:"format"`
Streams []FFProbeStream `json:"streams"`
Error struct {
Error struct {
Code int `json:"code"`
String string `json:"string"`
} `json:"error"`

@@ -59,27 +59,27 @@ type FFProbeStream struct {
TimedThumbnails int `json:"timed_thumbnails"`
VisualImpaired int `json:"visual_impaired"`
} `json:"disposition"`
Duration string `json:"duration"`
DurationTs int `json:"duration_ts"`
HasBFrames int `json:"has_b_frames,omitempty"`
Height int `json:"height,omitempty"`
Index int `json:"index"`
IsAvc string `json:"is_avc,omitempty"`
Level int `json:"level,omitempty"`
NalLengthSize string `json:"nal_length_size,omitempty"`
NbFrames string `json:"nb_frames"`
PixFmt string `json:"pix_fmt,omitempty"`
Profile string `json:"profile"`
RFrameRate string `json:"r_frame_rate"`
Refs int `json:"refs,omitempty"`
SampleAspectRatio string `json:"sample_aspect_ratio,omitempty"`
StartPts int `json:"start_pts"`
StartTime string `json:"start_time"`
Duration string `json:"duration"`
DurationTs int `json:"duration_ts"`
HasBFrames int `json:"has_b_frames,omitempty"`
Height int `json:"height,omitempty"`
Index int `json:"index"`
IsAvc string `json:"is_avc,omitempty"`
Level int `json:"level,omitempty"`
NalLengthSize string `json:"nal_length_size,omitempty"`
NbFrames string `json:"nb_frames"`
PixFmt string `json:"pix_fmt,omitempty"`
Profile string `json:"profile"`
RFrameRate string `json:"r_frame_rate"`
Refs int `json:"refs,omitempty"`
SampleAspectRatio string `json:"sample_aspect_ratio,omitempty"`
StartPts int `json:"start_pts"`
StartTime string `json:"start_time"`
Tags struct {
CreationTime time.Time `json:"creation_time"`
HandlerName string `json:"handler_name"`
Language string `json:"language"`
Rotate string `json:"rotate"`
Rotate string `json:"rotate"`
} `json:"tags"`
TimeBase string `json:"time_base"`
Width int `json:"width,omitempty"`

@@ -33,7 +33,7 @@ func init() {
func Progressf(format string, args ...interface{}) {
progressLogger.Infof(format, args...)
l := &LogItem{
Type: "progress",
Type: "progress",
Message: fmt.Sprintf(format, args...),
}
addLogItem(l)

@@ -47,8 +47,8 @@ func Trace(args ...interface{}) {
func Debug(args ...interface{}) {
logger.Debug(args...)
l := &LogItem{
Type: "debug",
Message: fmt.Sprint(args),
Type: "debug",
Message: fmt.Sprint(args...),
}
addLogItem(l)
}
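Worth noting, since this hunk changes behaviour rather than formatting: `fmt.Sprint(args)` formats the `[]interface{}` as a single bracketed value, while `fmt.Sprint(args...)` expands it so each element is formatted on its own, matching what `logger.Debug(args...)` receives. A small self-contained sketch with made-up values:

```go
package main

import "fmt"

func main() {
	args := []interface{}{"scan", "complete", 42}

	// The slice is one operand, so it prints with brackets: "[scan complete 42]"
	fmt.Println(fmt.Sprint(args))

	// Expanded, each element is its own operand: "scancomplete42"
	// (Sprint only inserts spaces between operands when neither side is a string.)
	fmt.Println(fmt.Sprint(args...))
}
```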
|
||||
@@ -56,7 +56,7 @@ func Debug(args ...interface{}) {
|
||||
func Debugf(format string, args ...interface{}) {
|
||||
logger.Debugf(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "debug",
|
||||
Type: "debug",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
@@ -65,8 +65,8 @@ func Debugf(format string, args ...interface{}) {
|
||||
func Info(args ...interface{}) {
|
||||
logger.Info(args...)
|
||||
l := &LogItem{
|
||||
Type: "info",
|
||||
Message: fmt.Sprint(args),
|
||||
Type: "info",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
@@ -74,7 +74,7 @@ func Info(args ...interface{}) {
|
||||
func Infof(format string, args ...interface{}) {
|
||||
logger.Infof(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "info",
|
||||
Type: "info",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
@@ -83,8 +83,8 @@ func Infof(format string, args ...interface{}) {
|
||||
func Warn(args ...interface{}) {
|
||||
logger.Warn(args...)
|
||||
l := &LogItem{
|
||||
Type: "warn",
|
||||
Message: fmt.Sprint(args),
|
||||
Type: "warn",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
@@ -92,7 +92,7 @@ func Warn(args ...interface{}) {
|
||||
func Warnf(format string, args ...interface{}) {
|
||||
logger.Warnf(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "warn",
|
||||
Type: "warn",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
@@ -101,8 +101,8 @@ func Warnf(format string, args ...interface{}) {
|
||||
func Error(args ...interface{}) {
|
||||
logger.Error(args...)
|
||||
l := &LogItem{
|
||||
Type: "error",
|
||||
Message: fmt.Sprint(args),
|
||||
Type: "error",
|
||||
Message: fmt.Sprint(args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
}
|
||||
@@ -110,7 +110,7 @@ func Error(args ...interface{}) {
|
||||
func Errorf(format string, args ...interface{}) {
|
||||
logger.Errorf(format, args...)
|
||||
l := &LogItem{
|
||||
Type: "error",
|
||||
Type: "error",
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
}
|
||||
addLogItem(l)
|
||||
|
||||
@@ -4,7 +4,7 @@ import (
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type ProgressFormatter struct {}
|
||||
type ProgressFormatter struct{}
|
||||
|
||||
func (f *ProgressFormatter) Format(entry *logrus.Entry) ([]byte, error) {
|
||||
msg := []byte("Processing --> " + entry.Message + "\r")
|
||||
|
||||
@@ -45,7 +45,7 @@ func (g *GeneratorInfo) configure() error {
|
||||
|
||||
numberOfFrames, _ := strconv.Atoi(videoStream.NbFrames)
|
||||
if numberOfFrames == 0 {
|
||||
command := `ffmpeg -nostats -i `+g.VideoFile.Path+` -vcodec copy -f rawvideo -y /dev/null 2>&1 | \
|
||||
command := `ffmpeg -nostats -i ` + g.VideoFile.Path + ` -vcodec copy -f rawvideo -y /dev/null 2>&1 | \
|
||||
grep frame | \
|
||||
awk '{split($0,a,"fps")}END{print a[1]}' | \
|
||||
sed 's/.*= *//'`
|
||||
|
||||
@@ -13,8 +13,8 @@ import (
|
||||
type PreviewGenerator struct {
|
||||
Info *GeneratorInfo
|
||||
|
||||
VideoFilename string
|
||||
ImageFilename string
|
||||
VideoFilename string
|
||||
ImageFilename string
|
||||
OutputDirectory string
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ func (g *PreviewGenerator) generateConcatFile() error {
|
||||
w := bufio.NewWriter(f)
|
||||
for i := 0; i < g.Info.ChunkCount; i++ {
|
||||
num := fmt.Sprintf("%.3d", i)
|
||||
filename := "preview"+num+".mp4"
|
||||
filename := "preview" + num + ".mp4"
|
||||
_, _ = w.WriteString(fmt.Sprintf("file '%s'\n", filename))
|
||||
}
|
||||
return w.Flush()
|
||||
@@ -84,12 +84,12 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder) error {
|
||||
for i := 0; i < g.Info.ChunkCount; i++ {
|
||||
time := i * stepSize
|
||||
num := fmt.Sprintf("%.3d", i)
|
||||
filename := "preview"+num+".mp4"
|
||||
filename := "preview" + num + ".mp4"
|
||||
chunkOutputPath := instance.Paths.Generated.GetTmpPath(filename)
|
||||
|
||||
options := ffmpeg.ScenePreviewChunkOptions{
|
||||
Time: time,
|
||||
Width: 640,
|
||||
Time: time,
|
||||
Width: 640,
|
||||
OutputPath: chunkOutputPath,
|
||||
}
|
||||
encoder.ScenePreviewVideoChunk(g.Info.VideoFile, options)
|
||||
@@ -112,12 +112,11 @@ func (g *PreviewGenerator) generateImage(encoder *ffmpeg.Encoder) error {
|
||||
tmpOutputPath := instance.Paths.Generated.GetTmpPath(g.ImageFilename)
|
||||
if err := encoder.ScenePreviewVideoToImage(g.Info.VideoFile, 640, videoPreviewPath, tmpOutputPath); err != nil {
|
||||
return err
|
||||
} else {
|
||||
if err := os.Rename(tmpOutputPath, outputPath); err != nil {
|
||||
return err
|
||||
}
|
||||
logger.Debug("created video preview image: ", outputPath)
|
||||
}
|
||||
if err := os.Rename(tmpOutputPath, outputPath); err != nil {
|
||||
return err
|
||||
}
|
||||
logger.Debug("created video preview image: ", outputPath)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -97,7 +97,7 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error {
|
||||
montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{})
|
||||
for index := 0; index < len(images); index++ {
|
||||
x := width * (index % g.Columns)
|
||||
y := height * int(math.Floor(float64(index) / float64(g.Rows)))
|
||||
y := height * int(math.Floor(float64(index)/float64(g.Rows)))
|
||||
img := images[index]
|
||||
montage = imaging.Paste(montage, img, image.Pt(x, y))
|
||||
}
|
||||
@@ -121,11 +121,11 @@ func (g *SpriteGenerator) generateSpriteVTT(encoder *ffmpeg.Encoder) error {
|
||||
vttLines := []string{"WEBVTT", ""}
|
||||
for index := 0; index < g.Info.ChunkCount; index++ {
|
||||
x := width * (index % g.Columns)
|
||||
y := height * int(math.Floor(float64(index) / float64(g.Rows)))
|
||||
y := height * int(math.Floor(float64(index)/float64(g.Rows)))
|
||||
startTime := utils.GetVTTTime(float64(index) * stepSize)
|
||||
endTime := utils.GetVTTTime(float64(index + 1) * stepSize)
|
||||
endTime := utils.GetVTTTime(float64(index+1) * stepSize)
|
||||
|
||||
vttLines = append(vttLines, startTime + " --> " + endTime)
|
||||
vttLines = append(vttLines, startTime+" --> "+endTime)
|
||||
vttLines = append(vttLines, fmt.Sprintf("%s#xywh=%d,%d,%d,%d", spriteImageName, x, y, width, height))
|
||||
vttLines = append(vttLines, "")
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import (
"github.com/stashapp/stash/manager/jsonschema"
)

type jsonUtils struct {}
type jsonUtils struct{}

func (jp *jsonUtils) getMappings() (*jsonschema.Mappings, error) {
return jsonschema.LoadMappingsFile(instance.Paths.JSON.MappingsFile)

@@ -8,7 +8,7 @@ import (

type Performer struct {
Name string `json:"name,omitempty"`
Url string `json:"url,omitempty"`
URL string `json:"url,omitempty"`
Twitter string `json:"twitter,omitempty"`
Instagram string `json:"instagram,omitempty"`
Birthdate string `json:"birthdate,omitempty"`

@@ -27,7 +27,7 @@ type SceneFile struct {
type Scene struct {
Title string `json:"title,omitempty"`
Studio string `json:"studio,omitempty"`
Url string `json:"url,omitempty"`
URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"`
Details string `json:"details,omitempty"`

@@ -9,16 +9,16 @@ import (
type ScrapedItem struct {
Title string `json:"title,omitempty"`
Description string `json:"description,omitempty"`
Url string `json:"url,omitempty"`
URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"`
Rating string `json:"rating,omitempty"`
Tags string `json:"tags,omitempty"`
Models string `json:"models,omitempty"`
Episode int `json:"episode,omitempty"`
GalleryFilename string `json:"gallery_filename,omitempty"`
GalleryUrl string `json:"gallery_url,omitempty"`
GalleryURL string `json:"gallery_url,omitempty"`
VideoFilename string `json:"video_filename,omitempty"`
VideoUrl string `json:"video_url,omitempty"`
VideoURL string `json:"video_url,omitempty"`
Studio string `json:"studio,omitempty"`
UpdatedAt RailsTime `json:"updated_at,omitempty"`
}

@@ -8,7 +8,7 @@ import (

type Studio struct {
Name string `json:"name,omitempty"`
Url string `json:"url,omitempty"`
URL string `json:"url,omitempty"`
Image string `json:"image,omitempty"`
}
@@ -20,10 +20,7 @@ func marshalToFile(filePath string, j interface{}) error {
if err != nil {
return err
}
if err := ioutil.WriteFile(filePath, data, 0755); err != nil {
return err
}
return nil
return ioutil.WriteFile(filePath, data, 0755)
}

func encode(j interface{}) ([]byte, error) {

@@ -10,7 +10,9 @@ import (
)

func (s *singleton) Scan() {
if s.Status != Idle { return }
if s.Status != Idle {
return
}
s.Status = Scan

go func() {
@@ -31,7 +33,9 @@ func (s *singleton) Scan() {
}

func (s *singleton) Import() {
if s.Status != Idle { return }
if s.Status != Idle {
return
}
s.Status = Import

go func() {
@@ -46,7 +50,9 @@ func (s *singleton) Import() {
}

func (s *singleton) Export() {
if s.Status != Idle { return }
if s.Status != Idle {
return
}
s.Status = Export

go func() {
@@ -61,7 +67,9 @@ func (s *singleton) Export() {
}

func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcodes bool) {
if s.Status != Idle { return }
if s.Status != Idle {
return
}
s.Status = Generate

qb := models.NewSceneQueryBuilder()
@@ -108,7 +116,7 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
}

func (s *singleton) returnToIdleState() {
if r := recover(); r!= nil {
if r := recover(); r != nil {
logger.Info("recovered from ", r)
}
@@ -5,12 +5,12 @@ import (
)

type Paths struct {
Config *jsonschema.Config
Generated *generatedPaths
JSON *jsonPaths
Config *jsonschema.Config
Generated *generatedPaths
JSON *jsonPaths

Gallery *galleryPaths
Scene *scenePaths
Gallery *galleryPaths
Scene *scenePaths
SceneMarkers *sceneMarkerPaths
}

@@ -26,13 +26,13 @@ func newJSONPaths(p Paths) *jsonPaths {
}

func (jp *jsonPaths) PerformerJSONPath(checksum string) string {
return filepath.Join(jp.Performers, checksum + ".json")
return filepath.Join(jp.Performers, checksum+".json")
}

func (jp *jsonPaths) SceneJSONPath(checksum string) string {
return filepath.Join(jp.Scenes, checksum + ".json")
return filepath.Join(jp.Scenes, checksum+".json")
}

func (jp *jsonPaths) StudioJSONPath(checksum string) string {
return filepath.Join(jp.Studios, checksum + ".json")
return filepath.Join(jp.Studios, checksum+".json")
}

@@ -16,9 +16,9 @@ func newSceneMarkerPaths(p Paths) *sceneMarkerPaths {
}

func (sp *sceneMarkerPaths) GetStreamPath(checksum string, seconds int) string {
return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds) + ".mp4")
return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".mp4")
}

func (sp *sceneMarkerPaths) GetStreamPreviewImagePath(checksum string, seconds int) string {
return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds) + ".webp")
return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".webp")
}

@@ -32,9 +32,8 @@ func (sp *scenePaths) GetStreamPath(scenePath string, checksum string) string {
transcodeExists, _ := utils.FileExists(transcodePath)
if transcodeExists {
return transcodePath
} else {
return scenePath
}
return scenePath
}

func (sp *scenePaths) GetStreamPreviewPath(checksum string) string {
@@ -18,9 +18,9 @@ type StaticPathsType struct {

var StaticPaths = StaticPathsType{
ExecutionDirectory: getExecutionDirectory(),
ConfigDirectory: getConfigDirectory(),
ConfigFile: filepath.Join(getConfigDirectory(), "config.json"),
DatabaseFile: filepath.Join(getConfigDirectory(), "stash-go.sqlite"),
ConfigDirectory: getConfigDirectory(),
ConfigFile: filepath.Join(getConfigDirectory(), "config.json"),
DatabaseFile: filepath.Join(getConfigDirectory(), "stash-go.sqlite"),
}

func getExecutionDirectory() string {
@@ -15,7 +15,7 @@ import (

type ExportTask struct {
Mappings *jsonschema.Mappings
Scraped []jsonschema.ScrapedItem
Scraped []jsonschema.ScrapedItem
}

func (t *ExportTask) Start(wg *sync.WaitGroup) {
@@ -59,7 +59,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
index := i + 1
logger.Progressf("[scenes] %d of %d", index, len(scenes))

t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{ Path: scene.Path, Checksum: scene.Checksum })
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.Checksum})
newSceneJSON := jsonschema.Scene{}

var studioName string
@@ -86,8 +86,8 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
if studioName != "" {
newSceneJSON.Studio = studioName
}
if scene.Url.Valid {
newSceneJSON.Url = scene.Url.String
if scene.URL.Valid {
newSceneJSON.URL = scene.URL.String
}
if scene.Date.Valid {
newSceneJSON.Date = utils.GetYMDFromDatabaseDate(scene.Date.String)
@@ -120,15 +120,15 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", scene.Checksum, err.Error())
continue
}
if sceneMarker.Title == "" || sceneMarker.Seconds == 0 || primaryTag.Name == "" {
if sceneMarker.Title == "" || sceneMarker.Seconds == 0 || primaryTag.Name == "" {
logger.Errorf("[scenes] invalid scene marker: %v", sceneMarker)
}

sceneMarkerJSON := jsonschema.SceneMarker{
Title: sceneMarker.Title,
Seconds: t.getDecimalString(sceneMarker.Seconds),
Title: sceneMarker.Title,
Seconds: t.getDecimalString(sceneMarker.Seconds),
PrimaryTag: primaryTag.Name,
Tags: t.getTagNames(sceneMarkerTags),
Tags: t.getTagNames(sceneMarkerTags),
}

newSceneJSON.Markers = append(newSceneJSON.Markers, sceneMarkerJSON)
@@ -187,7 +187,7 @@ func (t *ExportTask) ExportGalleries(ctx context.Context) {
for i, gallery := range galleries {
index := i + 1
logger.Progressf("[galleries] %d of %d", index, len(galleries))
t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{ Path: gallery.Path, Checksum: gallery.Checksum })
t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{Path: gallery.Path, Checksum: gallery.Checksum})
}

logger.Infof("[galleries] export complete")
@@ -206,15 +206,15 @@ func (t *ExportTask) ExportPerformers(ctx context.Context) {
index := i + 1
logger.Progressf("[performers] %d of %d", index, len(performers))

t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{ Name: performer.Name.String, Checksum: performer.Checksum })
t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{Name: performer.Name.String, Checksum: performer.Checksum})

newPerformerJSON := jsonschema.Performer{}

if performer.Name.Valid {
newPerformerJSON.Name = performer.Name.String
}
if performer.Url.Valid {
newPerformerJSON.Url = performer.Url.String
if performer.URL.Valid {
newPerformerJSON.URL = performer.URL.String
}
if performer.Birthdate.Valid {
newPerformerJSON.Birthdate = utils.GetYMDFromDatabaseDate(performer.Birthdate.String)
@@ -289,15 +289,15 @@ func (t *ExportTask) ExportStudios(ctx context.Context) {
index := i + 1
logger.Progressf("[studios] %d of %d", index, len(studios))

t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{ Name: studio.Name.String, Checksum: studio.Checksum })
t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{Name: studio.Name.String, Checksum: studio.Checksum})

newStudioJSON := jsonschema.Studio{}

if studio.Name.Valid {
newStudioJSON.Name = studio.Name.String
}
if studio.Url.Valid {
newStudioJSON.Url = studio.Url.String
if studio.URL.Valid {
newStudioJSON.URL = studio.URL.String
}

newStudioJSON.Image = utils.GetBase64StringFromData(studio.Image)
@@ -349,8 +349,8 @@ func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
if scrapedItem.Description.Valid {
newScrapedItemJSON.Description = scrapedItem.Description.String
}
if scrapedItem.Url.Valid {
newScrapedItemJSON.Url = scrapedItem.Url.String
if scrapedItem.URL.Valid {
newScrapedItemJSON.URL = scrapedItem.URL.String
}
if scrapedItem.Date.Valid {
newScrapedItemJSON.Date = utils.GetYMDFromDatabaseDate(scrapedItem.Date.String)
@@ -370,18 +370,18 @@ func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
if scrapedItem.GalleryFilename.Valid {
newScrapedItemJSON.GalleryFilename = scrapedItem.GalleryFilename.String
}
if scrapedItem.GalleryUrl.Valid {
newScrapedItemJSON.GalleryUrl = scrapedItem.GalleryUrl.String
if scrapedItem.GalleryURL.Valid {
newScrapedItemJSON.GalleryURL = scrapedItem.GalleryURL.String
}
if scrapedItem.VideoFilename.Valid {
newScrapedItemJSON.VideoFilename = scrapedItem.VideoFilename.String
}
if scrapedItem.VideoUrl.Valid {
newScrapedItemJSON.VideoUrl = scrapedItem.VideoUrl.String
if scrapedItem.VideoURL.Valid {
newScrapedItemJSON.VideoURL = scrapedItem.VideoURL.String
}

newScrapedItemJSON.Studio = studioName
updatedAt := jsonschema.RailsTime{ Time: scrapedItem.UpdatedAt.Timestamp } // TODO keeping ruby format
updatedAt := jsonschema.RailsTime{Time: scrapedItem.UpdatedAt.Timestamp} // TODO keeping ruby format
newScrapedItemJSON.UpdatedAt = updatedAt

t.Scraped = append(t.Scraped, newScrapedItemJSON)
@@ -449,9 +449,9 @@ func getPrecision(num float64) int {

e := 1.0
p := 0
for (math.Round(num * e) / e) != num {
for (math.Round(num*e) / e) != num {
e *= 10
p += 1
p++
}
return p
}
@@ -51,8 +51,8 @@ func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) {

options := ffmpeg.SceneMarkerOptions{
ScenePath: t.Scene.Path,
Seconds: seconds,
Width: 640,
Seconds: seconds,
Width: 640,
}
if !videoExists {
options.OutputPath = instance.Paths.Generated.GetTmpPath(videoFilename) // tmp output in case the process ends abruptly
@@ -16,7 +16,7 @@ import (

type ImportTask struct {
Mappings *jsonschema.Mappings
Scraped []jsonschema.ScrapedItem
Scraped []jsonschema.ScrapedItem
}

func (t *ImportTask) Start(wg *sync.WaitGroup) {
@@ -57,7 +57,9 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
logger.Errorf("[performers] failed to read json: %s", err.Error())
continue
}
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil { return }
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil {
return
}

logger.Progressf("[performers] %d of %d", index, len(t.Mappings.Performers))

@@ -72,57 +74,57 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
// Populate a new performer from the input
currentTime := time.Now()
newPerformer := models.Performer{
Image: imageData,
Checksum: checksum,
Favorite: sql.NullBool{ Bool: performerJSON.Favorite, Valid: true },
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Image: imageData,
Checksum: checksum,
Favorite: sql.NullBool{Bool: performerJSON.Favorite, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

if performerJSON.Name != "" {
newPerformer.Name = sql.NullString{ String: performerJSON.Name, Valid: true }
newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true}
}
if performerJSON.Url != "" {
newPerformer.Url = sql.NullString{ String: performerJSON.Url, Valid: true }
if performerJSON.URL != "" {
newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true}
}
if performerJSON.Birthdate != "" {
newPerformer.Birthdate = sql.NullString{ String: performerJSON.Birthdate, Valid: true }
newPerformer.Birthdate = sql.NullString{String: performerJSON.Birthdate, Valid: true}
}
if performerJSON.Ethnicity != "" {
newPerformer.Ethnicity = sql.NullString{ String: performerJSON.Ethnicity, Valid: true }
newPerformer.Ethnicity = sql.NullString{String: performerJSON.Ethnicity, Valid: true}
}
if performerJSON.Country != "" {
newPerformer.Country = sql.NullString{ String: performerJSON.Country, Valid: true }
newPerformer.Country = sql.NullString{String: performerJSON.Country, Valid: true}
}
if performerJSON.EyeColor != "" {
newPerformer.EyeColor = sql.NullString{ String: performerJSON.EyeColor, Valid: true }
newPerformer.EyeColor = sql.NullString{String: performerJSON.EyeColor, Valid: true}
}
if performerJSON.Height != "" {
newPerformer.Height = sql.NullString{ String: performerJSON.Height, Valid: true }
newPerformer.Height = sql.NullString{String: performerJSON.Height, Valid: true}
}
if performerJSON.Measurements != "" {
newPerformer.Measurements = sql.NullString{ String: performerJSON.Measurements, Valid: true }
newPerformer.Measurements = sql.NullString{String: performerJSON.Measurements, Valid: true}
}
if performerJSON.FakeTits != "" {
newPerformer.FakeTits = sql.NullString{ String: performerJSON.FakeTits, Valid: true }
newPerformer.FakeTits = sql.NullString{String: performerJSON.FakeTits, Valid: true}
}
if performerJSON.CareerLength != "" {
newPerformer.CareerLength = sql.NullString{ String: performerJSON.CareerLength, Valid: true }
newPerformer.CareerLength = sql.NullString{String: performerJSON.CareerLength, Valid: true}
}
if performerJSON.Tattoos != "" {
newPerformer.Tattoos = sql.NullString{ String: performerJSON.Tattoos, Valid: true }
newPerformer.Tattoos = sql.NullString{String: performerJSON.Tattoos, Valid: true}
}
if performerJSON.Piercings != "" {
newPerformer.Piercings = sql.NullString{ String: performerJSON.Piercings, Valid: true }
newPerformer.Piercings = sql.NullString{String: performerJSON.Piercings, Valid: true}
}
if performerJSON.Aliases != "" {
newPerformer.Aliases = sql.NullString{ String: performerJSON.Aliases, Valid: true }
newPerformer.Aliases = sql.NullString{String: performerJSON.Aliases, Valid: true}
}
if performerJSON.Twitter != "" {
newPerformer.Twitter = sql.NullString{ String: performerJSON.Twitter, Valid: true }
newPerformer.Twitter = sql.NullString{String: performerJSON.Twitter, Valid: true}
}
if performerJSON.Instagram != "" {
newPerformer.Instagram = sql.NullString{ String: performerJSON.Instagram, Valid: true }
newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true}
}

_, err = qb.Create(newPerformer, tx)
@@ -151,7 +153,9 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
logger.Errorf("[studios] failed to read json: %s", err.Error())
continue
}
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil { return }
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil {
return
}

logger.Progressf("[studios] %d of %d", index, len(t.Mappings.Studios))

@@ -166,12 +170,12 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
// Populate a new studio from the input
currentTime := time.Now()
newStudio := models.Studio{
Image: imageData,
Checksum: checksum,
Name: sql.NullString{ String: studioJSON.Name, Valid: true },
Url: sql.NullString{ String: studioJSON.Url, Valid: true },
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Image: imageData,
Checksum: checksum,
Name: sql.NullString{String: studioJSON.Name, Valid: true},
URL: sql.NullString{String: studioJSON.URL, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

_, err = qb.Create(newStudio, tx)
@@ -195,17 +199,19 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) {
for i, mappingJSON := range t.Mappings.Galleries {
index := i + 1
if mappingJSON.Checksum == "" || mappingJSON.Path == "" { return }
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
return
}

logger.Progressf("[galleries] %d of %d", index, len(t.Mappings.Galleries))

// Populate a new gallery from the input
currentTime := time.Now()
newGallery := models.Gallery{
Checksum: mappingJSON.Checksum,
Path: mappingJSON.Path,
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Checksum: mappingJSON.Checksum,
Path: mappingJSON.Path,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

_, err := qb.Create(newGallery, tx)
@@ -254,7 +260,9 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
}

// Get the tags from the markers if we have marker json
if len(sceneJSON.Markers) == 0 { continue }
if len(sceneJSON.Markers) == 0 {
continue
}
for _, markerJSON := range sceneJSON.Markers {
if markerJSON.PrimaryTag != "" {
tagNames = append(tagNames, markerJSON.PrimaryTag)
@@ -269,9 +277,9 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
for _, tagName := range uniqueTagNames {
currentTime := time.Now()
newTag := models.Tag{
Name: tagName,
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Name: tagName,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

_, err := qb.Create(newTag, tx)
@@ -306,20 +314,20 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
updatedAt = mappingJSON.UpdatedAt.Time
}
newScrapedItem := models.ScrapedItem{
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
Url: sql.NullString{String: mappingJSON.Url, Valid: true},
Date: sql.NullString{String: mappingJSON.Date, Valid: true},
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
URL: sql.NullString{String: mappingJSON.URL, Valid: true},
Date: sql.NullString{String: mappingJSON.Date, Valid: true},
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
GalleryFilename: sql.NullString{String: mappingJSON.GalleryFilename, Valid: true},
GalleryUrl: sql.NullString{String: mappingJSON.GalleryUrl, Valid: true},
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
VideoUrl: sql.NullString{String: mappingJSON.VideoUrl, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedAt},
GalleryURL: sql.NullString{String: mappingJSON.GalleryURL, Valid: true},
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
VideoURL: sql.NullString{String: mappingJSON.VideoURL, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedAt},
}

studio, err := sqb.FindByName(mappingJSON.Studio, tx)
@@ -332,7 +340,7 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {

_, err = qb.Create(newScrapedItem, tx)
if err != nil {
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title, err.Error())
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title.String, err.Error())
}
}

@@ -360,10 +368,10 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes))

newScene := models.Scene{
Checksum: mappingJSON.Checksum,
Path: mappingJSON.Path,
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Checksum: mappingJSON.Checksum,
Path: mappingJSON.Path,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
@@ -380,8 +388,8 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if sceneJSON.Details != "" {
newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true}
}
if sceneJSON.Url != "" {
newScene.Url = sql.NullString{String: sceneJSON.Url, Valid: true}
if sceneJSON.URL != "" {
newScene.URL = sql.NullString{String: sceneJSON.URL, Valid: true}
}
if sceneJSON.Date != "" {
newScene.Date = sql.NullString{String: sceneJSON.Date, Valid: true}
@@ -427,9 +435,9 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
sqb := models.NewStudioQueryBuilder()
studio, err := sqb.FindByName(sceneJSON.Studio, tx)
if err != nil {
logger.Warn("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err)
logger.Warnf("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err.Error())
} else {
newScene.StudioID = sql.NullInt64{ Int64: int64(studio.ID), Valid: true }
newScene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
}
}

@@ -451,9 +459,9 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
gqb := models.NewGalleryQueryBuilder()
gallery, err := gqb.FindByChecksum(sceneJSON.Gallery, tx)
if err != nil {
logger.Warn("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err)
logger.Warnf("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err.Error())
} else {
gallery.SceneID = sql.NullInt64{ Int64: int64(scene.ID), Valid: true }
gallery.SceneID = sql.NullInt64{Int64: int64(scene.ID), Valid: true}
_, err := gqb.Update(*gallery, tx)
if err != nil {
logger.Errorf("[scenes] <%s> failed to update gallery: %s", scene.Checksum, err.Error())
@@ -465,13 +473,13 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if len(sceneJSON.Performers) > 0 {
performers, err := t.getPerformers(sceneJSON.Performers, tx)
if err != nil {
logger.Warn("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err)
logger.Warnf("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err.Error())
} else {
var performerJoins []models.PerformersScenes
for _, performer := range performers {
join := models.PerformersScenes{
PerformerID: performer.ID,
SceneID: scene.ID,
SceneID: scene.ID,
}
performerJoins = append(performerJoins, join)
}
@@ -485,13 +493,13 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if len(sceneJSON.Tags) > 0 {
tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx)
if err != nil {
logger.Warn("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err)
logger.Warnf("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err.Error())
} else {
var tagJoins []models.ScenesTags
for _, tag := range tags {
join := models.ScenesTags{
SceneID: scene.ID,
TagID: tag.ID,
TagID: tag.ID,
}
tagJoins = append(tagJoins, join)
}
@@ -508,11 +516,11 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
for _, marker := range sceneJSON.Markers {
seconds, _ := strconv.ParseFloat(marker.Seconds, 64)
newSceneMarker := models.SceneMarker{
Title: marker.Title,
Seconds: seconds,
SceneID: sql.NullInt64{Int64: int64(scene.ID), Valid: true},
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Title: marker.Title,
Seconds: seconds,
SceneID: sql.NullInt64{Int64: int64(scene.ID), Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}

primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx)
@@ -536,13 +544,13 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
// Get the scene marker tags and create the joins
tags, err := t.getTags(scene.Checksum, marker.Tags, tx)
if err != nil {
logger.Warn("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err)
logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err.Error())
} else {
var tagJoins []models.SceneMarkersTags
for _, tag := range tags {
join := models.SceneMarkersTags{
SceneMarkerID: sceneMarker.ID,
TagID: tag.ID,
TagID: tag.ID,
}
tagJoins = append(tagJoins, join)
}
@@ -53,10 +53,10 @@ func (t *ScanTask) scanGallery() {
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
currentTime := time.Now()
newGallery := models.Gallery{
Checksum: checksum,
Path: t.FilePath,
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Checksum: checksum,
Path: t.FilePath,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
_, err = qb.Create(newGallery, tx)
}
@@ -102,18 +102,18 @@ func (t *ScanTask) scanScene() {
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
currentTime := time.Now()
newScene := models.Scene{
Checksum: checksum,
Path: t.FilePath,
Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true },
VideoCodec: sql.NullString{ String: videoFile.VideoCodec, Valid: true},
AudioCodec: sql.NullString{ String: videoFile.AudioCodec, Valid: true},
Width: sql.NullInt64{ Int64: int64(videoFile.Width), Valid: true },
Height: sql.NullInt64{ Int64: int64(videoFile.Height), Valid: true },
Framerate: sql.NullFloat64{ Float64: videoFile.FrameRate, Valid: true },
Bitrate: sql.NullInt64{ Int64: videoFile.Bitrate, Valid: true },
Size: sql.NullString{ String: strconv.Itoa(int(videoFile.Size)), Valid: true },
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
Checksum: checksum,
Path: t.FilePath,
Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
VideoCodec: sql.NullString{String: videoFile.VideoCodec, Valid: true},
AudioCodec: sql.NullString{String: videoFile.AudioCodec, Valid: true},
Width: sql.NullInt64{Int64: int64(videoFile.Width), Valid: true},
Height: sql.NullInt64{Int64: int64(videoFile.Height), Valid: true},
Framerate: sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
Bitrate: sql.NullInt64{Int64: videoFile.Bitrate, Valid: true},
Size: sql.NullString{String: strconv.Itoa(int(videoFile.Size)), Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
_, err = qb.Create(newScene, tx)
}
@@ -145,9 +145,9 @@ func (t *ScanTask) makeScreenshot(probeResult ffmpeg.VideoFile, outputPath strin
encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
options := ffmpeg.ScreenshotOptions{
OutputPath: outputPath,
Quality: quality,
Time: float64(probeResult.Duration) * 0.2,
Width: width,
Quality: quality,
Time: float64(probeResult.Duration) * 0.2,
Width: width,
}
encoder.Screenshot(probeResult, options)
}
@@ -32,7 +32,7 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
return
}

outputPath := instance.Paths.Generated.GetTmpPath(t.Scene.Checksum+".mp4")
outputPath := instance.Paths.Generated.GetTmpPath(t.Scene.Checksum + ".mp4")
options := ffmpeg.TranscodeOptions{
OutputPath: outputPath,
}
@@ -42,6 +42,6 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
logger.Errorf("[transcode] error generating transcode: %s", err.Error())
return
}
logger.Debug("[transcode] <%s> created transcode: ", t.Scene.Checksum)
logger.Debugf("[transcode] <%s> created transcode: %s", t.Scene.Checksum, outputPath)
return
}
@@ -10,7 +10,7 @@ func IsStreamable(scene *models.Scene) (bool, error) {
if scene == nil {
return false, fmt.Errorf("nil scene")
}
fileType, err := utils.FileType(scene.Path)
fileType, err := utils.FileType(scene.Path)
if err != nil {
return false, err
}
@@ -25,7 +25,7 @@ type Gallery struct {
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}

func (g *Gallery) GetFiles(baseUrl string) []GalleryFilesType {
func (g *Gallery) GetFiles(baseURL string) []GalleryFilesType {
var galleryFiles []GalleryFilesType
filteredFiles, readCloser, err := g.listZipContents()
if err != nil {
@@ -33,9 +33,9 @@ func (g *Gallery) GetFiles(baseUrl string) []GalleryFilesType {
}
defer readCloser.Close()

builder := urlbuilders.NewGalleryURLBuilder(baseUrl, g.ID)
builder := urlbuilders.NewGalleryURLBuilder(baseURL, g.ID)
for i, file := range filteredFiles {
galleryURL := builder.GetGalleryImageUrl(i)
galleryURL := builder.GetGalleryImageURL(i)
galleryFile := GalleryFilesType{
Index: i,
Name: &file.Name,

@@ -9,7 +9,7 @@ type Performer struct {
Image []byte `db:"image" json:"image"`
Checksum string `db:"checksum" json:"checksum"`
Name sql.NullString `db:"name" json:"name"`
Url sql.NullString `db:"url" json:"url"`
URL sql.NullString `db:"url" json:"url"`
Twitter sql.NullString `db:"twitter" json:"twitter"`
Instagram sql.NullString `db:"instagram" json:"instagram"`
Birthdate sql.NullString `db:"birthdate" json:"birthdate"` // TODO dates?

@@ -10,7 +10,7 @@ type Scene struct {
Path string `db:"path" json:"path"`
Title sql.NullString `db:"title" json:"title"`
Details sql.NullString `db:"details" json:"details"`
Url sql.NullString `db:"url" json:"url"`
URL sql.NullString `db:"url" json:"url"`
Date sql.NullString `db:"date" json:"date"` // TODO dates?
Rating sql.NullInt64 `db:"rating" json:"rating"`
Size sql.NullString `db:"size" json:"size"`

@@ -8,16 +8,16 @@ type ScrapedItem struct {
ID int `db:"id" json:"id"`
Title sql.NullString `db:"title" json:"title"`
Description sql.NullString `db:"description" json:"description"`
Url sql.NullString `db:"url" json:"url"`
URL sql.NullString `db:"url" json:"url"`
Date sql.NullString `db:"date" json:"date"` // TODO dates?
Rating sql.NullString `db:"rating" json:"rating"`
Tags sql.NullString `db:"tags" json:"tags"`
Models sql.NullString `db:"models" json:"models"`
Episode sql.NullInt64 `db:"episode" json:"episode"`
GalleryFilename sql.NullString `db:"gallery_filename" json:"gallery_filename"`
GalleryUrl sql.NullString `db:"gallery_url" json:"gallery_url"`
GalleryURL sql.NullString `db:"gallery_url" json:"gallery_url"`
VideoFilename sql.NullString `db:"video_filename" json:"video_filename"`
VideoUrl sql.NullString `db:"video_url" json:"video_url"`
VideoURL sql.NullString `db:"video_url" json:"video_url"`
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`

@@ -9,7 +9,7 @@ type Studio struct {
Image []byte `db:"image" json:"image"`
Checksum string `db:"checksum" json:"checksum"`
Name sql.NullString `db:"name" json:"name"`
Url sql.NullString `db:"url" json:"url"`
URL sql.NullString `db:"url" json:"url"`
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
@@ -7,13 +7,13 @@ import (
"path/filepath"
)

type galleryQueryBuilder struct {}
type GalleryQueryBuilder struct{}

func NewGalleryQueryBuilder() galleryQueryBuilder {
return galleryQueryBuilder{}
func NewGalleryQueryBuilder() GalleryQueryBuilder {
return GalleryQueryBuilder{}
}

func (qb *galleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
func (qb *GalleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
ensureTx(tx)
result, err := tx.NamedExec(
`INSERT INTO galleries (path, checksum, scene_id, created_at, updated_at)
@@ -34,10 +34,10 @@ func (qb *galleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery
return &newGallery, nil
}

func (qb *galleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
func (qb *GalleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
ensureTx(tx)
_, err := tx.NamedExec(
`UPDATE galleries SET `+SqlGenKeys(updatedGallery)+` WHERE galleries.id = :id`,
`UPDATE galleries SET `+SQLGenKeys(updatedGallery)+` WHERE galleries.id = :id`,
updatedGallery,
)
if err != nil {
@@ -50,45 +50,45 @@ func (qb *galleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gal
return &updatedGallery, nil
}

func (qb *galleryQueryBuilder) Find(id int) (*Gallery, error) {
func (qb *GalleryQueryBuilder) Find(id int) (*Gallery, error) {
query := "SELECT * FROM galleries WHERE id = ? LIMIT 1"
args := []interface{}{id}
return qb.queryGallery(query, args, nil)
}

func (qb *galleryQueryBuilder) FindByChecksum(checksum string, tx *sqlx.Tx) (*Gallery, error) {
func (qb *GalleryQueryBuilder) FindByChecksum(checksum string, tx *sqlx.Tx) (*Gallery, error) {
query := "SELECT * FROM galleries WHERE checksum = ? LIMIT 1"
args := []interface{}{checksum}
return qb.queryGallery(query, args, tx)
}

func (qb *galleryQueryBuilder) FindByPath(path string) (*Gallery, error) {
func (qb *GalleryQueryBuilder) FindByPath(path string) (*Gallery, error) {
query := "SELECT * FROM galleries WHERE path = ? LIMIT 1"
args := []interface{}{path}
return qb.queryGallery(query, args, nil)
}

func (qb *galleryQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) (*Gallery, error) {
func (qb *GalleryQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) (*Gallery, error) {
query := "SELECT galleries.* FROM galleries JOIN scenes ON scenes.id = galleries.scene_id WHERE scenes.id = ? LIMIT 1"
args := []interface{}{sceneID}
return qb.queryGallery(query, args, tx)
}

func (qb *galleryQueryBuilder) ValidGalleriesForScenePath(scenePath string) ([]Gallery, error) {
func (qb *GalleryQueryBuilder) ValidGalleriesForScenePath(scenePath string) ([]Gallery, error) {
sceneDirPath := filepath.Dir(scenePath)
query := "SELECT galleries.* FROM galleries WHERE galleries.scene_id IS NULL AND galleries.path LIKE '" + sceneDirPath + "%' ORDER BY path ASC"
return qb.queryGalleries(query, nil, nil)
}

func (qb *galleryQueryBuilder) Count() (int, error) {
func (qb *GalleryQueryBuilder) Count() (int, error) {
return runCountQuery(buildCountQuery("SELECT galleries.id FROM galleries"), nil)
}

func (qb *galleryQueryBuilder) All() ([]Gallery, error) {
return qb.queryGalleries(selectAll("galleries") + qb.getGallerySort(nil), nil, nil)
func (qb *GalleryQueryBuilder) All() ([]Gallery, error) {
return qb.queryGalleries(selectAll("galleries")+qb.getGallerySort(nil), nil, nil)
}

func (qb *galleryQueryBuilder) Query(findFilter *FindFilterType) ([]Gallery, int) {
func (qb *GalleryQueryBuilder) Query(findFilter *FindFilterType) ([]Gallery, int) {
if findFilter == nil {
findFilter = &FindFilterType{}
}
@@ -115,12 +115,12 @@ func (qb *galleryQueryBuilder) Query(findFilter *FindFilterType) ([]Gallery, int
return galleries, countResult
}

func (qb *galleryQueryBuilder) getGallerySort(findFilter *FindFilterType) string {
func (qb *GalleryQueryBuilder) getGallerySort(findFilter *FindFilterType) string {
var sort string
var direction string
//if findFilter == nil { // TODO temp until title is removed from schema and UI
sort = "path"
direction = "ASC"
sort = "path"
direction = "ASC"
//} else {
// sort = findFilter.getSort("path")
// direction = findFilter.getDirection()
@@ -128,7 +128,7 @@ func (qb *galleryQueryBuilder) getGallerySort(findFilter *FindFilterType) string
return getSort(sort, direction, "galleries")
}

func (qb *galleryQueryBuilder) queryGallery(query string, args []interface{}, tx *sqlx.Tx) (*Gallery, error) {
func (qb *GalleryQueryBuilder) queryGallery(query string, args []interface{}, tx *sqlx.Tx) (*Gallery, error) {
results, err := qb.queryGalleries(query, args, tx)
if err != nil || len(results) < 1 {
return nil, err
@@ -136,7 +136,7 @@ func (qb *galleryQueryBuilder) queryGallery(query string, args []interface{}, tx
return &results[0], nil
}

func (qb *galleryQueryBuilder) queryGalleries(query string, args []interface{}, tx *sqlx.Tx) ([]Gallery, error) {
func (qb *GalleryQueryBuilder) queryGalleries(query string, args []interface{}, tx *sqlx.Tx) ([]Gallery, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
@@ -2,13 +2,13 @@ package models

import "github.com/jmoiron/sqlx"

type joinsQueryBuilder struct {}
type JoinsQueryBuilder struct{}

func NewJoinsQueryBuilder() joinsQueryBuilder {
return joinsQueryBuilder{}
func NewJoinsQueryBuilder() JoinsQueryBuilder {
return JoinsQueryBuilder{}
}

func (qb *joinsQueryBuilder) CreatePerformersScenes(newJoins []PerformersScenes, tx *sqlx.Tx) error {
func (qb *JoinsQueryBuilder) CreatePerformersScenes(newJoins []PerformersScenes, tx *sqlx.Tx) error {
ensureTx(tx)
for _, join := range newJoins {
_, err := tx.NamedExec(
@@ -22,7 +22,7 @@ func (qb *joinsQueryBuilder) CreatePerformersScenes(newJoins []PerformersScenes,
return nil
}

func (qb *joinsQueryBuilder) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes, tx *sqlx.Tx) error {
func (qb *JoinsQueryBuilder) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes, tx *sqlx.Tx) error {
ensureTx(tx)

// Delete the existing joins and then create new ones
@@ -33,7 +33,7 @@ func (qb *joinsQueryBuilder) UpdatePerformersScenes(sceneID int, updatedJoins []
return qb.CreatePerformersScenes(updatedJoins, tx)
}

func (qb *joinsQueryBuilder) CreateScenesTags(newJoins []ScenesTags, tx *sqlx.Tx) error {
func (qb *JoinsQueryBuilder) CreateScenesTags(newJoins []ScenesTags, tx *sqlx.Tx) error {
ensureTx(tx)
for _, join := range newJoins {
_, err := tx.NamedExec(
@@ -47,7 +47,7 @@ func (qb *joinsQueryBuilder) CreateScenesTags(newJoins []ScenesTags, tx *sqlx.Tx
return nil
}

func (qb *joinsQueryBuilder) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags, tx *sqlx.Tx) error {
func (qb *JoinsQueryBuilder) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags, tx *sqlx.Tx) error {
ensureTx(tx)

// Delete the existing joins and then create new ones
@@ -58,7 +58,7 @@ func (qb *joinsQueryBuilder) UpdateScenesTags(sceneID int, updatedJoins []Scenes
return qb.CreateScenesTags(updatedJoins, tx)
}

func (qb *joinsQueryBuilder) CreateSceneMarkersTags(newJoins []SceneMarkersTags, tx *sqlx.Tx) error {
func (qb *JoinsQueryBuilder) CreateSceneMarkersTags(newJoins []SceneMarkersTags, tx *sqlx.Tx) error {
ensureTx(tx)
for _, join := range newJoins {
_, err := tx.NamedExec(
@@ -72,7 +72,7 @@ func (qb *joinsQueryBuilder) CreateSceneMarkersTags(newJoins []SceneMarkersTags,
return nil
}

func (qb *joinsQueryBuilder) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags, tx *sqlx.Tx) error {
func (qb *JoinsQueryBuilder) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags, tx *sqlx.Tx) error {
ensureTx(tx)

// Delete the existing joins and then create new ones
@@ -6,13 +6,13 @@ import (
"github.com/stashapp/stash/database"
)

type performerQueryBuilder struct {}
type PerformerQueryBuilder struct{}

func NewPerformerQueryBuilder() performerQueryBuilder {
return performerQueryBuilder{}
func NewPerformerQueryBuilder() PerformerQueryBuilder {
return PerformerQueryBuilder{}
}

func (qb *performerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
func (qb *PerformerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
ensureTx(tx)
result, err := tx.NamedExec(
`INSERT INTO performers (image, checksum, name, url, twitter, instagram, birthdate, ethnicity, country,
@@ -38,10 +38,10 @@ func (qb *performerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*P
return &newPerformer, nil
}

func (qb *performerQueryBuilder) Update(updatedPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
func (qb *PerformerQueryBuilder) Update(updatedPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
ensureTx(tx)
_, err := tx.NamedExec(
`UPDATE performers SET `+SqlGenKeys(updatedPerformer)+` WHERE performers.id = :id`,
`UPDATE performers SET `+SQLGenKeys(updatedPerformer)+` WHERE performers.id = :id`,
updatedPerformer,
)
if err != nil {
@@ -54,7 +54,7 @@ func (qb *performerQueryBuilder) Update(updatedPerformer Performer, tx *sqlx.Tx)
return &updatedPerformer, nil
}

func (qb *performerQueryBuilder) Find(id int) (*Performer, error) {
func (qb *PerformerQueryBuilder) Find(id int) (*Performer, error) {
query := "SELECT * FROM performers WHERE id = ? LIMIT 1"
args := []interface{}{id}
results, err := qb.queryPerformers(query, args, nil)
@@ -64,7 +64,7 @@ func (qb *performerQueryBuilder) Find(id int) (*Performer, error) {
return &results[0], nil
}

func (qb *performerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Performer, error) {
func (qb *PerformerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Performer, error) {
query := `
SELECT performers.* FROM performers
LEFT JOIN performers_scenes as scenes_join on scenes_join.performer_id = performers.id
@@ -76,7 +76,7 @@ func (qb *performerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Perf
return qb.queryPerformers(query, args, tx)
}

func (qb *performerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Performer, error) {
func (qb *PerformerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Performer, error) {
query := "SELECT * FROM performers WHERE name IN " + getInBinding(len(names))
var args []interface{}
for _, name := range names {
@@ -85,15 +85,15 @@ func (qb *performerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Per
return qb.queryPerformers(query, args, tx)
}

func (qb *performerQueryBuilder) Count() (int, error) {
func (qb *PerformerQueryBuilder) Count() (int, error) {
return runCountQuery(buildCountQuery("SELECT performers.id FROM performers"), nil)
}

func (qb *performerQueryBuilder) All() ([]Performer, error) {
return qb.queryPerformers(selectAll("performers") + qb.getPerformerSort(nil), nil, nil)
func (qb *PerformerQueryBuilder) All() ([]Performer, error) {
return qb.queryPerformers(selectAll("performers")+qb.getPerformerSort(nil), nil, nil)
}

func (qb *performerQueryBuilder) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]Performer, int) {
func (qb *PerformerQueryBuilder) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]Performer, int) {
if performerFilter == nil {
performerFilter = &PerformerFilterType{}
}
@@ -135,7 +135,7 @@ func (qb *performerQueryBuilder) Query(performerFilter *PerformerFilterType, fin
return performers, countResult
}

func (qb *performerQueryBuilder) getPerformerSort(findFilter *FindFilterType) string {
func (qb *PerformerQueryBuilder) getPerformerSort(findFilter *FindFilterType) string {
var sort string
var direction string
if findFilter == nil {
@@ -148,7 +148,7 @@ func (qb *performerQueryBuilder) getPerformerSort(findFilter *FindFilterType) st
return getSort(sort, direction, "performers")
}

func (qb *performerQueryBuilder) queryPerformers(query string, args []interface{}, tx *sqlx.Tx) ([]Performer, error) {
func (qb *PerformerQueryBuilder) queryPerformers(query string, args []interface{}, tx *sqlx.Tx) ([]Performer, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
@@ -31,13 +31,13 @@ WHERE tags.id = ?
|
||||
GROUP BY scenes.id
|
||||
`
|
||||
|
||||
type sceneQueryBuilder struct{}
|
||||
type SceneQueryBuilder struct{}
|
||||
|
||||
func NewSceneQueryBuilder() sceneQueryBuilder {
|
||||
return sceneQueryBuilder{}
|
||||
func NewSceneQueryBuilder() SceneQueryBuilder {
|
||||
return SceneQueryBuilder{}
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||
func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||
ensureTx(tx)
|
||||
result, err := tx.NamedExec(
|
||||
`INSERT INTO scenes (checksum, path, title, details, url, date, rating, size, duration, video_codec,
|
||||
@@ -60,10 +60,10 @@ func (qb *sceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error)
|
||||
return &newScene, nil
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) Update(updatedScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||
func (qb *SceneQueryBuilder) Update(updatedScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE scenes SET `+SqlGenKeys(updatedScene)+` WHERE scenes.id = :id`,
|
||||
`UPDATE scenes SET `+SQLGenKeys(updatedScene)+` WHERE scenes.id = :id`,
|
||||
updatedScene,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -76,54 +76,54 @@ func (qb *sceneQueryBuilder) Update(updatedScene Scene, tx *sqlx.Tx) (*Scene, er
|
||||
return &updatedScene, nil
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) Find(id int) (*Scene, error) {
|
||||
func (qb *SceneQueryBuilder) Find(id int) (*Scene, error) {
|
||||
query := "SELECT * FROM scenes WHERE id = ? LIMIT 1"
|
||||
args := []interface{}{id}
|
||||
return qb.queryScene(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) FindByChecksum(checksum string) (*Scene, error) {
|
||||
func (qb *SceneQueryBuilder) FindByChecksum(checksum string) (*Scene, error) {
|
||||
query := "SELECT * FROM scenes WHERE checksum = ? LIMIT 1"
|
||||
args := []interface{}{checksum}
|
||||
return qb.queryScene(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) FindByPath(path string) (*Scene, error) {
|
||||
func (qb *SceneQueryBuilder) FindByPath(path string) (*Scene, error) {
|
||||
query := "SELECT * FROM scenes WHERE path = ? LIMIT 1"
|
||||
args := []interface{}{path}
|
||||
return qb.queryScene(query, args, nil)
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) FindByPerformerID(performerID int) ([]Scene, error) {
|
||||
func (qb *SceneQueryBuilder) FindByPerformerID(performerID int) ([]Scene, error) {
|
||||
args := []interface{}{performerID}
|
||||
return qb.queryScenes(scenesForPerformerQuery, args, nil)
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) CountByPerformerID(performerID int) (int, error) {
|
||||
func (qb *SceneQueryBuilder) CountByPerformerID(performerID int) (int, error) {
|
||||
args := []interface{}{performerID}
|
||||
return runCountQuery(buildCountQuery(scenesForPerformerQuery), args)
|
||||
}
|
||||
|
||||
func (qb *sceneQueryBuilder) FindByStudioID(studioID int) ([]Scene, error) {
|
||||
func (qb *SceneQueryBuilder) FindByStudioID(studioID int) ([]Scene, error) {
args := []interface{}{studioID}
return qb.queryScenes(scenesForStudioQuery, args, nil)
}

func (qb *sceneQueryBuilder) Count() (int, error) {
func (qb *SceneQueryBuilder) Count() (int, error) {
return runCountQuery(buildCountQuery("SELECT scenes.id FROM scenes"), nil)
}

func (qb *sceneQueryBuilder) CountByStudioID(studioID int) (int, error) {
func (qb *SceneQueryBuilder) CountByStudioID(studioID int) (int, error) {
args := []interface{}{studioID}
return runCountQuery(buildCountQuery(scenesForStudioQuery), args)
}

func (qb *sceneQueryBuilder) CountByTagID(tagID int) (int, error) {
func (qb *SceneQueryBuilder) CountByTagID(tagID int) (int, error) {
args := []interface{}{tagID}
return runCountQuery(buildCountQuery(scenesForTagQuery), args)
}

func (qb *sceneQueryBuilder) Wall(q *string) ([]Scene, error) {
func (qb *SceneQueryBuilder) Wall(q *string) ([]Scene, error) {
s := ""
if q != nil {
s = *q
@@ -132,11 +132,11 @@ func (qb *sceneQueryBuilder) Wall(q *string) ([]Scene, error) {
return qb.queryScenes(query, nil, nil)
}

func (qb *sceneQueryBuilder) All() ([]Scene, error) {
return qb.queryScenes(selectAll("scenes") + qb.getSceneSort(nil), nil, nil)
func (qb *SceneQueryBuilder) All() ([]Scene, error) {
return qb.queryScenes(selectAll("scenes")+qb.getSceneSort(nil), nil, nil)
}

func (qb *sceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]Scene, int) {
func (qb *SceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]Scene, int) {
if sceneFilter == nil {
sceneFilter = &SceneFilterType{}
}
@@ -209,8 +209,8 @@ func (qb *sceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *Fin
}

if tagsFilter := sceneFilter.Tags; len(tagsFilter) > 0 {
for _, tagId := range tagsFilter {
args = append(args, tagId)
for _, tagID := range tagsFilter {
args = append(args, tagID)
}

whereClauses = append(whereClauses, "tags.id IN "+getInBinding(len(tagsFilter)))
@@ -239,17 +239,16 @@ func (qb *sceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *Fin
return scenes, countResult
}

func (qb *sceneQueryBuilder) getSceneSort(findFilter *FindFilterType) string {
func (qb *SceneQueryBuilder) getSceneSort(findFilter *FindFilterType) string {
if findFilter == nil {
return " ORDER BY scenes.path, scenes.date ASC "
} else {
sort := findFilter.GetSort("title")
direction := findFilter.GetDirection()
return getSort(sort, direction, "scenes")
}
sort := findFilter.GetSort("title")
direction := findFilter.GetDirection()
return getSort(sort, direction, "scenes")
}

func (qb *sceneQueryBuilder) queryScene(query string, args []interface{}, tx *sqlx.Tx) (*Scene, error) {
func (qb *SceneQueryBuilder) queryScene(query string, args []interface{}, tx *sqlx.Tx) (*Scene, error) {
results, err := qb.queryScenes(query, args, tx)
if err != nil || len(results) < 1 {
return nil, err
@@ -257,7 +256,7 @@ func (qb *sceneQueryBuilder) queryScene(query string, args []interface{}, tx *sq
return &results[0], nil
}

func (qb *sceneQueryBuilder) queryScenes(query string, args []interface{}, tx *sqlx.Tx) ([]Scene, error) {
func (qb *SceneQueryBuilder) queryScenes(query string, args []interface{}, tx *sqlx.Tx) ([]Scene, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
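
The Query hunk above assembles its tag filter with `"tags.id IN "+getInBinding(len(tagsFilter))`. The helper itself never appears in this diff, so purely as a sketch (name, spacing, and signature are assumptions), a getInBinding-style function only needs to emit one `?` placeholder per bound value while the IDs travel separately in `args`:

```go
package main

import (
	"fmt"
	"strings"
)

// inBinding mirrors what a getInBinding-style helper presumably does:
// return "(?, ?, ..., ?)" with one placeholder per value to bind.
func inBinding(length int) string {
	placeholders := make([]string, length)
	for i := range placeholders {
		placeholders[i] = "?"
	}
	return "(" + strings.Join(placeholders, ", ") + ")"
}

func main() {
	tagIDs := []int{3, 7, 11}
	fmt.Println("tags.id IN " + inBinding(len(tagIDs))) // tags.id IN (?, ?, ?)
}
```
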
@@ -15,13 +15,13 @@ WHERE tags.id = ?
GROUP BY scene_markers.id
`

type sceneMarkerQueryBuilder struct {}
type SceneMarkerQueryBuilder struct{}

func NewSceneMarkerQueryBuilder() sceneMarkerQueryBuilder {
return sceneMarkerQueryBuilder{}
func NewSceneMarkerQueryBuilder() SceneMarkerQueryBuilder {
return SceneMarkerQueryBuilder{}
}

func (qb *sceneMarkerQueryBuilder) Create(newSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) Create(newSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
ensureTx(tx)
result, err := tx.NamedExec(
`INSERT INTO scene_markers (title, seconds, primary_tag_id, scene_id, created_at, updated_at)
@@ -43,10 +43,10 @@ func (qb *sceneMarkerQueryBuilder) Create(newSceneMarker SceneMarker, tx *sqlx.T
return &newSceneMarker, nil
}

func (qb *sceneMarkerQueryBuilder) Update(updatedSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) Update(updatedSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
ensureTx(tx)
_, err := tx.NamedExec(
`UPDATE scene_markers SET `+SqlGenKeys(updatedSceneMarker)+` WHERE scene_markers.id = :id`,
`UPDATE scene_markers SET `+SQLGenKeys(updatedSceneMarker)+` WHERE scene_markers.id = :id`,
updatedSceneMarker,
)
if err != nil {
@@ -59,11 +59,11 @@ func (qb *sceneMarkerQueryBuilder) Update(updatedSceneMarker SceneMarker, tx *sq
return &updatedSceneMarker, nil
}

func (qb *sceneMarkerQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
func (qb *SceneMarkerQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
return executeDeleteQuery("scene_markers", id, tx)
}

func (qb *sceneMarkerQueryBuilder) Find(id int) (*SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) Find(id int) (*SceneMarker, error) {
query := "SELECT * FROM scene_markers WHERE id = ? LIMIT 1"
args := []interface{}{id}
results, err := qb.querySceneMarkers(query, args, nil)
@@ -73,7 +73,7 @@ func (qb *sceneMarkerQueryBuilder) Find(id int) (*SceneMarker, error) {
return &results[0], nil
}

func (qb *sceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]SceneMarker, error) {
query := `
SELECT scene_markers.* FROM scene_markers
JOIN scenes ON scenes.id = scene_markers.scene_id
@@ -85,12 +85,12 @@ func (qb *sceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Sc
return qb.querySceneMarkers(query, args, tx)
}

func (qb *sceneMarkerQueryBuilder) CountByTagID(tagID int) (int, error) {
func (qb *SceneMarkerQueryBuilder) CountByTagID(tagID int) (int, error) {
args := []interface{}{tagID}
return runCountQuery(buildCountQuery(sceneMarkersForTagQuery), args)
}

func (qb *sceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]*MarkerStringsResultType, error) {
func (qb *SceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]*MarkerStringsResultType, error) {
query := "SELECT count(*) as `count`, scene_markers.id as id, scene_markers.title as title FROM scene_markers"
if q != nil {
query = query + " WHERE title LIKE '%" + *q + "%'"
@@ -105,7 +105,7 @@ func (qb *sceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]
return qb.queryMarkerStringsResultType(query, args)
}

func (qb *sceneMarkerQueryBuilder) Wall(q *string) ([]SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) Wall(q *string) ([]SceneMarker, error) {
s := ""
if q != nil {
s = *q
@@ -114,7 +114,7 @@ func (qb *sceneMarkerQueryBuilder) Wall(q *string) ([]SceneMarker, error) {
return qb.querySceneMarkers(query, nil, nil)
}

func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]SceneMarker, int) {
func (qb *SceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]SceneMarker, int) {
if sceneMarkerFilter == nil {
sceneMarkerFilter = &SceneMarkerFilterType{}
}
@@ -147,7 +147,7 @@ func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterTyp
length := len(tagIDs)
body += " LEFT JOIN tags AS ptj ON ptj.id = scene_markers.primary_tag_id AND ptj.id IN " + getInBinding(length)
body += " LEFT JOIN scene_markers_tags AS tj ON tj.scene_marker_id = scene_markers.id AND tj.tag_id IN " + getInBinding(length)
havingClauses = append(havingClauses, "((COUNT(DISTINCT ptj.id) + COUNT(DISTINCT tj.tag_id)) = " + strconv.Itoa(length) +")")
havingClauses = append(havingClauses, "((COUNT(DISTINCT ptj.id) + COUNT(DISTINCT tj.tag_id)) = "+strconv.Itoa(length)+")")
for _, tagID := range tagIDs {
args = append(args, tagID)
}
@@ -159,7 +159,7 @@ func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterTyp
if sceneTagIDs := sceneMarkerFilter.SceneTags; sceneTagIDs != nil {
length := len(sceneTagIDs)
body += " LEFT JOIN scenes_tags AS scene_tags_join ON scene_tags_join.scene_id = scene.id AND scene_tags_join.tag_id IN " + getInBinding(length)
havingClauses = append(havingClauses, "COUNT(DISTINCT scene_tags_join.tag_id) = " + strconv.Itoa(length))
havingClauses = append(havingClauses, "COUNT(DISTINCT scene_tags_join.tag_id) = "+strconv.Itoa(length))
for _, tagID := range sceneTagIDs {
args = append(args, tagID)
}
@@ -168,7 +168,7 @@ func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterTyp
if performerIDs := sceneMarkerFilter.Performers; performerIDs != nil {
length := len(performerIDs)
body += " LEFT JOIN performers_scenes as scene_performers ON scene.id = scene_performers.scene_id"
whereClauses = append(whereClauses, "scene_performers.performer_id IN " + getInBinding(length))
whereClauses = append(whereClauses, "scene_performers.performer_id IN "+getInBinding(length))
for _, performerID := range performerIDs {
args = append(args, performerID)
}
@@ -195,13 +195,13 @@ func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterTyp
return sceneMarkers, countResult
}

func (qb *sceneMarkerQueryBuilder) getSceneMarkerSort(findFilter *FindFilterType) string {
func (qb *SceneMarkerQueryBuilder) getSceneMarkerSort(findFilter *FindFilterType) string {
sort := findFilter.GetSort("title")
direction := findFilter.GetDirection()
return getSort(sort, direction, "scene_markers")
}

func (qb *sceneMarkerQueryBuilder) querySceneMarkers(query string, args []interface{}, tx *sqlx.Tx) ([]SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) querySceneMarkers(query string, args []interface{}, tx *sqlx.Tx) ([]SceneMarker, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
@@ -231,7 +231,7 @@ func (qb *sceneMarkerQueryBuilder) querySceneMarkers(query string, args []interf
return sceneMarkers, nil
}

func (qb *sceneMarkerQueryBuilder) queryMarkerStringsResultType(query string, args []interface{}) ([]*MarkerStringsResultType, error) {
func (qb *SceneMarkerQueryBuilder) queryMarkerStringsResultType(query string, args []interface{}) ([]*MarkerStringsResultType, error) {
rows, err := database.DB.Queryx(query, args...)
if err != nil && err != sql.ErrNoRows {
return nil, err
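
The tags filter in the Query hunk deserves a gloss: both LEFT JOINs are already restricted to the requested tag IDs, so requiring the combined distinct-match count to equal len(tagIDs) in the HAVING clause keeps only markers that carry every requested tag, either as the primary tag (ptj) or through scene_markers_tags (tj). A minimal sketch of the clause it produces (inputs are hypothetical):

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Hypothetical stand-in for sceneMarkerFilter.Tags.
	tagIDs := []int{3, 7}
	length := len(tagIDs)

	// Same shape as the hunk above: a marker only survives the HAVING
	// clause if every requested tag matched through one of the two joins.
	having := "((COUNT(DISTINCT ptj.id) + COUNT(DISTINCT tj.tag_id)) = " + strconv.Itoa(length) + ")"
	fmt.Println(having) // ((COUNT(DISTINCT ptj.id) + COUNT(DISTINCT tj.tag_id)) = 2)
}
```
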
@@ -6,13 +6,13 @@ import (
"github.com/stashapp/stash/database"
)

type scrapedItemQueryBuilder struct{}
type ScrapedItemQueryBuilder struct{}

func NewScrapedItemQueryBuilder() scrapedItemQueryBuilder {
return scrapedItemQueryBuilder{}
func NewScrapedItemQueryBuilder() ScrapedItemQueryBuilder {
return ScrapedItemQueryBuilder{}
}

func (qb *scrapedItemQueryBuilder) Create(newScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
func (qb *ScrapedItemQueryBuilder) Create(newScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
ensureTx(tx)
result, err := tx.NamedExec(
`INSERT INTO scraped_items (title, description, url, date, rating, tags, models, episode, gallery_filename,
@@ -35,10 +35,10 @@ func (qb *scrapedItemQueryBuilder) Create(newScrapedItem ScrapedItem, tx *sqlx.T
return &newScrapedItem, nil
}

func (qb *scrapedItemQueryBuilder) Update(updatedScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
func (qb *ScrapedItemQueryBuilder) Update(updatedScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
ensureTx(tx)
_, err := tx.NamedExec(
`UPDATE scraped_items SET `+SqlGenKeys(updatedScrapedItem)+` WHERE scraped_items.id = :id`,
`UPDATE scraped_items SET `+SQLGenKeys(updatedScrapedItem)+` WHERE scraped_items.id = :id`,
updatedScrapedItem,
)
if err != nil {
@@ -51,17 +51,17 @@ func (qb *scrapedItemQueryBuilder) Update(updatedScrapedItem ScrapedItem, tx *sq
return &updatedScrapedItem, nil
}

func (qb *scrapedItemQueryBuilder) Find(id int) (*ScrapedItem, error) {
func (qb *ScrapedItemQueryBuilder) Find(id int) (*ScrapedItem, error) {
query := "SELECT * FROM scraped_items WHERE id = ? LIMIT 1"
args := []interface{}{id}
return qb.queryScrapedItem(query, args, nil)
}

func (qb *scrapedItemQueryBuilder) All() ([]ScrapedItem, error) {
return qb.queryScrapedItems(selectAll("scraped_items") + qb.getScrapedItemsSort(nil), nil, nil)
func (qb *ScrapedItemQueryBuilder) All() ([]ScrapedItem, error) {
return qb.queryScrapedItems(selectAll("scraped_items")+qb.getScrapedItemsSort(nil), nil, nil)
}

func (qb *scrapedItemQueryBuilder) getScrapedItemsSort(findFilter *FindFilterType) string {
func (qb *ScrapedItemQueryBuilder) getScrapedItemsSort(findFilter *FindFilterType) string {
var sort string
var direction string
if findFilter == nil {
@@ -74,7 +74,7 @@ func (qb *scrapedItemQueryBuilder) getScrapedItemsSort(findFilter *FindFilterTyp
return getSort(sort, direction, "scraped_items")
}

func (qb *scrapedItemQueryBuilder) queryScrapedItem(query string, args []interface{}, tx *sqlx.Tx) (*ScrapedItem, error) {
func (qb *ScrapedItemQueryBuilder) queryScrapedItem(query string, args []interface{}, tx *sqlx.Tx) (*ScrapedItem, error) {
results, err := qb.queryScrapedItems(query, args, tx)
if err != nil || len(results) < 1 {
return nil, err
@@ -82,7 +82,7 @@ func (qb *scrapedItemQueryBuilder) queryScrapedItem(query string, args []interfa
return &results[0], nil
}

func (qb *scrapedItemQueryBuilder) queryScrapedItems(query string, args []interface{}, tx *sqlx.Tx) ([]ScrapedItem, error) {
func (qb *ScrapedItemQueryBuilder) queryScrapedItems(query string, args []interface{}, tx *sqlx.Tx) ([]ScrapedItem, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
@@ -162,7 +162,7 @@ func executeDeleteQuery(tableName string, id string, tx *sqlx.Tx) error {
}
idColumnName := getColumn(tableName, "id")
_, err := tx.Exec(
`DELETE FROM ` + tableName + ` WHERE ` + idColumnName + ` = ?`,
`DELETE FROM `+tableName+` WHERE `+idColumnName+` = ?`,
id,
)
return err
@@ -178,7 +178,7 @@ func ensureTx(tx *sqlx.Tx) {
// sqlGenKeys is used for passing a struct and returning a string
// of keys for non empty key:values. These keys are formated
// keyname=:keyname with a comma seperating them
func SqlGenKeys(i interface{}) string {
func SQLGenKeys(i interface{}) string {
var query []string
v := reflect.ValueOf(i)
for i := 0; i < v.NumField(); i++ {
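
The hunk above renames SqlGenKeys to SQLGenKeys but cuts off before the loop body. Going only by its doc comment (emit `name=:name` for each non-empty field, comma separated), a reflection-based sketch might look like the following; the use of `db` struct tags and `IsZero` here is an assumption, not the project's actual implementation:

```go
package main

import (
	"fmt"
	"reflect"
	"strings"
)

// sketchGenKeys illustrates the documented contract of SQLGenKeys:
// for every non-zero field, emit `name=:name`, comma separated.
func sketchGenKeys(i interface{}) string {
	var parts []string
	v := reflect.ValueOf(i)
	t := v.Type()
	for idx := 0; idx < v.NumField(); idx++ {
		if v.Field(idx).IsZero() {
			continue // skip empty key:values, as the comment describes
		}
		name := t.Field(idx).Tag.Get("db")
		if name == "" {
			name = strings.ToLower(t.Field(idx).Name)
		}
		parts = append(parts, name+"=:"+name)
	}
	return strings.Join(parts, ", ")
}

type tagRow struct {
	Name      string `db:"name"`
	UpdatedAt string `db:"updated_at"`
}

func main() {
	fmt.Println(sketchGenKeys(tagRow{Name: "favorite"})) // name=:name
}
```
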
@@ -6,13 +6,13 @@ import (
"github.com/stashapp/stash/database"
)

type studioQueryBuilder struct {}
type StudioQueryBuilder struct{}

func NewStudioQueryBuilder() studioQueryBuilder {
return studioQueryBuilder{}
func NewStudioQueryBuilder() StudioQueryBuilder {
return StudioQueryBuilder{}
}

func (qb *studioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, error) {
func (qb *StudioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, error) {
ensureTx(tx)
result, err := tx.NamedExec(
`INSERT INTO studios (image, checksum, name, url, created_at, updated_at)
@@ -34,10 +34,10 @@ func (qb *studioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, er
return &newStudio, nil
}

func (qb *studioQueryBuilder) Update(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) {
func (qb *StudioQueryBuilder) Update(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) {
ensureTx(tx)
_, err := tx.NamedExec(
`UPDATE studios SET `+SqlGenKeys(updatedStudio)+` WHERE studios.id = :id`,
`UPDATE studios SET `+SQLGenKeys(updatedStudio)+` WHERE studios.id = :id`,
updatedStudio,
)
if err != nil {
@@ -50,33 +50,33 @@ func (qb *studioQueryBuilder) Update(updatedStudio Studio, tx *sqlx.Tx) (*Studio
return &updatedStudio, nil
}

func (qb *studioQueryBuilder) Find(id int, tx *sqlx.Tx) (*Studio, error) {
func (qb *StudioQueryBuilder) Find(id int, tx *sqlx.Tx) (*Studio, error) {
query := "SELECT * FROM studios WHERE id = ? LIMIT 1"
args := []interface{}{id}
return qb.queryStudio(query, args, tx)
}

func (qb *studioQueryBuilder) FindBySceneID(sceneID int) (*Studio, error) {
func (qb *StudioQueryBuilder) FindBySceneID(sceneID int) (*Studio, error) {
query := "SELECT studios.* FROM studios JOIN scenes ON studios.id = scenes.studio_id WHERE scenes.id = ? LIMIT 1"
args := []interface{}{sceneID}
return qb.queryStudio(query, args, nil)
}

func (qb *studioQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Studio, error) {
func (qb *StudioQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Studio, error) {
query := "SELECT * FROM studios WHERE name = ? LIMIT 1"
args := []interface{}{name}
return qb.queryStudio(query, args, tx)
}

func (qb *studioQueryBuilder) Count() (int, error) {
func (qb *StudioQueryBuilder) Count() (int, error) {
return runCountQuery(buildCountQuery("SELECT studios.id FROM studios"), nil)
}

func (qb *studioQueryBuilder) All() ([]Studio, error) {
return qb.queryStudios(selectAll("studios") + qb.getStudioSort(nil), nil, nil)
func (qb *StudioQueryBuilder) All() ([]Studio, error) {
return qb.queryStudios(selectAll("studios")+qb.getStudioSort(nil), nil, nil)
}

func (qb *studioQueryBuilder) Query(findFilter *FindFilterType) ([]Studio, int) {
func (qb *StudioQueryBuilder) Query(findFilter *FindFilterType) ([]Studio, int) {
if findFilter == nil {
findFilter = &FindFilterType{}
}
@@ -103,7 +103,7 @@ func (qb *studioQueryBuilder) Query(findFilter *FindFilterType) ([]Studio, int)
return studios, countResult
}

func (qb *studioQueryBuilder) getStudioSort(findFilter *FindFilterType) string {
func (qb *StudioQueryBuilder) getStudioSort(findFilter *FindFilterType) string {
var sort string
var direction string
if findFilter == nil {
@@ -116,7 +116,7 @@ func (qb *studioQueryBuilder) getStudioSort(findFilter *FindFilterType) string {
return getSort(sort, direction, "studios")
}

func (qb *studioQueryBuilder) queryStudio(query string, args []interface{}, tx *sqlx.Tx) (*Studio, error) {
func (qb *StudioQueryBuilder) queryStudio(query string, args []interface{}, tx *sqlx.Tx) (*Studio, error) {
results, err := qb.queryStudios(query, args, tx)
if err != nil || len(results) < 1 {
return nil, err
@@ -124,7 +124,7 @@ func (qb *studioQueryBuilder) queryStudio(query string, args []interface{}, tx *
return &results[0], nil
}

func (qb *studioQueryBuilder) queryStudios(query string, args []interface{}, tx *sqlx.Tx) ([]Studio, error) {
func (qb *StudioQueryBuilder) queryStudios(query string, args []interface{}, tx *sqlx.Tx) ([]Studio, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
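
The Update methods in these files feed the SQLGenKeys output to sqlx's tx.NamedExec, which resolves every `:name` placeholder from the struct being saved. sqlx's Named function performs the same binding step without a live database, which makes the mechanism easy to see in isolation (struct and values below are made up):

```go
package main

import (
	"fmt"

	"github.com/jmoiron/sqlx"
)

type studioRow struct {
	ID   int    `db:"id"`
	Name string `db:"name"`
}

func main() {
	// sqlx.Named shows what NamedExec does under the hood: each :name
	// placeholder is resolved against the struct's db tags.
	query, args, err := sqlx.Named(
		`UPDATE studios SET name=:name WHERE studios.id = :id`,
		studioRow{ID: 1, Name: "Example Studio"},
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(query) // UPDATE studios SET name=? WHERE studios.id = ?
	fmt.Println(args)  // [Example Studio 1]
}
```
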
@@ -6,13 +6,13 @@ import (
"github.com/stashapp/stash/database"
)

type tagQueryBuilder struct {}
type TagQueryBuilder struct{}

func NewTagQueryBuilder() tagQueryBuilder {
return tagQueryBuilder{}
func NewTagQueryBuilder() TagQueryBuilder {
return TagQueryBuilder{}
}

func (qb *tagQueryBuilder) Create(newTag Tag, tx *sqlx.Tx) (*Tag, error) {
func (qb *TagQueryBuilder) Create(newTag Tag, tx *sqlx.Tx) (*Tag, error) {
ensureTx(tx)
result, err := tx.NamedExec(
`INSERT INTO tags (name, created_at, updated_at)
@@ -34,9 +34,9 @@ func (qb *tagQueryBuilder) Create(newTag Tag, tx *sqlx.Tx) (*Tag, error) {
return &newTag, nil
}

func (qb *tagQueryBuilder) Update(updatedTag Tag, tx *sqlx.Tx) (*Tag, error) {
func (qb *TagQueryBuilder) Update(updatedTag Tag, tx *sqlx.Tx) (*Tag, error) {
ensureTx(tx)
query := `UPDATE tags SET `+SqlGenKeys(updatedTag)+` WHERE tags.id = :id`
query := `UPDATE tags SET ` + SQLGenKeys(updatedTag) + ` WHERE tags.id = :id`
_, err := tx.NamedExec(
query,
updatedTag,
@@ -51,17 +51,17 @@ func (qb *tagQueryBuilder) Update(updatedTag Tag, tx *sqlx.Tx) (*Tag, error) {
return &updatedTag, nil
}

func (qb *tagQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
func (qb *TagQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
return executeDeleteQuery("tags", id, tx)
}

func (qb *tagQueryBuilder) Find(id int, tx *sqlx.Tx) (*Tag, error) {
func (qb *TagQueryBuilder) Find(id int, tx *sqlx.Tx) (*Tag, error) {
query := "SELECT * FROM tags WHERE id = ? LIMIT 1"
args := []interface{}{id}
return qb.queryTag(query, args, tx)
}

func (qb *tagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Tag, error) {
func (qb *TagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Tag, error) {
query := `
SELECT tags.* FROM tags
LEFT JOIN scenes_tags as scenes_join on scenes_join.tag_id = tags.id
@@ -74,7 +74,7 @@ func (qb *tagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Tag, error
return qb.queryTags(query, args, tx)
}

func (qb *tagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) ([]Tag, error) {
func (qb *TagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) ([]Tag, error) {
query := `
SELECT tags.* FROM tags
LEFT JOIN scene_markers_tags as scene_markers_join on scene_markers_join.tag_id = tags.id
@@ -87,13 +87,13 @@ func (qb *tagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) (
return qb.queryTags(query, args, tx)
}

func (qb *tagQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Tag, error) {
func (qb *TagQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Tag, error) {
query := "SELECT * FROM tags WHERE name = ? LIMIT 1"
args := []interface{}{name}
return qb.queryTag(query, args, tx)
}

func (qb *tagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Tag, error) {
func (qb *TagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Tag, error) {
query := "SELECT * FROM tags WHERE name IN " + getInBinding(len(names))
var args []interface{}
for _, name := range names {
@@ -102,15 +102,15 @@ func (qb *tagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Tag, erro
return qb.queryTags(query, args, tx)
}

func (qb *tagQueryBuilder) Count() (int, error) {
func (qb *TagQueryBuilder) Count() (int, error) {
return runCountQuery(buildCountQuery("SELECT tags.id FROM tags"), nil)
}

func (qb *tagQueryBuilder) All() ([]Tag, error) {
return qb.queryTags(selectAll("tags") + qb.getTagSort(nil), nil, nil)
func (qb *TagQueryBuilder) All() ([]Tag, error) {
return qb.queryTags(selectAll("tags")+qb.getTagSort(nil), nil, nil)
}

func (qb *tagQueryBuilder) getTagSort(findFilter *FindFilterType) string {
func (qb *TagQueryBuilder) getTagSort(findFilter *FindFilterType) string {
var sort string
var direction string
if findFilter == nil {
@@ -123,7 +123,7 @@ func (qb *tagQueryBuilder) getTagSort(findFilter *FindFilterType) string {
return getSort(sort, direction, "tags")
}

func (qb *tagQueryBuilder) queryTag(query string, args []interface{}, tx *sqlx.Tx) (*Tag, error) {
func (qb *TagQueryBuilder) queryTag(query string, args []interface{}, tx *sqlx.Tx) (*Tag, error) {
results, err := qb.queryTags(query, args, tx)
if err != nil || len(results) < 1 {
return nil, err
@@ -131,7 +131,7 @@ func (qb *tagQueryBuilder) queryTag(query string, args []interface{}, tx *sqlx.T
return &results[0], nil
}

func (qb *tagQueryBuilder) queryTags(query string, args []interface{}, tx *sqlx.Tx) ([]Tag, error) {
func (qb *TagQueryBuilder) queryTags(query string, args []interface{}, tx *sqlx.Tx) ([]Tag, error) {
var rows *sqlx.Rows
var err error
if tx != nil {
File diff suppressed because one or more lines are too long
30
revive.toml
Normal file
30
revive.toml
Normal file
@@ -0,0 +1,30 @@
ignoreGeneratedHeader = false
severity = "error"
confidence = 0.8
errorCode = 1
warningCode = 1

#[rule.blank-imports]
[rule.context-as-argument]
[rule.context-keys-type]
[rule.dot-imports]
[rule.error-return]
[rule.error-strings]
[rule.error-naming]
#[rule.exported]
#[rule.if-return]
[rule.increment-decrement]
#[rule.var-naming]
[rule.var-declaration]
[rule.package-comments]
[rule.range]
[rule.receiver-naming]
[rule.time-naming]
#[rule.unexported-return]
#[rule.indent-error-flow]
[rule.errorf]
#[rule.empty-block]
[rule.superfluous-else]
#[rule.unused-parameter]
[rule.unreachable-code]
[rule.redefines-builtin-id]
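
Several hunks in this commit (getSceneSort, paramValue, FixWindowsPath) drop an `else` whose `if` branch already returns; that is the pattern the superfluous-else / indent-error-flow family of revive rules targets, even though only the former is enabled in the config above. A contrived before/after with hypothetical names:

```go
package flow

// Before: the else block is redundant because the if branch returns.
func describe(n int) string {
	if n == 0 {
		return "zero"
	} else {
		return "non-zero"
	}
}

// After: the early return makes the else unnecessary, and the body outdents.
func describeFixed(n int) string {
	if n == 0 {
		return "zero"
	}
	return "non-zero"
}
```
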
@@ -14,7 +14,7 @@ import (

func GetPerformerNames(q string) ([]string, error) {
// Request the HTML page.
queryURL := "https://www.freeones.com/suggestions.php?q="+url.PathEscape(q)+"&t=1"
queryURL := "https://www.freeones.com/suggestions.php?q=" + url.PathEscape(q) + "&t=1"
res, err := http.Get(queryURL)
if err != nil {
logger.Fatal(err)
@@ -41,7 +41,7 @@ func GetPerformerNames(q string) ([]string, error) {
}

func GetPerformer(performerName string) (*models.ScrapedPerformer, error) {
queryURL := "https://www.freeones.com/search/?t=1&q="+url.PathEscape(performerName)+"&view=thumbs"
queryURL := "https://www.freeones.com/search/?t=1&q=" + url.PathEscape(performerName) + "&view=thumbs"
res, err := http.Get(queryURL)
if err != nil {
return nil, err
@@ -151,16 +151,16 @@ func GetPerformer(performerName string) (*models.ScrapedPerformer, error) {
twitterElement := bioDoc.Find(".twitter a")
twitterHref, _ := twitterElement.Attr("href")
if twitterHref != "" {
twitterUrl, _ := url.Parse(twitterHref)
twitterHandle := strings.Replace(twitterUrl.Path, "/", "", -1)
twitterURL, _ := url.Parse(twitterHref)
twitterHandle := strings.Replace(twitterURL.Path, "/", "", -1)
result.Twitter = &twitterHandle
}

instaElement := bioDoc.Find(".instagram a")
instaHref, _ := instaElement.Attr("href")
if instaHref != "" {
instaUrl, _ := url.Parse(instaHref)
instaHandle := strings.Replace(instaUrl.Path, "/", "", -1)
instaURL, _ := url.Parse(instaHref)
instaHandle := strings.Replace(instaURL.Path, "/", "", -1)
result.Instagram = &instaHandle
}

@@ -204,10 +204,14 @@ func getIndexes(doc *goquery.Document) map[string]int {

func getEthnicity(ethnicity string) string {
switch ethnicity {
case "Caucasian": return "white"
case "Black": return "black"
case "Latin": return "hispanic"
case "Asian": return "asian"
case "Caucasian":
return "white"
case "Black":
return "black"
case "Latin":
return "hispanic"
case "Asian":
return "asian"
default:
panic("unknown ethnicity")
}
@@ -217,15 +221,14 @@ func paramValue(params *goquery.Selection, paramIndex int) string {
i := paramIndex - 1
if paramIndex == 0 {
return ""
} else {
node := params.Get(i).FirstChild
content := trim(node.Data)
if content != "" {
return content
}
node = node.NextSibling
return trim(node.FirstChild.Data)
}
node := params.Get(i).FirstChild
content := trim(node.Data)
if content != "" {
return content
}
node = node.NextSibling
return trim(node.FirstChild.Data)
}

// https://stackoverflow.com/questions/20305966/why-does-strip-not-remove-the-leading-whitespace
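
Most of the renames in this file and in the query builders above (tagId→tagID, twitterUrl→twitterURL, SqlGenKeys→SQLGenKeys) follow Go's initialisms convention, which golint/revive naming checks encourage: an acronym keeps a single case throughout the identifier. A tiny illustration with made-up names:

```go
package naming

// The convention behind this commit's renames:
//   tagId      -> tagID
//   twitterUrl -> twitterURL
//   SqlGenKeys -> SQLGenKeys (exported, so the initialism is fully upper case)

var userID int     // preferred over userId
var htmlURL string // preferred over htmlUrl

// SQLQuery is a made-up exported name; the leading initialism stays upper case.
func SQLQuery(id int) string {
	return "SELECT 1" // placeholder body, illustration only
}
```
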
@@ -5,6 +5,7 @@ import (
"time"
)

// GetVTTTime returns a timestamp appropriate for VTT files (hh:mm:ss)
func GetVTTTime(totalSeconds float64) (s string) {
totalSecondsString := strconv.FormatFloat(totalSeconds, 'f', -1, 64)
secondsDuration, _ := time.ParseDuration(totalSecondsString + "s")
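
The GetVTTTime hunk only gains a doc comment and is truncated here; the snippet below is a sketch of the documented hh:mm:ss contract, not the project's actual implementation (which, as shown, goes through strconv and time.ParseDuration). Real WebVTT timestamps also carry milliseconds, which this sketch ignores:

```go
package main

import (
	"fmt"
	"time"
)

// vttTimestamp formats a seconds offset as hh:mm:ss, the shape the
// GetVTTTime doc comment describes.
func vttTimestamp(totalSeconds float64) string {
	d := time.Duration(totalSeconds * float64(time.Second))
	h := int(d / time.Hour)
	m := int(d % time.Hour / time.Minute)
	s := int(d % time.Minute / time.Second)
	return fmt.Sprintf("%02d:%02d:%02d", h, m, s)
}

func main() {
	fmt.Println(vttTimestamp(3723.4)) // 01:02:03
}
```
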
@@ -5,11 +5,10 @@ import (
"strings"
)

// Sometimes the \ isn't recognized as valid on windows
// FixWindowsPath replaces \ with / in the given path because sometimes the \ isn't recognized as valid on windows
func FixWindowsPath(str string) string {
if runtime.GOOS == "windows" {
return strings.Replace(str, "\\", "/", -1)
} else {
return str
}
return str
}