Merge pull request #782 from stashapp/develop

Version 0.3 release
WithoutPants
2020-09-02 12:21:05 +10:00
committed by GitHub
768 changed files with 167692 additions and 31245 deletions

63
.dockerignore Normal file
View File

@@ -0,0 +1,63 @@
####
# Go
####
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Packr2 artifacts
**/*-packr.go
# GraphQL generated output
pkg/models/generated_*.go
ui/v2.5/src/core/generated-*.tsx
# packr generated files
*-packr.go
####
# Jetbrains
####
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
####
# Random
####
ui/v2.5/node_modules
ui/v2.5/build
*.db
stash
dist
docker

5
.gitignore vendored
View File

@@ -20,7 +20,6 @@
# GraphQL generated output
pkg/models/generated_*.go
ui/v2/src/core/generated-*.tsx
ui/v2.5/src/core/generated-*.tsx
# packr generated files
@@ -49,6 +48,9 @@ ui/v2.5/src/core/generated-*.tsx
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Goland Junk
pkg/pkg
####
# Random
####
@@ -59,3 +61,4 @@ node_modules
stash
dist
.DS_Store

View File

@@ -14,9 +14,9 @@ env:
before_install:
- echo -e "machine github.com\n login $CI_USER_TOKEN" > ~/.netrc
- nvm install 12
- travis_retry yarn --cwd ui/v2.5 install --frozen-lockfile
- travis_retry make pre-ui
- make generate
- CI=false yarn --cwd ui/v2.5 build-ci
- CI=false make ui-validate ui-only
#- go get -v github.com/mgechev/revive
script:
# left lint off to avoid getting extra dependency
@@ -41,10 +41,12 @@ deploy:
api_key:
secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00=
file:
- dist/stash-osx
- dist/stash-win.exe
- dist/stash-linux
- dist/stash-pi
- dist/stash-osx
- dist/stash-win.exe
- dist/stash-linux
- dist/stash-linux-arm64v8
- dist/stash-linux-arm32v7
- dist/stash-pi
skip_cleanup: true
overwrite: true
name: "${STASH_VERSION}: Latest development build"
@@ -53,20 +55,29 @@ deploy:
on:
repo: stashapp/stash
branch: develop
# docker image build for develop release
- provider: script
skip_cleanup: true
script: bash ./docker/ci/x86_64/docker_push.sh development-x86_64
on:
repo: stashapp/stash
branch: develop
# official master release - only build when tagged
- provider: releases
api_key:
secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00=
file:
- dist/stash-osx
- dist/stash-win.exe
- dist/stash-linux
- dist/stash-pi
- dist/stash-osx
- dist/stash-win.exe
- dist/stash-linux
- dist/stash-linux-arm64v8
- dist/stash-linux-arm32v7
- dist/stash-pi
# make the release a draft so the maintainers can confirm before releasing
draft: true
skip_cleanup: true
overwrite: true
# don't write the body. To be done manually for now. In future we might
# want to generate the changelog or get it from a file
name: ${STASH_VERSION}
on:
@@ -74,3 +85,12 @@ deploy:
tags: true
# make sure we don't release using the latest_develop tag
condition: $TRAVIS_TAG != latest_develop
# docker image build for master release
- provider: script
skip_cleanup: true
script: bash ./docker/ci/x86_64/docker_push.sh latest
on:
repo: stashapp/stash
tags: true
# make sure we don't release using the latest_develop tag
condition: $TRAVIS_TAG != latest_develop

View File

@@ -1,15 +1,52 @@
ifeq ($(OS),Windows_NT)
SEPARATOR := &&
SET := set
IS_WIN =
ifeq (${SHELL}, sh.exe)
IS_WIN = true
endif
ifeq (${SHELL}, cmd)
IS_WIN = true
endif
release: generate ui build
ifdef IS_WIN
SEPARATOR := &&
SET := set
else
SEPARATOR := ;
SET := export
endif
build:
$(eval DATE := $(shell go run scripts/getDate.go))
# set LDFLAGS environment variable to any extra ldflags required
# set OUTPUT to generate a specific binary name
LDFLAGS := $(LDFLAGS)
ifdef OUTPUT
OUTPUT := -o $(OUTPUT)
endif
.PHONY: release pre-build install clean
release: generate ui build-release
pre-build:
ifndef BUILD_DATE
$(eval BUILD_DATE := $(shell go run -mod=vendor scripts/getDate.go))
endif
ifndef GITHASH
$(eval GITHASH := $(shell git rev-parse --short HEAD))
endif
ifndef STASH_VERSION
$(eval STASH_VERSION := $(shell git describe --tags --exclude latest_develop))
$(SET) CGO_ENABLED=1 $(SEPARATOR) go build -mod=vendor -v -ldflags "-X 'github.com/stashapp/stash/pkg/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/pkg/api.buildstamp=$(DATE)' -X 'github.com/stashapp/stash/pkg/api.githash=$(GITHASH)'"
endif
build: pre-build
$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/pkg/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/pkg/api.buildstamp=$(BUILD_DATE)' -X 'github.com/stashapp/stash/pkg/api.githash=$(GITHASH)')
$(SET) CGO_ENABLED=1 $(SEPARATOR) go build $(OUTPUT) -mod=vendor -v -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)"
# strips debug symbols from the release build
# consider -trimpath in go build if we move to go 1.13+
build-release: EXTRA_LDFLAGS := -s -w
build-release: build
install:
packr2 install
@@ -58,11 +95,25 @@ it:
pre-ui:
cd ui/v2.5 && yarn install --frozen-lockfile
.PHONY: ui
ui:
.PHONY: ui-only
ui-only: pre-build
$(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
$(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
$(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
cd ui/v2.5 && yarn build
.PHONY: ui
ui: ui-only
packr2
.PHONY: ui-start
ui-start: pre-build
$(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
$(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
$(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
cd ui/v2.5 && yarn start
.PHONY: fmt-ui
fmt-ui:
cd ui/v2.5 && yarn format
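
The -X ldflags assembled in pre-build inject build metadata into the binary at link time. A minimal sketch of the receiving side, assuming plain package-level string variables in pkg/api (the exact declarations are not part of this diff):

// go build -ldflags "-X 'github.com/stashapp/stash/pkg/api.version=v0.3.0' ..."
// can only overwrite package-level string variables such as these at link time.
package api

var (
	version    string // -X 'github.com/stashapp/stash/pkg/api.version=...'
	buildstamp string // -X 'github.com/stashapp/stash/pkg/api.buildstamp=...'
	githash    string // -X 'github.com/stashapp/stash/pkg/api.githash=...'
)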

View File

@@ -4,10 +4,14 @@
[![Go Report Card](https://goreportcard.com/badge/github.com/stashapp/stash)](https://goreportcard.com/report/github.com/stashapp/stash)
[![Discord](https://img.shields.io/discord/559159668438728723.svg?logo=discord)](https://discord.gg/2TsNFKt)
https://stashapp.cc
**Stash is a Go app which organizes and serves your porn.**
See a demo [here](https://vimeo.com/275537038) (password is stashapp).
An in-app manual is available, and the manual pages can be viewed [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en).
# Docker install
Follow [this README.md in the docker directory.](docker/production/README.md)

View File

@@ -6,8 +6,12 @@ FROM golang:1.11.13 as compiler
RUN apt-get update && apt-get install -y apt-transport-https
RUN curl -sL https://deb.nodesource.com/setup_10.x | bash -
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
# prevent caching of the key
ADD https://dl.yarnpkg.com/debian/pubkey.gpg yarn.gpg
RUN cat yarn.gpg | apt-key add - && \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list && \
rm yarn.gpg
RUN apt-get update && \
apt-get install -y nodejs yarn xz-utils --no-install-recommends || exit 1; \

View File

@@ -0,0 +1,25 @@
# must be built from /dist directory
FROM ubuntu:18.04 as prep
LABEL MAINTAINER="https://discord.gg/Uz29ny"
RUN apt-get update && \
apt-get -y install curl xz-utils && \
apt-get autoclean -y && \
rm -rf /var/lib/apt/lists/*
WORKDIR /
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
tar xf /ffmpeg.tar.xz && \
rm ffmpeg.tar.xz && \
mv /ffmpeg*/ /ffmpeg/
FROM ubuntu:18.04 as app
RUN apt-get update && apt-get -y install ca-certificates
COPY --from=prep /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
COPY /stash-linux /usr/bin/stash
EXPOSE 9999
CMD ["stash"]

View File

@@ -0,0 +1 @@
This Dockerfile is used by Travis to build the stash image. It must be run after cross-compiling - that is, `stash-linux` must exist in the `dist` directory - and the image must be built from the `dist` directory.

View File

@@ -0,0 +1,9 @@
#!/bin/bash
DOCKER_TAG=$1
# must build the image from dist directory
docker build -t stashapp/stash:$DOCKER_TAG -f ./docker/ci/x86_64/Dockerfile ./dist
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
docker push stashapp/stash:$DOCKER_TAG

View File

@@ -10,9 +10,12 @@ ENV PACKR2_DOWNLOAD_URL=https://github.com/gobuffalo/packr/releases/download/v${
# Install tools
RUN apt-get update && apt-get install -y apt-transport-https
RUN curl -sL https://deb.nodesource.com/setup_10.x | bash -
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
# prevent caching of the key
ADD https://dl.yarnpkg.com/debian/pubkey.gpg yarn.gpg
RUN cat yarn.gpg | apt-key add - && \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list && \
rm yarn.gpg
RUN apt-get update && \
apt-get install -y automake autogen \
@@ -20,6 +23,8 @@ RUN apt-get update && \
patch make tar xz-utils bzip2 gzip sed cpio \
gcc-6-multilib g++-6-multilib gcc-mingw-w64 g++-mingw-w64 clang llvm-dev \
gcc-arm-linux-gnueabi libc-dev-armel-cross linux-libc-dev-armel-cross \
gcc-arm-linux-gnueabihf libc-dev-armhf-cross \
gcc-aarch64-linux-gnu libc-dev-arm64-cross \
nodejs yarn --no-install-recommends || exit 1; \
rm -rf /var/lib/apt/lists/*;

View File

@@ -1,6 +1,6 @@
user=stashappdev
user=stashapp
repo=compiler
version=2
version=3
latest:
docker build -t ${user}/${repo}:latest .

View File

@@ -7,9 +7,12 @@ RUN apt-get update && \
rm -rf /var/lib/apt/lists/*
WORKDIR /
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN curl -L -o /stash $(curl -s https://api.github.com/repos/stashapp/stash/releases/tags/latest_develop | awk '/browser_download_url/ && /stash-linux/' | sed -e 's/.*: "\(.*\)"/\1/') && \
chmod +x /stash && \
curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
# added " to end of stash-linux clause so that it doesn't pick up the arm builds
RUN curl -L -o /stash $(curl -s https://api.github.com/repos/stashapp/stash/releases/tags/latest_develop | awk '/browser_download_url/ && /stash-linux"/' | sed -e 's/.*: "\(.*\)"/\1/') && \
chmod +x /stash
RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
tar xf /ffmpeg.tar.xz && \
rm ffmpeg.tar.xz && \
mv /ffmpeg*/ /ffmpeg/

View File

@@ -7,9 +7,12 @@ RUN apt-get update && \
rm -rf /var/lib/apt/lists/*
WORKDIR /
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN curl -L -o /stash $(curl -s https://api.github.com/repos/stashapp/stash/releases/latest | awk '/browser_download_url/ && /stash-linux/' | sed -e 's/.*: "\(.*\)"/\1/') && \
chmod +x /stash && \
curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
# added " to end of stash-linux clause so that it doesn't pick up the arm builds
RUN curl -L -o /stash $(curl -s https://api.github.com/repos/stashapp/stash/releases/latest | awk '/browser_download_url/ && /stash-linux"/' | sed -e 's/.*: "\(.*\)"/\1/') && \
chmod +x /stash
RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
tar xf /ffmpeg.tar.xz && \
rm ffmpeg.tar.xz && \
mv /ffmpeg*/ /ffmpeg/

9
go.mod
View File

@@ -3,11 +3,14 @@ module github.com/stashapp/stash
require (
github.com/99designs/gqlgen v0.9.0
github.com/antchfx/htmlquery v1.2.3
github.com/bmatcuk/doublestar v1.3.1
github.com/bmatcuk/doublestar/v2 v2.0.1
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c
github.com/chromedp/chromedp v0.5.3
github.com/disintegration/imaging v1.6.0
github.com/go-chi/chi v4.0.2+incompatible
github.com/gobuffalo/packr/v2 v2.0.2
github.com/golang-migrate/migrate/v4 v4.3.1
github.com/gorilla/securecookie v1.1.1
github.com/gorilla/sessions v1.2.0
github.com/gorilla/websocket v1.4.0
github.com/h2non/filetype v1.0.8
@@ -16,16 +19,18 @@ require (
github.com/jmoiron/sqlx v1.2.0
github.com/json-iterator/go v1.1.9
github.com/mattn/go-sqlite3 v1.13.0
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/rs/cors v1.6.0
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
github.com/sirupsen/logrus v1.4.2
github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.4.0
github.com/stretchr/testify v1.5.1
github.com/tidwall/gjson v1.6.0
github.com/vektah/gqlparser v1.1.2
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
golang.org/x/image v0.0.0-20190118043309-183bebdce1b2
golang.org/x/net v0.0.0-20200421231249-e086a090c8fd
golang.org/x/net v0.0.0-20200602114024-627f9648deb9
gopkg.in/yaml.v2 v2.2.2
)
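
The bmatcuk/doublestar dependency moves from v1 to the v2 module, so callers change their import path to the /v2 suffix. A hedged sketch of the v2 API (pattern and paths are illustrative, not taken from this diff):

package main

import (
	"fmt"

	"github.com/bmatcuk/doublestar/v2"
)

func main() {
	// `**` matches across directory separators, unlike path.Match's `*`.
	ok, err := doublestar.Match("media/**/*.mp4", "media/2020/clip.mp4")
	fmt.Println(ok, err) // true <nil>
}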

32
go.sum
View File

@@ -37,11 +37,16 @@ github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
github.com/bmatcuk/doublestar v1.3.1 h1:rT8rxDPsavp9G+4ZULzqhhUSaI/OPsTZNG88Z3i0xvY=
github.com/bmatcuk/doublestar v1.3.1/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/bmatcuk/doublestar/v2 v2.0.1 h1:EFT91DmIMRcrUEcYUW7AqSAwKvNzP5+CoDmNVBbcQOU=
github.com/bmatcuk/doublestar/v2 v2.0.1/go.mod h1:QMmcs3H2AUQICWhfzLXz+IYln8lRQmTZRptLie8RgRw=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/chromedp/cdproto v0.0.0-20200116234248-4da64dd111ac/go.mod h1:PfAWWKJqjlGFYJEidUM6aVIWPr0EpobeyVWEEmplX7g=
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c h1:qM1xzKK8kc93zKPkxK4iqtjksqDDrU6g9wGnr30jyLo=
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c/go.mod h1:E6LPWRdIJc11h/di5p0rwvRmUYbhGpBEH7ZbPfzDIOE=
github.com/chromedp/chromedp v0.5.3 h1:F9LafxmYpsQhWQBdCs+6Sret1zzeeFyHS5LkRF//Ffg=
github.com/chromedp/chromedp v0.5.3/go.mod h1:YLdPtndaHQ4rCpSpBG+IPpy9JvX0VD+7aaLxYgYj28w=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c/go.mod h1:XGLbWH/ujMcbPbhZq52Nv6UrCghb1yGn//133kEsvDk=
@@ -310,6 +315,12 @@ github.com/gobuffalo/uuid v2.0.5+incompatible/go.mod h1:ErhIzkRhm0FtRuiE/PeORqcw
github.com/gobuffalo/validate v2.0.3+incompatible/go.mod h1:N+EtDe0J8252BgfzQUChBgfd6L93m9weay53EWFVsMM=
github.com/gobuffalo/x v0.0.0-20181003152136-452098b06085/go.mod h1:WevpGD+5YOreDJznWevcn8NTmQEW5STSBgIkpkjzqXc=
github.com/gobuffalo/x v0.0.0-20181007152206-913e47c59ca7/go.mod h1:9rDPXaB3kXdKWzMc4odGQQdG2e2DIEmANy5aSJ9yesY=
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0=
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8=
github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo=
github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
github.com/gocql/gocql v0.0.0-20190301043612-f6df8288f9b4/go.mod h1:4Fw1eo5iaEhDUs8XyuhSVCVy52Jq3L+/3GJgYkwc+/0=
github.com/gofrs/uuid v3.1.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
@@ -408,6 +419,8 @@ github.com/karrick/godirwalk v1.7.8/go.mod h1:2c9FRhkDxdIbgkOnCEvnSWs71Bhugbl46s
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/knq/sysutil v0.0.0-20191005231841-15668db23d08 h1:V0an7KRw92wmJysvFvtqtKMAPmvS5O0jtB0nYo6t+gs=
github.com/knq/sysutil v0.0.0-20191005231841-15668db23d08/go.mod h1:dFWs1zEqDjFtnBXsd1vPOZaLsESovai349994nHx3e0=
github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -425,6 +438,9 @@ github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/mailru/easyjson v0.7.1 h1:mdxE1MF9o53iCb2Ghj1VfWvh7ZOwHpnVG/xwXrV90U8=
github.com/mailru/easyjson v0.7.1/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/markbates/deplist v1.0.4/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
github.com/markbates/deplist v1.0.5/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
github.com/markbates/going v1.0.2/go.mod h1:UWCk3zm0UKefHZ7l8BNqi26UyiEMniznk8naLdTcy6c=
@@ -470,6 +486,8 @@ github.com/mongodb/mongo-go-driver v0.3.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5EL
github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA=
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+bczlMBiulwmqYzQpvQNUdtt3oc=
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ=
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM=
github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q=
@@ -582,6 +600,7 @@ github.com/spf13/viper v1.3.1/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DM
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
@@ -590,7 +609,13 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/tidwall/gjson v1.6.0 h1:9VEQWz6LLMUsUl6PueE49ir4Ka6CzLymOAZDxpFsTDc=
github.com/tidwall/gjson v1.6.0/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls=
github.com/tidwall/match v1.0.1 h1:PnKP62LPNxHKTwvHHZZzdOAOCtsJTjo6dZLCwpKm5xc=
github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E=
github.com/tidwall/pretty v0.0.0-20180105212114-65a9db5fad51/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
@@ -676,6 +701,8 @@ golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgP
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20200421231249-e086a090c8fd h1:QPwSajcTUrFriMF1nJ3XzgoqakqQEsnZf9LdXdi2nkI=
golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200602114024-627f9648deb9 h1:pNX+40auqi2JqRfOP1akLGtYcn15TUbkhwuCO3foqqM=
golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -719,6 +746,7 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae h1:mQLHiymj/JXKnnjc62tb7nD5pZLs940/sXJu+Xp3DBA=
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=

View File

@@ -44,3 +44,7 @@ models:
model: github.com/stashapp/stash/pkg/models.ScrapedSceneTag
SceneFileType:
model: github.com/stashapp/stash/pkg/models.SceneFileType
ScrapedMovie:
model: github.com/stashapp/stash/pkg/models.ScrapedMovie
ScrapedMovieStudio:
model: github.com/stashapp/stash/pkg/models.ScrapedMovieStudio

View File

@@ -3,10 +3,15 @@ fragment ConfigGeneralData on ConfigGeneralResult {
databasePath
generatedPath
cachePath
calculateMD5
videoFileNamingAlgorithm
previewSegments
previewSegmentDuration
previewExcludeStart
previewExcludeEnd
previewPreset
maxTranscodeSize
maxStreamingTranscodeSize
forceMkv
forceHevc
username
password
maxSessionAge
@@ -16,6 +21,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
logAccess
excludes
scraperUserAgent
scraperCDPPath
}
fragment ConfigInterfaceData on ConfigInterfaceResult {

View File

@@ -8,4 +8,9 @@ fragment GalleryData on Gallery {
name
path
}
scene {
id
title
path
}
}

View File

@@ -1,6 +1,7 @@
fragment SlimSceneData on Scene {
id
checksum
oshash
title
details
url

View File

@@ -1,6 +1,7 @@
fragment SceneData on Scene {
id
checksum
oshash
title
details
url
@@ -33,8 +34,6 @@ fragment SceneData on Scene {
...SceneMarkerData
}
is_streamable
gallery {
...GalleryData
}

View File

@@ -19,7 +19,7 @@ fragment ScrapedPerformerData on ScrapedPerformer {
}
fragment ScrapedScenePerformerData on ScrapedScenePerformer {
id
stored_id
name
gender
url
@@ -38,6 +38,12 @@ fragment ScrapedScenePerformerData on ScrapedScenePerformer {
aliases
}
fragment ScrapedMovieStudioData on ScrapedMovieStudio {
id
name
url
}
fragment ScrapedMovieData on ScrapedMovie {
name
aliases
@@ -47,10 +53,16 @@ fragment ScrapedMovieData on ScrapedMovie {
director
url
synopsis
front_image
back_image
studio {
...ScrapedMovieStudioData
}
}
fragment ScrapedSceneMovieData on ScrapedSceneMovie {
id
stored_id
name
aliases
duration
@@ -62,13 +74,13 @@ fragment ScrapedSceneMovieData on ScrapedSceneMovie {
}
fragment ScrapedSceneStudioData on ScrapedSceneStudio {
id
stored_id
name
url
}
fragment ScrapedSceneTagData on ScrapedSceneTag {
id
stored_id
name
}
@@ -105,4 +117,4 @@ fragment ScrapedSceneData on ScrapedScene {
movies {
...ScrapedSceneMovieData
}
}

View File

@@ -3,6 +3,22 @@ fragment StudioData on Studio {
checksum
name
url
parent_studio {
id
checksum
name
url
image_path
scene_count
}
child_studios {
id
checksum
name
url
image_path
scene_count
}
image_path
scene_count
}

View File

@@ -1,6 +1,7 @@
fragment TagData on Tag {
id
name
image_path
scene_count
scene_marker_count
}

View File

@@ -22,6 +22,10 @@ mutation MetadataClean {
metadataClean
}
mutation MigrateHashNaming {
migrateHashNaming
}
mutation StopJob {
stopJob
}

View File

@@ -0,0 +1,7 @@
mutation ReloadPlugins {
reloadPlugins
}
mutation RunPluginTask($plugin_id: ID!, $task_name: String!, $args: [PluginArgInput!]) {
runPluginTask(plugin_id: $plugin_id, task_name: $task_name, args: $args)
}

View File

@@ -80,6 +80,10 @@ mutation SceneDestroy($id: ID!, $delete_file: Boolean, $delete_generated : Boole
sceneDestroy(input: {id: $id, delete_file: $delete_file, delete_generated: $delete_generated})
}
mutation ScenesDestroy($ids: [ID!]!, $delete_file: Boolean, $delete_generated : Boolean) {
scenesDestroy(input: {ids: $ids, delete_file: $delete_file, delete_generated: $delete_generated})
}
mutation SceneGenerateScreenshot($id: ID!, $at: Float) {
sceneGenerateScreenshot(id: $id, at: $at)
}

View File

@@ -0,0 +1,3 @@
mutation ReloadScrapers {
reloadScrapers
}

View File

@@ -1,9 +1,10 @@
mutation StudioCreate(
$name: String!,
$url: String,
$image: String) {
$image: String
$parent_id: ID) {
studioCreate(input: { name: $name, url: $url, image: $image }) {
studioCreate(input: { name: $name, url: $url, image: $image, parent_id: $parent_id }) {
...StudioData
}
}
@@ -12,9 +13,10 @@ mutation StudioUpdate(
$id: ID!
$name: String,
$url: String,
$image: String) {
$image: String
$parent_id: ID) {
studioUpdate(input: { id: $id, name: $name, url: $url, image: $image }) {
studioUpdate(input: { id: $id, name: $name, url: $url, image: $image, parent_id: $parent_id }) {
...StudioData
}
}

View File

@@ -1,5 +1,5 @@
mutation TagCreate($name: String!) {
tagCreate(input: { name: $name }) {
mutation TagCreate($name: String!, $image: String) {
tagCreate(input: { name: $name, image: $image }) {
...TagData
}
}
@@ -8,8 +8,8 @@ mutation TagDestroy($id: ID!) {
tagDestroy(input: { id: $id })
}
mutation TagUpdate($id: ID!, $name: String!) {
tagUpdate(input: { id: $id, name: $name }) {
mutation TagUpdate($id: ID!, $name: String!, $image: String) {
tagUpdate(input: { id: $id, name: $name, image: $image }) {
...TagData
}
}

View File

@@ -1,5 +1,5 @@
query FindGalleries($filter: FindFilterType) {
findGalleries(filter: $filter) {
query FindGalleries($filter: FindFilterType, $gallery_filter: GalleryFilterType) {
findGalleries(gallery_filter: $gallery_filter, filter: $filter) {
count
galleries {
...GalleryData

View File

@@ -1,9 +1,3 @@
query FindTag($id: ID!) {
findTag(id: $id) {
...TagData
}
}
query MarkerStrings($q: String, $sort: String) {
markerStrings(q: $q, sort: $sort) {
id

View File

@@ -0,0 +1,25 @@
query Plugins {
plugins {
id
name
description
url
version
tasks {
name
description
}
}
}
query PluginTasks {
pluginTasks {
name
description
plugin {
id
name
}
}
}

View File

@@ -53,4 +53,12 @@ query ParseSceneFilenames($filter: FindFilterType!, $config: SceneParserInput!)
tag_ids
}
}
}
query SceneStreams($id: ID!) {
sceneStreams(id: $id) {
url
mime_type
label
}
}
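
A hedged sketch of exercising the new sceneStreams query from Go over plain HTTP. The /graphql endpoint path and the port are assumptions (9999 matches the EXPOSE in the Dockerfiles above):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Wrap the query and variables in a standard GraphQL POST body.
	body, _ := json.Marshal(map[string]interface{}{
		"query":     `query SceneStreams($id: ID!) { sceneStreams(id: $id) { url mime_type label } }`,
		"variables": map[string]string{"id": "42"},
	})
	resp, err := http.Post("http://localhost:9999/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Data struct {
			SceneStreams []struct {
				URL      string `json:"url"`
				MimeType string `json:"mime_type"`
				Label    string `json:"label"`
			} `json:"sceneStreams"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	for _, s := range out.Data.SceneStreams {
		fmt.Println(s.Label, s.MimeType, s.URL)
	}
}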

View File

@@ -20,6 +20,17 @@ query ListSceneScrapers {
}
}
query ListMovieScrapers {
listMovieScrapers {
id
name
movie {
urls
supported_scrapes
}
}
}
query ScrapePerformerList($scraper_id: ID!, $query: String!) {
scrapePerformerList(scraper_id: $scraper_id, query: $query) {
...ScrapedPerformerData
@@ -48,4 +59,10 @@ query ScrapeSceneURL($url: String!) {
scrapeSceneURL(url: $url) {
...ScrapedSceneData
}
}
query ScrapeMovieURL($url: String!) {
scrapeMovieURL(url: $url) {
...ScrapedMovieData
}
}

View File

@@ -1,5 +1,5 @@
query FindStudios($filter: FindFilterType) {
findStudios(filter: $filter) {
query FindStudios($filter: FindFilterType, $studio_filter: StudioFilterType ) {
findStudios(filter: $filter, studio_filter: $studio_filter) {
count
studios {
...StudioData

View File

@@ -0,0 +1,14 @@
query FindTags($filter: FindFilterType, $tag_filter: TagFilterType ) {
findTags(filter: $filter, tag_filter: $tag_filter) {
count
tags {
...TagData
}
}
}
query FindTag($id: ID!) {
findTag(id: $id) {
...TagData
}
}

View File

@@ -2,11 +2,16 @@
type Query {
"""Find a scene by ID or Checksum"""
findScene(id: ID, checksum: String): Scene
findSceneByHash(input: SceneHashInput!): Scene
"""A function which queries Scene objects"""
findScenes(scene_filter: SceneFilterType, scene_ids: [Int!], filter: FindFilterType): FindScenesResultType!
findScenesByPathRegex(filter: FindFilterType): FindScenesResultType!
"""Return valid stream paths"""
sceneStreams(id: ID): [SceneStreamEndpoint!]!
parseSceneFilenames(filter: FindFilterType, config: SceneParserInput!): SceneParserResultType!
"""A function which queries SceneMarker objects"""
@@ -20,7 +25,7 @@ type Query {
"""Find a studio by ID"""
findStudio(id: ID!): Studio
"""A function which queries Studio objects"""
findStudios(filter: FindFilterType): FindStudiosResultType!
findStudios(studio_filter: StudioFilterType, filter: FindFilterType): FindStudiosResultType!
"""Find a movie by ID"""
findMovie(id: ID!): Movie
@@ -28,9 +33,10 @@ type Query {
findMovies(movie_filter: MovieFilterType, filter: FindFilterType): FindMoviesResultType!
findGallery(id: ID!): Gallery
findGalleries(filter: FindFilterType): FindGalleriesResultType!
findGalleries(gallery_filter: GalleryFilterType, filter: FindFilterType): FindGalleriesResultType!
findTag(id: ID!): Tag
findTags(tag_filter: TagFilterType, filter: FindFilterType): FindTagsResultType!
"""Retrieve random scene markers for the wall"""
markerWall(q: String): [SceneMarker!]!
@@ -53,6 +59,8 @@ type Query {
"""List available scrapers"""
listPerformerScrapers: [Scraper!]!
listSceneScrapers: [Scraper!]!
listMovieScrapers: [Scraper!]!
"""Scrape a list of performers based on name"""
scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]!
"""Scrapes a complete performer record based on a scrapePerformerList result"""
@@ -63,12 +71,21 @@ type Query {
scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene
"""Scrapes a complete performer record based on a URL"""
scrapeSceneURL(url: String!): ScrapedScene
"""Scrapes a complete movie record based on a URL"""
scrapeMovieURL(url: String!): ScrapedMovie
"""Scrape a performer using Freeones"""
scrapeFreeones(performer_name: String!): ScrapedPerformer
"""Scrape a list of performers from a query"""
scrapeFreeonesPerformerList(query: String!): [String!]!
# Plugins
"""List loaded plugins"""
plugins: [Plugin!]
"""List available plugin operations"""
pluginTasks: [PluginTask!]
# Config
"""Returns the current, complete configuration"""
configuration: ConfigResult!
@@ -104,6 +121,7 @@ type Mutation {
sceneUpdate(input: SceneUpdateInput!): Scene
bulkSceneUpdate(input: BulkSceneUpdateInput!): [Scene!]
sceneDestroy(input: SceneDestroyInput!): Boolean!
scenesDestroy(input: ScenesDestroyInput!): Boolean!
scenesUpdate(input: [SceneUpdateInput!]!): [Scene]
"""Increments the o-counter for a scene. Returns the new value"""
@@ -152,6 +170,15 @@ type Mutation {
metadataAutoTag(input: AutoTagMetadataInput!): String!
"""Clean metadata. Returns the job ID"""
metadataClean: String!
"""Migrate generated files for the current hash naming"""
migrateHashNaming: String!
"""Reload scrapers"""
reloadScrapers: Boolean!
"""Run plugin task. Returns the job ID"""
runPluginTask(plugin_id: ID!, task_name: String!, args: [PluginArgInput!]): String!
reloadPlugins: Boolean!
stopJob: Boolean!
}

View File

@@ -7,6 +7,21 @@ enum StreamingResolutionEnum {
"Original", ORIGINAL
}
enum PreviewPreset {
"X264_ULTRAFAST", ultrafast
"X264_VERYFAST", veryfast
"X264_FAST", fast
"X264_MEDIUM", medium
"X264_SLOW", slow
"X264_SLOWER", slower
"X264_VERYSLOW", veryslow
}
enum HashAlgorithm {
MD5
"oshash", OSHASH
}
input ConfigGeneralInput {
"""Array of file paths to content"""
stashes: [String!]
@@ -16,14 +31,24 @@ input ConfigGeneralInput {
generatedPath: String
"""Path to cache"""
cachePath: String
"""Whether to calculate MD5 checksums for scene video files"""
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of segments in a preview file"""
previewSegments: Int
"""Preview segment duration, in seconds"""
previewSegmentDuration: Float
"""Duration of start of video to exclude when generating previews"""
previewExcludeStart: String
"""Duration of end of video to exclude when generating previews"""
previewExcludeEnd: String
"""Preset when generating preview"""
previewPreset: PreviewPreset
"""Max generated transcode size"""
maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum
"""Force MKV as supported format"""
forceMkv: Boolean!
"""Force HEVC as a supported codec"""
forceHevc: Boolean!
"""Username"""
username: String
"""Password"""
@@ -42,6 +67,8 @@ input ConfigGeneralInput {
excludes: [String!]
"""Scraper user agent string"""
scraperUserAgent: String
"""Scraper CDP path. Path to chrome executable or remote address"""
scraperCDPPath: String
}
type ConfigGeneralResult {
@@ -53,14 +80,24 @@ type ConfigGeneralResult {
generatedPath: String!
"""Path to cache"""
cachePath: String!
"""Whether to calculate MD5 checksums for scene video files"""
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of segments in a preview file"""
previewSegments: Int!
"""Preview segment duration, in seconds"""
previewSegmentDuration: Float!
"""Duration of start of video to exclude when generating previews"""
previewExcludeStart: String!
"""Duration of end of video to exclude when generating previews"""
previewExcludeEnd: String!
"""Preset when generating preview"""
previewPreset: PreviewPreset!
"""Max generated transcode size"""
maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum
"""Force MKV as supported format"""
forceMkv: Boolean!
"""Force HEVC as a supported codec"""
forceHevc: Boolean!
"""Username"""
username: String!
"""Password"""
@@ -79,6 +116,8 @@ type ConfigGeneralResult {
excludes: [String!]!
"""Scraper user agent string"""
scraperUserAgent: String
"""Scraper CDP path. Path to chrome executable or remote address"""
scraperCDPPath: String
}
input ConfigInterfaceInput {

View File

@@ -89,6 +89,31 @@ input SceneFilterType {
input MovieFilterType {
"""Filter to only include movies with this studio"""
studios: MultiCriterionInput
"""Filter to only include movies missing this property"""
is_missing: String
}
input StudioFilterType {
"""Filter to only include studios with this parent studio"""
parents: MultiCriterionInput
"""Filter to only include studios missing this property"""
is_missing: String
}
input GalleryFilterType {
"""Filter to only include galleries missing this property"""
is_missing: String
}
input TagFilterType {
"""Filter to only include tags missing this property"""
is_missing: String
"""Filter by number of scenes with this tag"""
scene_count: IntCriterionInput
"""Filter by number of markers with this tag"""
marker_count: IntCriterionInput
}
enum CriterionModifier {

View File

@@ -4,6 +4,7 @@ type Gallery {
checksum: String!
path: String!
title: String
scene: Scene
"""The files in the gallery"""
files: [GalleryFilesType!]! # Resolver

View File

@@ -1,12 +1,35 @@
input GenerateMetadataInput {
sprites: Boolean!
previews: Boolean!
previewPreset: PreviewPreset
imagePreviews: Boolean!
previewOptions: GeneratePreviewOptionsInput
markers: Boolean!
transcodes: Boolean!
"""gallery thumbnails for cache usage"""
thumbnails: Boolean!
"""scene ids to generate for"""
sceneIDs: [ID!]
"""marker ids to generate for"""
markerIDs: [ID!]
"""gallery ids to generate for"""
galleryIDs: [ID!]
"""overwrite existing media"""
overwrite: Boolean
}
input GeneratePreviewOptionsInput {
"""Number of segments in a preview file"""
previewSegments: Int
"""Preview segment duration, in seconds"""
previewSegmentDuration: Float
"""Duration of start of video to exclude when generating previews"""
previewExcludeStart: String
"""Duration of end of video to exclude when generating previews"""
previewExcludeEnd: String
"""Preset when generating preview"""
previewPreset: PreviewPreset
}
input ScanMetadataInput {
@@ -27,13 +50,3 @@ type MetadataUpdateStatus {
status: String!
message: String!
}
enum PreviewPreset {
"X264_ULTRAFAST", ultrafast
"X264_VERYFAST", veryfast
"X264_FAST", fast
"X264_MEDIUM", medium
"X264_SLOW", slow
"X264_SLOWER", slower
"X264_VERYSLOW", veryslow
}
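
The per-run previewOptions block lets a generate task override the stored preview settings, replacing the old single previewPreset field. An illustrative metadataGenerate call using the nested input, written as a GraphQL document in a Go constant (field values are invented, and the mutation signature is assumed to be input: GenerateMetadataInput!):

package main

const generateMutation = `
mutation {
  metadataGenerate(input: {
    sprites: true
    previews: true
    imagePreviews: false
    previewOptions: { previewSegments: 12, previewSegmentDuration: 0.75, previewPreset: slow }
    markers: true
    transcodes: false
    thumbnails: false
    overwrite: true
  })
}`

func main() { println(generateMutation) }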

View File

@@ -4,6 +4,7 @@ enum GenderEnum {
TRANSGENDER_MALE
TRANSGENDER_FEMALE
INTERSEX
NON_BINARY
}
type Performer {

View File

@@ -0,0 +1,35 @@
type Plugin {
id: ID!
name: String!
description: String
url: String
version: String
tasks: [PluginTask!]
}
type PluginTask {
name: String!
description: String
plugin: Plugin!
}
type PluginResult {
error: String
result: String
}
input PluginArgInput {
key: String!
value: PluginValueInput
}
input PluginValueInput {
str: String
i: Int
b: Boolean
f: Float
o: [PluginArgInput!]
a: [PluginValueInput!]
}
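
PluginValueInput is deliberately recursive: o nests keyed arguments and a nests plain lists. A hypothetical variables document for the RunPluginTask mutation defined earlier, showing that nesting (the plugin id, task name, and argument names are invented):

package main

const runPluginTaskVars = `{
  "plugin_id": "example-plugin",
  "task_name": "Clean",
  "args": [
    {"key": "mode",  "value": {"str": "dry-run"}},
    {"key": "paths", "value": {"a": [{"str": "/data/a"}, {"str": "/data/b"}]}}
  ]
}`

func main() { println(runPluginTaskVars) }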

View File

@@ -25,7 +25,8 @@ type SceneMovie {
type Scene {
id: ID!
checksum: String!
checksum: String
oshash: String
title: String
details: String
url: String
@@ -36,7 +37,6 @@ type Scene {
file: SceneFileType! # Resolver
paths: ScenePathsType! # Resolver
is_streamable: Boolean! # Resolver
scene_markers: [SceneMarker!]!
gallery: Gallery
@@ -99,6 +99,12 @@ input SceneDestroyInput {
delete_generated: Boolean
}
input ScenesDestroyInput {
ids: [ID!]!
delete_file: Boolean
delete_generated: Boolean
}
type FindScenesResultType {
count: Int!
scenes: [Scene!]!
@@ -132,4 +138,15 @@ type SceneParserResult {
type SceneParserResultType {
count: Int!
results: [SceneParserResult!]!
}
input SceneHashInput {
checksum: String
oshash: String
}
type SceneStreamEndpoint {
url: String!
mime_type: String
label: String
}

View File

@@ -1,3 +1,10 @@
type ScrapedMovieStudio {
"""Set if studio matched"""
id: ID
name: String!
url: String
}
"""A movie from a scraping operation..."""
type ScrapedMovie {
name: String
@@ -8,6 +15,11 @@ type ScrapedMovie {
director: String
url: String
synopsis: String
studio: ScrapedMovieStudio
"""This should be base64 encoded"""
front_image: String
back_image: String
}
input ScrapedMovieInput {
@@ -19,4 +31,4 @@ input ScrapedMovieInput {
director: String
url: String
synopsis: String
}

View File

@@ -20,12 +20,14 @@ type Scraper {
performer: ScraperSpec
"""Details for scene scraper"""
scene: ScraperSpec
"""Details for movie scraper"""
movie: ScraperSpec
}
type ScrapedScenePerformer {
"""Set if performer matched"""
id: ID
stored_id: ID
name: String!
gender: String
url: String
@@ -46,7 +48,7 @@ type ScrapedScenePerformer {
type ScrapedSceneMovie {
"""Set if movie matched"""
id: ID
stored_id: ID
name: String!
aliases: String
duration: String
@@ -59,14 +61,14 @@ type ScrapedSceneMovie {
type ScrapedSceneStudio {
"""Set if studio matched"""
id: ID
stored_id: ID
name: String!
url: String
}
type ScrapedSceneTag {
"""Set if tag matched"""
id: ID
stored_id: ID
name: String!
}

View File

@@ -3,7 +3,8 @@ type Studio {
checksum: String!
name: String!
url: String
parent_studio: Studio
child_studios: [Studio!]!
image_path: String # Resolver
scene_count: Int # Resolver
}
@@ -11,6 +12,7 @@ type Studio {
input StudioCreateInput {
name: String!
url: String
parent_id: ID
"""This should be base64 encoded"""
image: String
}
@@ -19,6 +21,7 @@ input StudioUpdateInput {
id: ID!
name: String
url: String
parent_id: ID,
"""This should be base64 encoded"""
image: String
}

View File

@@ -2,19 +2,31 @@ type Tag {
id: ID!
name: String!
image_path: String # Resolver
scene_count: Int # Resolver
scene_marker_count: Int # Resolver
}
input TagCreateInput {
name: String!
"""This should be base64 encoded"""
image: String
}
input TagUpdateInput {
id: ID!
name: String!
"""This should be base64 encoded"""
image: String
}
input TagDestroyInput {
id: ID!
}
type FindTagsResultType {
count: Int!
tags: [Tag!]!
}
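
Tags now accept the same base64-encoded image field as studios and movies. A hedged sketch of producing that value; the data-URI prefix is an assumption about what utils.ProcessBase64Image parses:

package main

import (
	"encoding/base64"
	"fmt"
	"io/ioutil"
)

func main() {
	raw, err := ioutil.ReadFile("tag.jpg") // any local image
	if err != nil {
		panic(err)
	}
	// Pass this string as the $image variable of TagCreate/TagUpdate.
	image := "data:image/jpeg;base64," + base64.StdEncoding.EncodeToString(raw)
	fmt.Println(len(image))
}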

View File

@@ -13,7 +13,12 @@ import (
func main() {
manager.Initialize()
database.Initialize(config.GetDatabasePath())
// perform the post-migration for new databases
if database.Initialize(config.GetDatabasePath()) {
manager.GetInstance().PostMigrate()
}
api.Start()
blockForever()
}

View File

@@ -11,4 +11,5 @@ const (
studioKey key = 3
movieKey key = 4
ContextUser key = 5
tagKey key = 6
)

View File

@@ -5,6 +5,7 @@ import (
"strings"
"github.com/gobuffalo/packr/v2"
"github.com/stashapp/stash/pkg/utils"
)
var performerBox *packr.Box
@@ -30,3 +31,19 @@ func getRandomPerformerImage(gender string) ([]byte, error) {
index := rand.Intn(len(imageFiles))
return box.Find(imageFiles[index])
}
func getRandomPerformerImageUsingName(name, gender string) ([]byte, error) {
var box *packr.Box
switch strings.ToUpper(gender) {
case "FEMALE":
box = performerBox
case "MALE":
box = performerBoxMale
default:
box = performerBox
}
imageFiles := box.List()
index := utils.IntFromString(name) % uint64(len(imageFiles))
return box.Find(imageFiles[index])
}
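
Keying the default image off the performer's name keeps it stable across requests, unlike the random fallback used at creation time. The utils.IntFromString helper is not shown in this diff; a plausible shape for such a helper, hashing the name to a stable uint64, would be:

package utils

import "hash/fnv"

// IntFromString derives a stable number from a string (sketch only; the
// real implementation in pkg/utils may differ).
func IntFromString(s string) uint64 {
	h := fnv.New64a()
	h.Write([]byte(s))
	return h.Sum64()
}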

View File

@@ -8,6 +8,7 @@ import (
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
)
type migrateData struct {
@@ -80,6 +81,9 @@ func doMigrateHandler(w http.ResponseWriter, r *http.Request) {
return
}
// perform post-migration operations
manager.GetInstance().PostMigrate()
// if no backup path was provided, then delete the created backup
if formBackupPath == "" {
err = os.Remove(backupPath)

View File

@@ -43,6 +43,22 @@ func (r *Resolver) Tag() models.TagResolver {
return &tagResolver{r}
}
func (r *Resolver) ScrapedSceneTag() models.ScrapedSceneTagResolver {
return &scrapedSceneTagResolver{r}
}
func (r *Resolver) ScrapedSceneMovie() models.ScrapedSceneMovieResolver {
return &scrapedSceneMovieResolver{r}
}
func (r *Resolver) ScrapedScenePerformer() models.ScrapedScenePerformerResolver {
return &scrapedScenePerformerResolver{r}
}
func (r *Resolver) ScrapedSceneStudio() models.ScrapedSceneStudioResolver {
return &scrapedSceneStudioResolver{r}
}
type mutationResolver struct{ *Resolver }
type queryResolver struct{ *Resolver }
type subscriptionResolver struct{ *Resolver }
@@ -54,6 +70,10 @@ type sceneMarkerResolver struct{ *Resolver }
type studioResolver struct{ *Resolver }
type movieResolver struct{ *Resolver }
type tagResolver struct{ *Resolver }
type scrapedSceneTagResolver struct{ *Resolver }
type scrapedSceneMovieResolver struct{ *Resolver }
type scrapedScenePerformerResolver struct{ *Resolver }
type scrapedSceneStudioResolver struct{ *Resolver }
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]*models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()

View File

@@ -2,6 +2,7 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
)
@@ -13,3 +14,12 @@ func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*mo
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
return obj.GetFiles(baseURL), nil
}
func (r *galleryResolver) Scene(ctx context.Context, obj *models.Gallery) (*models.Scene, error) {
if !obj.SceneID.Valid {
return nil, nil
}
qb := models.NewSceneQueryBuilder()
return qb.Find(int(obj.SceneID.Int64))
}

View File

@@ -4,11 +4,24 @@ import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *sceneResolver) Checksum(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Checksum.Valid {
return &obj.Checksum.String, nil
}
return nil, nil
}
func (r *sceneResolver) Oshash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.OSHash.Valid {
return &obj.OSHash.String, nil
}
return nil, nil
}
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Title.Valid {
return &obj.Title.String, nil
@@ -81,12 +94,6 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
}, nil
}
func (r *sceneResolver) IsStreamable(ctx context.Context, obj *models.Scene) (bool, error) {
// ignore error
ret, _ := manager.IsStreamable(obj)
return ret, nil
}
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]*models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)

View File

@@ -0,0 +1,23 @@
package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
)
func (r *scrapedSceneTagResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneTag) (*string, error) {
return obj.ID, nil
}
func (r *scrapedSceneMovieResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneMovie) (*string, error) {
return obj.ID, nil
}
func (r *scrapedScenePerformerResolver) StoredID(ctx context.Context, obj *models.ScrapedScenePerformer) (*string, error) {
return obj.ID, nil
}
func (r *scrapedSceneStudioResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneStudio) (*string, error) {
return obj.ID, nil
}

View File

@@ -2,6 +2,7 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
)
@@ -31,3 +32,17 @@ func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (*i
res, err := qb.CountByStudioID(obj.ID)
return &res, err
}
func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (*models.Studio, error) {
if !obj.ParentID.Valid {
return nil, nil
}
qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.ParentID.Int64), nil)
}
func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) ([]*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.FindChildren(obj.ID, nil)
}

View File

@@ -2,6 +2,8 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
)
@@ -22,3 +24,9 @@ func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (*i
count, err := qb.CountByTagID(obj.ID)
return &count, err
}
func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj.ID).GetTagImageURL()
return &imagePath, nil
}

View File

@@ -2,6 +2,7 @@ package api
import (
"context"
"errors"
"fmt"
"path/filepath"
@@ -45,6 +46,37 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Cache, input.CachePath)
}
if !input.CalculateMd5 && input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
}
if input.VideoFileNamingAlgorithm != config.GetVideoFileNamingAlgorithm() {
// validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(input.VideoFileNamingAlgorithm); err != nil {
return makeConfigGeneralResult(), err
}
config.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm)
}
config.Set(config.CalculateMD5, input.CalculateMd5)
if input.PreviewSegments != nil {
config.Set(config.PreviewSegments, *input.PreviewSegments)
}
if input.PreviewSegmentDuration != nil {
config.Set(config.PreviewSegmentDuration, *input.PreviewSegmentDuration)
}
if input.PreviewExcludeStart != nil {
config.Set(config.PreviewExcludeStart, *input.PreviewExcludeStart)
}
if input.PreviewExcludeEnd != nil {
config.Set(config.PreviewExcludeEnd, *input.PreviewExcludeEnd)
}
if input.PreviewPreset != nil {
config.Set(config.PreviewPreset, input.PreviewPreset.String())
}
if input.MaxTranscodeSize != nil {
config.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String())
}
@@ -52,8 +84,6 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
if input.MaxStreamingTranscodeSize != nil {
config.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String())
}
config.Set(config.ForceMKV, input.ForceMkv)
config.Set(config.ForceHEVC, input.ForceHevc)
if input.Username != nil {
config.Set(config.Username, input.Username)
@@ -89,8 +119,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Exclude, input.Excludes)
}
refreshScraperCache := false
if input.ScraperUserAgent != nil {
config.Set(config.ScraperUserAgent, input.ScraperUserAgent)
refreshScraperCache = true
}
if input.ScraperCDPPath != nil {
config.Set(config.ScraperCDPPath, input.ScraperCDPPath)
refreshScraperCache = true
}
if err := config.Write(); err != nil {
@@ -98,6 +135,9 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
}
manager.GetInstance().RefreshConfig()
if refreshScraperCache {
manager.GetInstance().RefreshScraperCache()
}
return makeConfigGeneralResult(), nil
}
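
The new scraperCDPPath setting points scrapers at a Chrome DevTools Protocol endpoint, either a local chrome binary or a remote debugging address. A hedged sketch of how a CDP-backed scraper can attach using the chromedp dependency added in go.mod (the actual wiring inside pkg/scraper is not shown in this section):

package main

import (
	"context"

	"github.com/chromedp/chromedp"
)

func main() {
	// Remote-address form of scraperCDPPath, e.g. a headless chrome container.
	allocCtx, cancelAlloc := chromedp.NewRemoteAllocator(context.Background(),
		"ws://127.0.0.1:9222/devtools/browser")
	defer cancelAlloc()

	ctx, cancel := chromedp.NewContext(allocCtx)
	defer cancel()

	var html string
	if err := chromedp.Run(ctx,
		chromedp.Navigate("https://example.com/"),
		chromedp.OuterHTML("html", &html),
	); err != nil {
		panic(err)
	}
}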

View File

@@ -23,7 +23,7 @@ func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
}
func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
manager.GetInstance().Generate(input.Sprites, input.Previews, input.PreviewPreset, input.ImagePreviews, input.Markers, input.Transcodes, input.Thumbnails)
manager.GetInstance().Generate(input)
return "todo", nil
}
@@ -37,6 +37,11 @@ func (r *mutationResolver) MetadataClean(ctx context.Context) (string, error) {
return "todo", nil
}
func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error) {
manager.GetInstance().MigrateHash()
return "todo", nil
}
func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
status := manager.GetInstance().Status
ret := models.MetadataUpdateStatus{

View File

@@ -19,32 +19,35 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
var backimageData []byte
var err error
if input.FrontImage == nil {
// HACK: if back image is being set, set the front image to the default.
// This is because we can't have a null front image with a non-null back image.
if input.FrontImage == nil && input.BackImage != nil {
input.FrontImage = &models.DefaultMovieImage
}
if input.BackImage == nil {
input.BackImage = &models.DefaultMovieImage
}
// Process the base 64 encoded image string
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
if err != nil {
return nil, err
if input.FrontImage != nil {
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
if err != nil {
return nil, err
}
}
// Process the base 64 encoded image string
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
if err != nil {
return nil, err
if input.BackImage != nil {
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
if err != nil {
return nil, err
}
}
// Populate a new movie from the input
currentTime := time.Now()
newMovie := models.Movie{
BackImage: backimageData,
FrontImage: frontimageData,
Checksum: checksum,
Name: sql.NullString{String: input.Name, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
if input.Aliases != nil {
@@ -90,6 +93,14 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
return nil, err
}
// update image table
if len(frontimageData) > 0 {
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
@@ -106,19 +117,22 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
ID: movieID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
}
var frontimageData []byte
var err error
frontImageIncluded := wasFieldIncluded(ctx, "front_image")
if input.FrontImage != nil {
_, frontimageData, err := utils.ProcessBase64Image(*input.FrontImage)
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
if err != nil {
return nil, err
}
updatedMovie.FrontImage = &frontimageData
}
backImageIncluded := wasFieldIncluded(ctx, "back_image")
var backimageData []byte
if input.BackImage != nil {
_, backimageData, err := utils.ProcessBase64Image(*input.BackImage)
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
if err != nil {
return nil, err
}
updatedMovie.BackImage = &backimageData
}
if input.Name != nil {
@@ -177,6 +191,43 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
return nil, err
}
// update image table
if frontImageIncluded || backImageIncluded {
if !frontImageIncluded {
frontimageData, err = qb.GetFrontImage(updatedMovie.ID, tx)
if err != nil {
tx.Rollback()
return nil, err
}
}
if !backImageIncluded {
backimageData, err = qb.GetBackImage(updatedMovie.ID, tx)
if err != nil {
tx.Rollback()
return nil, err
}
}
if len(frontimageData) == 0 && len(backimageData) == 0 {
// both images are being nulled. Destroy them.
if err := qb.DestroyMovieImages(movie.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
} else {
// HACK - if front image is null and back image is not null, then set the front image
// to the default image since we can't have a null front image and a non-null back image
if frontimageData == nil && backimageData != nil {
_, frontimageData, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err

View File

@@ -18,13 +18,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
var imageData []byte
var err error
if input.Image == nil {
gender := ""
if input.Gender != nil {
gender = input.Gender.String()
}
imageData, err = getRandomPerformerImage(gender)
} else {
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
}
@@ -35,7 +29,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
// Populate a new performer from the input
currentTime := time.Now()
newPerformer := models.Performer{
Image: imageData,
Checksum: checksum,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
@@ -103,6 +96,14 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
@@ -118,12 +119,14 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
ID: performerID,
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}
var imageData []byte
var err error
imageIncluded := wasFieldIncluded(ctx, "image")
if input.Image != nil {
_, imageData, err := utils.ProcessBase64Image(*input.Image)
_, imageData, err = utils.ProcessBase64Image(*input.Image)
if err != nil {
return nil, err
}
updatedPerformer.Image = imageData
}
if input.Name != nil {
// generate checksum from performer name rather than image
@@ -188,10 +191,24 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
qb := models.NewPerformerQueryBuilder()
performer, err := qb.Update(updatedPerformer, tx)
if err != nil {
_ = tx.Rollback()
tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
tx.Rollback()
return nil, err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyPerformerImage(performer.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
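wasFieldIncluded (not shown in this diff) is what lets these mutations tell an omitted image apart from an explicit null, since both arrive as a nil pointer on the input struct. A sketch of the idea over a raw GraphQL variables map, with illustrative plumbing rather than the actual gqlgen hooks:

package main

import "fmt"

// fieldIncluded reports whether the client sent the key at all, even
// if its value was JSON null. An omitted key means "leave unchanged";
// a present key with a nil value means "unset".
func fieldIncluded(vars map[string]interface{}, key string) bool {
	_, ok := vars[key]
	return ok
}

func main() {
	vars := map[string]interface{}{"image": nil} // explicit null
	fmt.Println(fieldIncluded(vars, "image"))    // true: destroy the stored image
	fmt.Println(fieldIncluded(vars, "name"))     // false: leave the name untouched
}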

View File

@@ -0,0 +1,48 @@
package api
import (
"context"
"net/http"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin/common"
)
func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*models.PluginArgInput) (string, error) {
currentUser := getCurrentUserID(ctx)
var cookie *http.Cookie
var err error
if currentUser != nil {
cookie, err = createSessionCookie(*currentUser)
if err != nil {
return "", err
}
}
serverConnection := common.StashServerConnection{
Scheme: "http",
Port: config.GetPort(),
SessionCookie: cookie,
Dir: config.GetConfigPath(),
}
if HasTLSConfig() {
serverConnection.Scheme = "https"
}
manager.GetInstance().RunPluginTask(pluginID, taskName, args, serverConnection)
return "todo", nil
}
func (r *mutationResolver) ReloadPlugins(ctx context.Context) (bool, error) {
err := manager.GetInstance().PluginCache.ReloadPlugins()
if err != nil {
logger.Errorf("Error reading plugin configs: %s", err.Error())
}
return true, nil
}
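The session cookie created here is what allows a plugin task to call back into Stash as the user who started it; the scheme flips to https when a TLS config is present. A sketch of how a task on the receiving end might use those connection details (field meanings from common.StashServerConnection above; the callback path is illustrative):

package main

import (
	"fmt"
	"net/http"
)

// callStash attaches the session cookie passed to the plugin task to a
// callback request against the originating server.
func callStash(scheme string, port int, cookie *http.Cookie) (*http.Response, error) {
	url := fmt.Sprintf("%s://localhost:%d/graphql", scheme, port)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	if cookie != nil {
		req.AddCookie(cookie) // authenticate as the invoking user
	}
	return http.DefaultClient.Do(req)
}

func main() {
	resp, err := callStash("http", 9999, nil)
	if err == nil {
		defer resp.Body.Close()
	}
	fmt.Println(err)
}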

View File

@@ -10,6 +10,7 @@ import (
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@@ -80,13 +81,15 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
if input.Date != nil {
updatedScene.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.CoverImage != nil && *input.CoverImage != "" {
var err error
_, coverImageData, err = utils.ProcessBase64Image(*input.CoverImage)
if err != nil {
return nil, err
}
updatedScene.Cover = &coverImageData
// update the cover after updating the scene
}
if input.Rating != nil {
@@ -111,6 +114,13 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
return nil, err
}
// update cover table
if len(coverImageData) > 0 {
if err := qb.UpdateSceneCover(sceneID, coverImageData, tx); err != nil {
return nil, err
}
}
// Clear the existing gallery value
gqb := models.NewGalleryQueryBuilder()
err = gqb.ClearGalleryId(sceneID, tx)
@@ -188,8 +198,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
// only update the cover image if provided and everything else was successful
if coverImageData != nil {
err = manager.SetSceneScreenshot(scene.Checksum, coverImageData)
err = manager.SetSceneScreenshot(scene.GetHash(config.GetVideoFileNamingAlgorithm()), coverImageData)
if err != nil {
return nil, err
}
@@ -409,7 +418,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// if delete generated is true, then delete the generated files
// for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene)
manager.DeleteGeneratedSceneFiles(scene, config.GetVideoFileNamingAlgorithm())
}
// if delete file is true, then delete the file as well
@@ -421,6 +430,48 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
return true, nil
}
func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) {
qb := models.NewSceneQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
var scenes []*models.Scene
for _, id := range input.Ids {
sceneID, _ := strconv.Atoi(id)
scene, err := qb.Find(sceneID)
if scene != nil {
scenes = append(scenes, scene)
}
err = manager.DestroyScene(sceneID, tx)
if err != nil {
tx.Rollback()
return false, err
}
}
if err := tx.Commit(); err != nil {
return false, err
}
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
for _, scene := range scenes {
// if delete generated is true, then delete the generated files
// for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene, fileNamingAlgo)
}
// if delete file is true, then delete the file as well
// if it fails, just log a message
if input.DeleteFile != nil && *input.DeleteFile {
manager.DeleteSceneFile(scene)
}
}
return true, nil
}
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
sceneID, _ := strconv.Atoi(input.SceneID)
@@ -479,7 +530,7 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
if scene != nil {
seconds := int(marker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
}
return true, nil
@@ -548,7 +599,7 @@ func changeMarker(ctx context.Context, changeType int, changedMarker models.Scen
if scene != nil {
seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
}
}
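Every generated-file path in this file now goes through scene.GetHash(config.GetVideoFileNamingAlgorithm()) instead of scene.Checksum, reflecting that scenes can be keyed by MD5 or by oshash. A sketch of what that selection amounts to (constant and field names are illustrative; the real types live in pkg/models):

package main

import "fmt"

type namingAlgo string

const (
	algoMD5    namingAlgo = "MD5"    // illustrative constant names
	algoOSHash namingAlgo = "OSHASH"
)

type scene struct {
	checksum string // MD5 of the file; optional once oshash exists
	oshash   string // 64-bit OpenSubtitles-style hash, hex encoded
}

// getHash mirrors the intent of Scene.GetHash: return whichever
// identifier the configured file naming algorithm selects.
func (s scene) getHash(algo namingAlgo) string {
	if algo == algoOSHash {
		return s.oshash
	}
	return s.checksum
}

func main() {
	s := scene{checksum: "d41d8cd98f00b204e9800998ecf8427e", oshash: "1c2e3d4f5a6b7c8d"}
	fmt.Println(s.getHash(algoOSHash))
}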

View File

@@ -0,0 +1,17 @@
package api
import (
"context"
"github.com/stashapp/stash/pkg/manager"
)
func (r *mutationResolver) ReloadScrapers(ctx context.Context) (bool, error) {
err := manager.GetInstance().ScraperCache.ReloadScrapers()
if err != nil {
return false, err
}
return true, nil
}

View File

@@ -7,6 +7,7 @@ import (
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@@ -31,7 +32,6 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
// Populate a new studio from the input
currentTime := time.Now()
newStudio := models.Studio{
Image: imageData,
Checksum: checksum,
Name: sql.NullString{String: input.Name, Valid: true},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
@@ -40,6 +40,10 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
if input.URL != nil {
newStudio.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.ParentID != nil {
parentID, _ := strconv.ParseInt(*input.ParentID, 10, 64)
newStudio.ParentID = sql.NullInt64{Int64: parentID, Valid: true}
}
// Start the transaction and save the studio
tx := database.DB.MustBeginTx(ctx, nil)
@@ -50,6 +54,14 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
@@ -61,36 +73,68 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
// Populate studio from the input
studioID, _ := strconv.Atoi(input.ID)
updatedStudio := models.Studio{
updatedStudio := models.StudioPartial{
ID: studioID,
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
}
var imageData []byte
imageIncluded := wasFieldIncluded(ctx, "image")
if input.Image != nil {
_, imageData, err := utils.ProcessBase64Image(*input.Image)
var err error
_, imageData, err = utils.ProcessBase64Image(*input.Image)
if err != nil {
return nil, err
}
updatedStudio.Image = imageData
}
if input.Name != nil {
// generate checksum from studio name rather than image
checksum := utils.MD5FromString(*input.Name)
updatedStudio.Name = sql.NullString{String: *input.Name, Valid: true}
updatedStudio.Checksum = checksum
updatedStudio.Name = &sql.NullString{String: *input.Name, Valid: true}
updatedStudio.Checksum = &checksum
}
if input.URL != nil {
updatedStudio.URL = sql.NullString{String: *input.URL, Valid: true}
updatedStudio.URL = &sql.NullString{String: *input.URL, Valid: true}
}
if input.ParentID != nil {
parentID, _ := strconv.ParseInt(*input.ParentID, 10, 64)
updatedStudio.ParentID = &sql.NullInt64{Int64: parentID, Valid: true}
} else {
// parent studio must be nullable
updatedStudio.ParentID = &sql.NullInt64{Valid: false}
}
// Start the transaction and save the studio
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewStudioQueryBuilder()
if err := manager.ValidateModifyStudio(updatedStudio, tx); err != nil {
tx.Rollback()
return nil, err
}
studio, err := qb.Update(updatedStudio, tx)
if err != nil {
_ = tx.Rollback()
tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
tx.Rollback()
return nil, err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyStudioImage(studio.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err

View File

@@ -2,10 +2,14 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/models"
"fmt"
"strconv"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreateInput) (*models.Tag, error) {
@@ -17,15 +21,41 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
// Start the transaction and save the studio
var imageData []byte
var err error
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
if err != nil {
return nil, err
}
}
// Start the transaction and save the tag
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewTagQueryBuilder()
// ensure name is unique
if err := manager.EnsureTagNameUnique(newTag, tx); err != nil {
tx.Rollback()
return nil, err
}
tag, err := qb.Create(newTag, tx)
if err != nil {
_ = tx.Rollback()
tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateTagImage(tag.ID, imageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
@@ -43,15 +73,61 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}
var imageData []byte
var err error
imageIncluded := wasFieldIncluded(ctx, "image")
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
if err != nil {
return nil, err
}
}
// Start the transaction and save the tag
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewTagQueryBuilder()
// ensure name is unique
existing, err := qb.Find(tagID, tx)
if err != nil {
tx.Rollback()
return nil, err
}
if existing == nil {
tx.Rollback()
return nil, fmt.Errorf("Tag with ID %d not found", tagID)
}
if existing.Name != updatedTag.Name {
if err := manager.EnsureTagNameUnique(updatedTag, tx); err != nil {
tx.Rollback()
return nil, err
}
}
tag, err := qb.Update(updatedTag, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateTagImage(tag.ID, imageData, tx); err != nil {
tx.Rollback()
return nil, err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyTagImage(tag.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
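Each error path above repeats the rollback-then-return dance, and the diff also normalizes `_ = tx.Rollback()` down to `tx.Rollback()` in several spots. A small helper in the same spirit, not part of this PR, would centralize the pattern:

package main

import (
	"database/sql"
	"fmt"
)

// rollbackOnErr wraps one transactional step: on failure the
// transaction is rolled back (best effort) and the step's error is
// returned unchanged. This sketch uses database/sql; the resolvers use
// sqlx, whose Tx behaves the same way here.
func rollbackOnErr(tx *sql.Tx, err error) error {
	if err != nil {
		_ = tx.Rollback() // the step's error is the one worth reporting
		return err
	}
	return nil
}

func main() {
	// with a nil error nothing is rolled back, so a nil Tx is safe here
	fmt.Println(rollbackOnErr(nil, nil)) // <nil>
}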

View File

@@ -40,16 +40,22 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
maxStreamingTranscodeSize := config.GetMaxStreamingTranscodeSize()
scraperUserAgent := config.GetScraperUserAgent()
scraperCDPPath := config.GetScraperCDPPath()
return &models.ConfigGeneralResult{
Stashes: config.GetStashPaths(),
DatabasePath: config.GetDatabasePath(),
GeneratedPath: config.GetGeneratedPath(),
CachePath: config.GetCachePath(),
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
PreviewSegments: config.GetPreviewSegments(),
PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
PreviewExcludeStart: config.GetPreviewExcludeStart(),
PreviewExcludeEnd: config.GetPreviewExcludeEnd(),
PreviewPreset: config.GetPreviewPreset(),
MaxTranscodeSize: &maxTranscodeSize,
MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
ForceMkv: config.GetForceMKV(),
ForceHevc: config.GetForceHEVC(),
Username: config.GetUsername(),
Password: config.GetPasswordHash(),
MaxSessionAge: config.GetMaxSessionAge(),
@@ -59,6 +65,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
LogAccess: config.GetLogAccess(),
Excludes: config.GetExcludes(),
ScraperUserAgent: &scraperUserAgent,
ScraperCDPPath: &scraperCDPPath,
}
}

View File

@@ -2,8 +2,9 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gallery, error) {
@@ -12,9 +13,9 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gal
return qb.Find(idInt)
}
func (r *queryResolver) FindGalleries(ctx context.Context, filter *models.FindFilterType) (*models.FindGalleriesResultType, error) {
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (*models.FindGalleriesResultType, error) {
qb := models.NewGalleryQueryBuilder()
galleries, total := qb.Query(filter)
galleries, total := qb.Query(galleryFilter, filter)
return &models.FindGalleriesResultType{
Count: total,
Galleries: galleries,

View File

@@ -21,6 +21,28 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
return scene, err
}
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
var scene *models.Scene
var err error
if input.Checksum != nil {
scene, err = qb.FindByChecksum(*input.Checksum)
if err != nil {
return nil, err
}
}
if scene == nil && input.Oshash != nil {
scene, err = qb.FindByOSHash(*input.Oshash)
if err != nil {
return nil, err
}
}
return scene, err
}
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (*models.FindScenesResultType, error) {
qb := models.NewSceneQueryBuilder()
scenes, total := qb.Query(sceneFilter, filter)

View File

@@ -12,9 +12,9 @@ func (r *queryResolver) FindStudio(ctx context.Context, id string) (*models.Stud
return qb.Find(idInt, nil)
}
func (r *queryResolver) FindStudios(ctx context.Context, filter *models.FindFilterType) (*models.FindStudiosResultType, error) {
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (*models.FindStudiosResultType, error) {
qb := models.NewStudioQueryBuilder()
studios, total := qb.Query(filter)
studios, total := qb.Query(studioFilter, filter)
return &models.FindStudiosResultType{
Count: total,
Studios: studios,

View File

@@ -2,8 +2,9 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindTag(ctx context.Context, id string) (*models.Tag, error) {
@@ -12,6 +13,15 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (*models.Tag, er
return qb.Find(idInt, nil)
}
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (*models.FindTagsResultType, error) {
qb := models.NewTagQueryBuilder()
tags, total := qb.Query(tagFilter, filter)
return &models.FindTagsResultType{
Count: total,
Tags: tags,
}, nil
}
func (r *queryResolver) AllTags(ctx context.Context) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.All()

View File

@@ -0,0 +1,16 @@
package api
import (
"context"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) Plugins(ctx context.Context) ([]*models.Plugin, error) {
return manager.GetInstance().PluginCache.ListPlugins(), nil
}
func (r *queryResolver) PluginTasks(ctx context.Context) ([]*models.PluginTask, error) {
return manager.GetInstance().PluginCache.ListPluginTasks(), nil
}

View File

@@ -0,0 +1,31 @@
package api
import (
"context"
"errors"
"strconv"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models.SceneStreamEndpoint, error) {
// find the scene
qb := models.NewSceneQueryBuilder()
idInt, _ := strconv.Atoi(*id)
scene, err := qb.Find(idInt)
if err != nil {
return nil, err
}
if scene == nil {
return nil, errors.New("nil scene")
}
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID)
return manager.GetSceneStreamPaths(scene, builder.GetStreamURL())
}
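GetSceneStreamPaths (implementation not in this diff) is the query-side counterpart of the per-container stream routes added to routes_scene.go below. A sketch of the endpoint-to-treatment mapping those routes register (descriptions grounded in the handlers; the map itself is illustrative):

package main

import "fmt"

// endpointTreatment mirrors the routes registered in routes_scene.go:
// /stream serves the file as-is, the suffixed endpoints transcode.
var endpointTreatment = map[string]string{
	"/stream":      "direct file serve (StreamDirect)",
	"/stream.mkv":  "audio-only transcode, mkv sources only (CodecMKVAudio)",
	"/stream.webm": "VP9 transcode (CodecVP9)",
	"/stream.m3u8": "HLS playlist (WriteHLSPlaylist)",
	"/stream.ts":   "MPEG-TS transcode (CodecHLS)",
	"/stream.mp4":  "H264 transcode (CodecH264)",
}

func main() {
	for endpoint, treatment := range endpointTreatment {
		fmt.Printf("%-13s -> %s\n", endpoint, treatment)
	}
}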

View File

@@ -3,6 +3,7 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper"
)
@@ -12,12 +13,12 @@ func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name strin
scrapedPerformer := models.ScrapedPerformerInput{
Name: &performer_name,
}
return scraper.GetFreeonesScraper().ScrapePerformer(scrapedPerformer)
return manager.GetInstance().ScraperCache.ScrapePerformer(scraper.FreeonesScraperID, scrapedPerformer)
}
// deprecated
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
scrapedPerformers, err := scraper.GetFreeonesScraper().ScrapePerformerNames(query)
scrapedPerformers, err := manager.GetInstance().ScraperCache.ScrapePerformerList(scraper.FreeonesScraperID, query)
if err != nil {
return nil, err
@@ -33,11 +34,15 @@ func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query s
}
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) {
return scraper.ListPerformerScrapers()
return manager.GetInstance().ScraperCache.ListPerformerScrapers(), nil
}
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) {
return scraper.ListSceneScrapers()
return manager.GetInstance().ScraperCache.ListSceneScrapers(), nil
}
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListMovieScrapers(), nil
}
func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
@@ -45,21 +50,25 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
return nil, nil
}
return scraper.ScrapePerformerList(scraperID, query)
return manager.GetInstance().ScraperCache.ScrapePerformerList(scraperID, query)
}
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
return scraper.ScrapePerformer(scraperID, scrapedPerformer)
return manager.GetInstance().ScraperCache.ScrapePerformer(scraperID, scrapedPerformer)
}
func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
return scraper.ScrapePerformerURL(url)
return manager.GetInstance().ScraperCache.ScrapePerformerURL(url)
}
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
return scraper.ScrapeScene(scraperID, scene)
return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, scene)
}
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
return scraper.ScrapeSceneURL(url)
return manager.GetInstance().ScraperCache.ScrapeSceneURL(url)
}
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
return manager.GetInstance().ScraperCache.ScrapeMovieURL(url)
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type movieRoutes struct{}
@@ -25,12 +26,28 @@ func (rs movieRoutes) Routes() chi.Router {
func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
movie := r.Context().Value(movieKey).(*models.Movie)
_, _ = w.Write(movie.FrontImage)
qb := models.NewMovieQueryBuilder()
image, _ := qb.GetFrontImage(movie.ID, nil)
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
_, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
utils.ServeImage(image, w, r)
}
func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
movie := r.Context().Value(movieKey).(*models.Movie)
_, _ = w.Write(movie.BackImage)
qb := models.NewMovieQueryBuilder()
image, _ := qb.GetBackImage(movie.ID, nil)
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
_, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
utils.ServeImage(image, w, r)
}
func MovieCtx(next http.Handler) http.Handler {
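The movie, performer, studio and tag image routes now all share one shape: read the blob from its image table, then fall back to a default when the blob is empty or when the request passes ?default=true. A condensed sketch of that handler pattern (the loader and fallback are stand-ins; utils.ServeImage handles the actual write in the real routes):

package main

import "net/http"

// imageHandler serves an entity image with a default fallback,
// mirroring the movie/performer/studio/tag route handlers.
func imageHandler(load func() []byte, fallback []byte) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		image := load()
		if len(image) == 0 || r.URL.Query().Get("default") == "true" {
			image = fallback
		}
		w.Header().Set("Content-Type", http.DetectContentType(image))
		_, _ = w.Write(image) // stand-in for utils.ServeImage
	}
}

func main() {
	http.Handle("/image", imageHandler(func() []byte { return nil }, []byte("fallback bytes")))
	_ = http.ListenAndServe("127.0.0.1:0", nil)
}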

View File

@@ -2,13 +2,12 @@ package api
import (
"context"
"crypto/md5"
"fmt"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"net/http"
"strconv"
"strings"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type performerRoutes struct{}
@@ -26,17 +25,15 @@ func (rs performerRoutes) Routes() chi.Router {
func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
performer := r.Context().Value(performerKey).(*models.Performer)
etag := fmt.Sprintf("%x", md5.Sum(performer.Image))
qb := models.NewPerformerQueryBuilder()
image, _ := qb.GetPerformerImage(performer.ID, nil)
if match := r.Header.Get("If-None-Match"); match != "" {
if strings.Contains(match, etag) {
w.WriteHeader(http.StatusNotModified)
return
}
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
image, _ = getRandomPerformerImageUsingName(performer.Name.String, performer.Gender.String)
}
w.Header().Add("Etag", etag)
_, _ = w.Write(performer.Image)
utils.ServeImage(image, w, r)
}
func PerformerCtx(next http.Handler) http.Handler {

View File

@@ -2,9 +2,7 @@ package api
import (
"context"
"io"
"net/http"
"os"
"strconv"
"strings"
@@ -24,8 +22,15 @@ func (rs sceneRoutes) Routes() chi.Router {
r.Route("/{sceneId}", func(r chi.Router) {
r.Use(SceneCtx)
r.Get("/stream", rs.Stream)
r.Get("/stream.mp4", rs.Stream)
// streaming endpoints
r.Get("/stream", rs.StreamDirect)
r.Get("/stream.mkv", rs.StreamMKV)
r.Get("/stream.webm", rs.StreamWebM)
r.Get("/stream.m3u8", rs.StreamHLS)
r.Get("/stream.ts", rs.StreamTS)
r.Get("/stream.mp4", rs.StreamMp4)
r.Get("/screenshot", rs.Screenshot)
r.Get("/preview", rs.Preview)
r.Get("/webp", rs.Webp)
@@ -42,41 +47,95 @@ func (rs sceneRoutes) Routes() chi.Router {
// region Handlers
func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
container := ""
func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
var container ffmpeg.Container
if scene.Format.Valid {
container = scene.Format.String
container = ffmpeg.Container(scene.Format.String)
} else { // container isn't in the DB
// shouldn't happen; fall back to ffprobe
tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return
return ffmpeg.Container("")
}
container = string(ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path))
container = ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path)
}
// detect if not a streamable file and try to transcode it instead
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum)
videoCodec := scene.VideoCodec.String
audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid {
audioCodec = ffmpeg.AudioCodec(scene.AudioCodec.String)
}
hasTranscode, _ := manager.HasTranscode(scene)
if ffmpeg.IsValidCodec(videoCodec) && ffmpeg.IsValidCombo(videoCodec, ffmpeg.Container(container)) && ffmpeg.IsValidAudioForContainer(audioCodec, ffmpeg.Container(container)) || hasTranscode {
manager.RegisterStream(filepath, &w)
http.ServeFile(w, r, filepath)
manager.WaitAndDeregisterStream(filepath, &w, r)
return container
}
func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
manager.RegisterStream(filepath, &w)
http.ServeFile(w, r, filepath)
manager.WaitAndDeregisterStream(filepath, &w, r)
}
func (rs sceneRoutes) StreamMKV(w http.ResponseWriter, r *http.Request) {
// only allow mkv streaming if the scene container is an mkv already
scene := r.Context().Value(sceneKey).(*models.Scene)
container := getSceneFileContainer(scene)
if container != ffmpeg.Matroska {
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte("not an mkv file"))
return
}
rs.streamTranscode(w, r, ffmpeg.CodecMKVAudio)
}
func (rs sceneRoutes) StreamWebM(w http.ResponseWriter, r *http.Request) {
rs.streamTranscode(w, r, ffmpeg.CodecVP9)
}
func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
rs.streamTranscode(w, r, ffmpeg.CodecH264)
}
func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
if err != nil {
logger.Errorf("[stream] error reading video file: %s", err.Error())
return
}
logger.Debug("Returning HLS playlist")
// getting the playlist manifest only
w.Header().Set("Content-Type", ffmpeg.MimeHLS)
var str strings.Builder
ffmpeg.WriteHLSPlaylist(*videoFile, r.URL.String(), &str)
requestByteRange := utils.CreateByteRange(r.Header.Get("Range"))
if requestByteRange.RawString != "" {
logger.Debugf("Requested range: %s", requestByteRange.RawString)
}
ret := requestByteRange.Apply([]byte(str.String()))
rangeStr := requestByteRange.ToHeaderValue(int64(str.Len()))
w.Header().Set("Content-Range", rangeStr)
w.Write(ret)
}
func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
rs.streamTranscode(w, r, ffmpeg.CodecHLS)
}
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, videoCodec ffmpeg.Codec) {
logger.Debugf("Streaming as %s", videoCodec.MimeType)
scene := r.Context().Value(sceneKey).(*models.Scene)
// needs to be transcoded
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
if err != nil {
logger.Errorf("[stream] error reading video file: %s", err.Error())
@@ -87,82 +146,54 @@ func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
startTime := r.Form.Get("start")
encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath)
var stream *ffmpeg.Stream
var stream io.ReadCloser
var process *os.Process
mimeType := ffmpeg.MimeWebm
if audioCodec == ffmpeg.MissingUnsupported {
// ffmpeg fails if it tries to transcode an unsupported audio codec
stream, process, err = encoder.StreamTranscodeVideo(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
} else {
copyVideo := false // try to be smart if the video to be transcoded is in a Matroska container
// mp4 always has supported audio, so it doesn't need to be checked,
// while mpeg_ts has seeking issues if we don't re-encode the video
if config.GetForceMKV() { // if MKV is forced as supported and the video codec is also supported, then only transcode the audio
if ffmpeg.Container(container) == ffmpeg.Matroska {
switch videoCodec {
case ffmpeg.H264, ffmpeg.Vp9, ffmpeg.Vp8:
copyVideo = true
case ffmpeg.Hevc:
if config.GetForceHEVC() {
copyVideo = true
}
}
}
}
if copyVideo { // copy video stream instead of transcoding it
stream, process, err = encoder.StreamMkvTranscodeAudio(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
mimeType = ffmpeg.MimeMkv
} else {
stream, process, err = encoder.StreamTranscode(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
}
audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid {
audioCodec = ffmpeg.AudioCodec(scene.AudioCodec.String)
}
options := ffmpeg.GetTranscodeStreamOptions(*videoFile, videoCodec, audioCodec)
options.StartTime = startTime
options.MaxTranscodeSize = config.GetMaxStreamingTranscodeSize()
encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath)
stream, err = encoder.GetTranscodeStream(options)
if err != nil {
logger.Errorf("[stream] error transcoding video file: %s", err.Error())
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte(err.Error()))
return
}
w.WriteHeader(http.StatusOK)
w.Header().Set("Content-Type", mimeType)
logger.Infof("[stream] transcoding video file to %s", mimeType)
// handle if client closes the connection
notify := r.Context().Done()
go func() {
<-notify
logger.Info("[stream] client closed the connection. Killing stream process.")
process.Kill()
}()
_, err = io.Copy(w, stream)
if err != nil {
logger.Errorf("[stream] error serving transcoded video file: %s", err.Error())
}
stream.Serve(w, r)
}
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum)
http.ServeFile(w, r, filepath)
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
// fall back to the scene image blob if the file isn't present
screenshotExists, _ := utils.FileExists(filepath)
if screenshotExists {
http.ServeFile(w, r, filepath)
} else {
qb := models.NewSceneQueryBuilder()
cover, _ := qb.GetSceneCover(scene.ID, nil)
utils.ServeImage(cover, w, r)
}
}
func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.Checksum)
http.ServeFile(w, r, filepath)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
utils.ServeFileNoCache(w, r, filepath)
}
func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.Checksum)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
@@ -218,14 +249,14 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "text/vtt")
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.Checksum)
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "image/jpeg")
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.Checksum)
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
@@ -239,7 +270,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
http.Error(w, http.StatusText(404), 404)
return
}
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, int(sceneMarker.Seconds))
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
http.ServeFile(w, r, filepath)
}
@@ -253,7 +284,7 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
http.Error(w, http.StatusText(404), 404)
return
}
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, int(sceneMarker.Seconds))
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
// If the image doesn't exist, send the placeholder
exists, _ := utils.FileExists(filepath)
@@ -275,15 +306,19 @@ func SceneCtx(next http.Handler) http.Handler {
sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)
var scene *models.Scene
var err error
qb := models.NewSceneQueryBuilder()
if sceneID == 0 {
scene, err = qb.FindByChecksum(sceneIdentifierQueryParam)
// determine checksum/oshash by the length of the query param
if len(sceneIdentifierQueryParam) == 32 {
scene, _ = qb.FindByChecksum(sceneIdentifierQueryParam)
} else {
scene, _ = qb.FindByOSHash(sceneIdentifierQueryParam)
}
} else {
scene, err = qb.Find(sceneID)
scene, _ = qb.Find(sceneID)
}
if err != nil {
if scene == nil {
http.Error(w, http.StatusText(404), 404)
return
}
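SceneCtx now resolves a scene from either hash in the URL, dispatching on length: an MD5 checksum is 32 hex characters, while an oshash is 16 (it is a 64-bit hash). A sketch of that dispatch on its own:

package main

import "fmt"

// hashKind reports which identifier a scene URL parameter looks like:
// an MD5 digest is 16 bytes (32 hex chars), an oshash is 8 bytes (16).
func hashKind(param string) string {
	if len(param) == 32 {
		return "checksum"
	}
	return "oshash"
}

func main() {
	fmt.Println(hashKind("d41d8cd98f00b204e9800998ecf8427e")) // checksum
	fmt.Println(hashKind("1c2e3d4f5a6b7c8d"))                 // oshash
}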

View File

@@ -2,13 +2,12 @@ package api
import (
"context"
"crypto/md5"
"fmt"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"net/http"
"strconv"
"strings"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type studioRoutes struct{}
@@ -26,22 +25,15 @@ func (rs studioRoutes) Routes() chi.Router {
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
studio := r.Context().Value(studioKey).(*models.Studio)
etag := fmt.Sprintf("%x", md5.Sum(studio.Image))
if match := r.Header.Get("If-None-Match"); match != "" {
if strings.Contains(match, etag) {
w.WriteHeader(http.StatusNotModified)
return
}
qb := models.NewStudioQueryBuilder()
image, _ := qb.GetStudioImage(studio.ID, nil)
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
_, image, _ = utils.ProcessBase64Image(models.DefaultStudioImage)
}
contentType := http.DetectContentType(studio.Image)
if contentType == "text/xml; charset=utf-8" || contentType == "text/plain; charset=utf-8" {
contentType = "image/svg+xml"
}
w.Header().Set("Content-Type", contentType)
w.Header().Add("Etag", etag)
_, _ = w.Write(studio.Image)
utils.ServeImage(image, w, r)
}
func StudioCtx(next http.Handler) http.Handler {

58
pkg/api/routes_tag.go Normal file
View File

@@ -0,0 +1,58 @@
package api
import (
"context"
"net/http"
"strconv"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type tagRoutes struct{}
func (rs tagRoutes) Routes() chi.Router {
r := chi.NewRouter()
r.Route("/{tagId}", func(r chi.Router) {
r.Use(TagCtx)
r.Get("/image", rs.Image)
})
return r
}
func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
tag := r.Context().Value(tagKey).(*models.Tag)
qb := models.NewTagQueryBuilder()
image, _ := qb.GetTagImage(tag.ID, nil)
// use default image if not present
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
image = models.DefaultTagImage
}
utils.ServeImage(image, w, r)
}
func TagCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
tagID, err := strconv.Atoi(chi.URLParam(r, "tagId"))
if err != nil {
http.Error(w, http.StatusText(404), 404)
return
}
qb := models.NewTagQueryBuilder()
tag, err := qb.Find(tagID, nil)
if err != nil {
http.Error(w, http.StatusText(404), 404)
return
}
ctx := context.WithValue(r.Context(), tagKey, tag)
next.ServeHTTP(w, r.WithContext(ctx))
})
}

View File

@@ -154,6 +154,7 @@ func Start() {
r.Mount("/scene", sceneRoutes{}.Routes())
r.Mount("/studio", studioRoutes{}.Routes())
r.Mount("/movie", movieRoutes{}.Routes())
r.Mount("/tag", tagRoutes{}.Routes())
r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/css")
@@ -251,6 +252,24 @@ func Start() {
})
startThumbCache()
// Serve static folders
customServedFolders := config.GetCustomServedFolders()
if customServedFolders != nil {
r.HandleFunc("/custom/*", func(w http.ResponseWriter, r *http.Request) {
r.URL.Path = strings.Replace(r.URL.Path, "/custom", "", 1)
// map the path to the applicable filesystem location
var dir string
r.URL.Path, dir = customServedFolders.GetFilesystemLocation(r.URL.Path)
if dir != "" {
http.FileServer(http.Dir(dir)).ServeHTTP(w, r)
} else {
http.NotFound(w, r)
}
})
}
// Serve the web app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
@@ -338,6 +357,15 @@ func makeTLSConfig() *tls.Config {
return tlsConfig
}
func HasTLSConfig() bool {
ret, _ := utils.FileExists(paths.GetSSLCert())
if ret {
ret, _ = utils.FileExists(paths.GetSSLKey())
}
return ret
}
type contextKey struct {
name string
}
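The /custom/* handler above strips the prefix and asks the configured folders for a filesystem mapping. A self-contained sketch of the same idea using a plain map (GetFilesystemLocation's real lookup is driven by configuration):

package main

import (
	"net/http"
	"strings"
)

// customFolders stands in for the configured custom served folders:
// URL prefix to filesystem directory.
var customFolders = map[string]string{
	"/artwork": "/data/artwork",
}

func customHandler(w http.ResponseWriter, r *http.Request) {
	p := strings.Replace(r.URL.Path, "/custom", "", 1)
	for prefix, dir := range customFolders {
		if strings.HasPrefix(p, prefix) {
			r.URL.Path = strings.TrimPrefix(p, prefix)
			http.FileServer(http.Dir(dir)).ServeHTTP(w, r)
			return
		}
	}
	http.NotFound(w, r)
}

func main() {
	http.HandleFunc("/custom/", customHandler)
	_ = http.ListenAndServe("127.0.0.1:0", nil)
}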

View File

@@ -1,12 +1,14 @@
package api
import (
"context"
"fmt"
"html/template"
"net/http"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/gorilla/securecookie"
"github.com/gorilla/sessions"
)
@@ -125,3 +127,26 @@ func getSessionUserID(w http.ResponseWriter, r *http.Request) (string, error) {
return "", nil
}
func getCurrentUserID(ctx context.Context) *string {
userCtxVal := ctx.Value(ContextUser)
if userCtxVal != nil {
currentUser := userCtxVal.(string)
return &currentUser
}
return nil
}
func createSessionCookie(username string) (*http.Cookie, error) {
session := sessions.NewSession(sessionStore, cookieName)
session.Values[userIDKey] = username
encoded, err := securecookie.EncodeMulti(session.Name(), session.Values,
sessionStore.Codecs...)
if err != nil {
return nil, err
}
return sessions.NewCookie(session.Name(), encoded, session.Options), nil
}

View File

@@ -3,13 +3,13 @@ package urlbuilders
import "strconv"
type MovieURLBuilder struct {
BaseURL string
MovieID string
}
func NewMovieURLBuilder(baseURL string, movieID int) MovieURLBuilder {
return MovieURLBuilder{
BaseURL: baseURL,
MovieID: strconv.Itoa(movieID),
}
}
@@ -21,4 +21,3 @@ func (b MovieURLBuilder) GetMovieFrontImageURL() string {
func (b MovieURLBuilder) GetMovieBackImageURL() string {
return b.BaseURL + "/movie/" + b.MovieID + "/backimage"
}

View File

@@ -18,7 +18,7 @@ func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder {
}
func (b SceneURLBuilder) GetStreamURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/stream.mp4"
return b.BaseURL + "/scene/" + b.SceneID + "/stream"
}
func (b SceneURLBuilder) GetStreamPreviewURL() string {

View File

@@ -0,0 +1,19 @@
package urlbuilders
import "strconv"
type TagURLBuilder struct {
BaseURL string
TagID string
}
func NewTagURLBuilder(baseURL string, tagID int) TagURLBuilder {
return TagURLBuilder{
BaseURL: baseURL,
TagID: strconv.Itoa(tagID),
}
}
func (b TagURLBuilder) GetTagImageURL() string {
return b.BaseURL + "/tag/" + b.TagID + "/image"
}

View File

@@ -19,7 +19,7 @@ import (
var DB *sqlx.DB
var dbPath string
var appSchemaVersion uint = 8
var appSchemaVersion uint = 12
var databaseSchemaVersion uint
const sqlite3Driver = "sqlite3ex"
@@ -29,7 +29,11 @@ func init() {
registerCustomDriver()
}
func Initialize(databasePath string) {
// Initialize initializes the database. If the database is new, then it
// performs a full migration to the latest schema version. Otherwise, any
// necessary migrations must be run separately using RunMigrations.
// Returns true if the database is new.
func Initialize(databasePath string) bool {
dbPath = databasePath
if err := getDatabaseSchemaVersion(); err != nil {
@@ -42,7 +46,7 @@ func Initialize(databasePath string) {
panic(err)
}
// RunMigrations calls Initialize. Just return
return
return true
} else {
if databaseSchemaVersion > appSchemaVersion {
panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion))
@@ -51,12 +55,14 @@ func Initialize(databasePath string) {
// if migration is needed, then don't open the connection
if NeedsMigration() {
logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion)
return
return false
}
}
const disableForeignKeys = false
DB = open(databasePath, disableForeignKeys)
return false
}
func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
@@ -100,6 +106,7 @@ func Backup(backupPath string) error {
}
defer db.Close()
logger.Infof("Backing up database into: %s", backupPath)
_, err = db.Exec(`VACUUM INTO "` + backupPath + `"`)
if err != nil {
return fmt.Errorf("Vacuum failed: %s", err)
@@ -109,6 +116,7 @@ func Backup(backupPath string) error {
}
func RestoreFromBackup(backupPath string) error {
logger.Infof("Restoring backup database %s into %s", backupPath, dbPath)
return os.Rename(backupPath, dbPath)
}
@@ -177,18 +185,28 @@ func RunMigrations() error {
databaseSchemaVersion, _, _ = m.Version()
stepNumber := appSchemaVersion - databaseSchemaVersion
if stepNumber != 0 {
logger.Infof("Migrating database from version %d to %d", databaseSchemaVersion, appSchemaVersion)
err = m.Steps(int(stepNumber))
if err != nil {
// migration failed
logger.Errorf("Error migrating database: %s", err.Error())
m.Close()
return err
}
}
m.Close()
// re-initialise the database
Initialize(dbPath)
// run a vacuum on the database
logger.Info("Performing vacuum on database")
_, err = DB.Exec("VACUUM")
if err != nil {
logger.Warnf("error while performing post-migration vacuum: %s", err.Error())
}
return nil
}
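Backup relies on SQLite's VACUUM INTO (available from SQLite 3.27), which writes a compacted copy of the live database to the target path. A standalone sketch of the same call outside the stash codebase (this uses the plain mattn driver; stash registers its own sqlite3ex variant):

package main

import (
	"database/sql"
	"fmt"

	_ "github.com/mattn/go-sqlite3" // the sqlite driver family stash builds on
)

// backup writes a compacted copy of the open database to backupPath,
// the same approach Backup above uses.
func backup(db *sql.DB, backupPath string) error {
	if _, err := db.Exec(`VACUUM INTO "` + backupPath + `"`); err != nil {
		return fmt.Errorf("Vacuum failed: %s", err)
	}
	return nil
}

func main() {
	db, err := sql.Open("sqlite3", "stash.sqlite")
	if err != nil {
		panic(err)
	}
	defer db.Close()
	fmt.Println(backup(db, "stash-backup.sqlite"))
}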

View File

@@ -0,0 +1,514 @@
-- recreate scenes, studios and performers tables
ALTER TABLE `studios` rename to `_studios_old`;
ALTER TABLE `scenes` rename to `_scenes_old`;
ALTER TABLE `performers` RENAME TO `_performers_old`;
ALTER TABLE `movies` rename to `_movies_old`;
-- remove studio image
CREATE TABLE `studios` (
`id` integer not null primary key autoincrement,
`checksum` varchar(255) not null,
`name` varchar(255),
`url` varchar(255),
`parent_id` integer DEFAULT NULL CHECK ( id IS NOT parent_id ) REFERENCES studios(id) on delete set null,
`created_at` datetime not null,
`updated_at` datetime not null
);
DROP INDEX `studios_checksum_unique`;
DROP INDEX `index_studios_on_name`;
DROP INDEX `index_studios_on_checksum`;
CREATE UNIQUE INDEX `studios_checksum_unique` on `studios` (`checksum`);
CREATE INDEX `index_studios_on_name` on `studios` (`name`);
CREATE INDEX `index_studios_on_checksum` on `studios` (`checksum`);
-- remove scene cover
CREATE TABLE `scenes` (
`id` integer not null primary key autoincrement,
`path` varchar(510) not null,
`checksum` varchar(255) not null,
`title` varchar(255),
`details` text,
`url` varchar(255),
`date` date,
`rating` tinyint,
`size` varchar(255),
`duration` float,
`video_codec` varchar(255),
`audio_codec` varchar(255),
`width` tinyint,
`height` tinyint,
`framerate` float,
`bitrate` integer,
`studio_id` integer,
`o_counter` tinyint not null default 0,
`format` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null,
-- changed from cascade delete
foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL
);
DROP INDEX IF EXISTS `scenes_path_unique`;
DROP INDEX IF EXISTS `scenes_checksum_unique`;
DROP INDEX IF EXISTS `index_scenes_on_studio_id`;
CREATE UNIQUE INDEX `scenes_path_unique` on `scenes` (`path`);
CREATE UNIQUE INDEX `scenes_checksum_unique` on `scenes` (`checksum`);
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);
-- remove performer image
CREATE TABLE `performers` (
`id` integer not null primary key autoincrement,
`checksum` varchar(255) not null,
`name` varchar(255),
`gender` varchar(20),
`url` varchar(255),
`twitter` varchar(255),
`instagram` varchar(255),
`birthdate` date,
`ethnicity` varchar(255),
`country` varchar(255),
`eye_color` varchar(255),
`height` varchar(255),
`measurements` varchar(255),
`fake_tits` varchar(255),
`career_length` varchar(255),
`tattoos` varchar(255),
`piercings` varchar(255),
`aliases` varchar(255),
`favorite` boolean not null default '0',
`created_at` datetime not null,
`updated_at` datetime not null
);
DROP INDEX `performers_checksum_unique`;
DROP INDEX `index_performers_on_name`;
CREATE UNIQUE INDEX `performers_checksum_unique` on `performers` (`checksum`);
CREATE INDEX `index_performers_on_name` on `performers` (`name`);
-- remove front_image and back_image
CREATE TABLE `movies` (
`id` integer not null primary key autoincrement,
`name` varchar(255) not null,
`aliases` varchar(255),
`duration` integer,
`date` date,
`rating` tinyint,
`studio_id` integer,
`director` varchar(255),
`synopsis` text,
`checksum` varchar(255) not null,
`url` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`studio_id`) references `studios`(`id`) on delete set null
);
DROP INDEX `movies_name_unique`;
DROP INDEX `movies_checksum_unique`;
DROP INDEX `index_movies_on_studio_id`;
CREATE UNIQUE INDEX `movies_name_unique` on `movies` (`name`);
CREATE UNIQUE INDEX `movies_checksum_unique` on `movies` (`checksum`);
CREATE INDEX `index_movies_on_studio_id` on `movies` (`studio_id`);
-- recreate the tables referencing the above tables to correct their references
ALTER TABLE `galleries` rename to `_galleries_old`;
ALTER TABLE `performers_scenes` rename to `_performers_scenes_old`;
ALTER TABLE `scene_markers` rename to `_scene_markers_old`;
ALTER TABLE `scene_markers_tags` rename to `_scene_markers_tags_old`;
ALTER TABLE `scenes_tags` rename to `_scenes_tags_old`;
ALTER TABLE `movies_scenes` rename to `_movies_scenes_old`;
ALTER TABLE `scraped_items` rename to `_scraped_items_old`;
CREATE TABLE `galleries` (
`id` integer not null primary key autoincrement,
`path` varchar(510) not null,
`checksum` varchar(255) not null,
`scene_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX IF EXISTS `index_galleries_on_scene_id`;
DROP INDEX IF EXISTS `galleries_path_unique`;
DROP INDEX IF EXISTS `galleries_checksum_unique`;
CREATE INDEX `index_galleries_on_scene_id` on `galleries` (`scene_id`);
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
CREATE TABLE `performers_scenes` (
`performer_id` integer,
`scene_id` integer,
foreign key(`performer_id`) references `performers`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX `index_performers_scenes_on_scene_id`;
DROP INDEX `index_performers_scenes_on_performer_id`;
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
CREATE TABLE `scene_markers` (
`id` integer not null primary key autoincrement,
`title` varchar(255) not null,
`seconds` float not null,
`primary_tag_id` integer not null,
`scene_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`primary_tag_id`) references `tags`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX `index_scene_markers_on_scene_id`;
DROP INDEX `index_scene_markers_on_primary_tag_id`;
CREATE INDEX `index_scene_markers_on_scene_id` on `scene_markers` (`scene_id`);
CREATE INDEX `index_scene_markers_on_primary_tag_id` on `scene_markers` (`primary_tag_id`);
CREATE TABLE `scene_markers_tags` (
`scene_marker_id` integer,
`tag_id` integer,
foreign key(`scene_marker_id`) references `scene_markers`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`)
);
DROP INDEX `index_scene_markers_tags_on_tag_id`;
DROP INDEX `index_scene_markers_tags_on_scene_marker_id`;
CREATE INDEX `index_scene_markers_tags_on_tag_id` on `scene_markers_tags` (`tag_id`);
CREATE INDEX `index_scene_markers_tags_on_scene_marker_id` on `scene_markers_tags` (`scene_marker_id`);
CREATE TABLE `scenes_tags` (
`scene_id` integer,
`tag_id` integer,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`)
);
DROP INDEX `index_scenes_tags_on_tag_id`;
DROP INDEX `index_scenes_tags_on_scene_id`;
CREATE INDEX `index_scenes_tags_on_tag_id` on `scenes_tags` (`tag_id`);
CREATE INDEX `index_scenes_tags_on_scene_id` on `scenes_tags` (`scene_id`);
CREATE TABLE `movies_scenes` (
`movie_id` integer,
`scene_id` integer,
`scene_index` tinyint,
foreign key(`movie_id`) references `movies`(`id`) on delete cascade,
foreign key(`scene_id`) references `scenes`(`id`) on delete cascade
);
DROP INDEX `index_movies_scenes_on_movie_id`;
DROP INDEX `index_movies_scenes_on_scene_id`;
CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
-- remove movie_id since it doesn't appear to be used
CREATE TABLE `scraped_items` (
`id` integer not null primary key autoincrement,
`title` varchar(255),
`description` text,
`url` varchar(255),
`date` date,
`rating` varchar(255),
`tags` varchar(510),
`models` varchar(510),
`episode` integer,
`gallery_filename` varchar(255),
`gallery_url` varchar(510),
`video_filename` varchar(255),
`video_url` varchar(255),
`studio_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`studio_id`) references `studios`(`id`)
);
DROP INDEX `index_scraped_items_on_studio_id`;
CREATE INDEX `index_scraped_items_on_studio_id` on `scraped_items` (`studio_id`);
-- now populate from the old tables
-- these tables are changed, so they require the full column definitions
INSERT INTO `studios`
(
`id`,
`checksum`,
`name`,
`url`,
`parent_id`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`checksum`,
`name`,
`url`,
`parent_id`,
`created_at`,
`updated_at`
FROM `_studios_old`;
INSERT INTO `scenes`
(
`id`,
`path`,
`checksum`,
`title`,
`details`,
`url`,
`date`,
`rating`,
`size`,
`duration`,
`video_codec`,
`audio_codec`,
`width`,
`height`,
`framerate`,
`bitrate`,
`studio_id`,
`o_counter`,
`format`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`path`,
`checksum`,
`title`,
`details`,
`url`,
`date`,
`rating`,
`size`,
`duration`,
`video_codec`,
`audio_codec`,
`width`,
`height`,
`framerate`,
`bitrate`,
`studio_id`,
`o_counter`,
`format`,
`created_at`,
`updated_at`
FROM `_scenes_old`;
INSERT INTO `performers`
(
`id`,
`checksum`,
`name`,
`gender`,
`url`,
`twitter`,
`instagram`,
`birthdate`,
`ethnicity`,
`country`,
`eye_color`,
`height`,
`measurements`,
`fake_tits`,
`career_length`,
`tattoos`,
`piercings`,
`aliases`,
`favorite`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`checksum`,
`name`,
`gender`,
`url`,
`twitter`,
`instagram`,
`birthdate`,
`ethnicity`,
`country`,
`eye_color`,
`height`,
`measurements`,
`fake_tits`,
`career_length`,
`tattoos`,
`piercings`,
`aliases`,
`favorite`,
`created_at`,
`updated_at`
FROM `_performers_old`;
INSERT INTO `movies`
(
`id`,
`name`,
`aliases`,
`duration`,
`date`,
`rating`,
`studio_id`,
`director`,
`synopsis`,
`checksum`,
`url`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`name`,
`aliases`,
`duration`,
`date`,
`rating`,
`studio_id`,
`director`,
`synopsis`,
`checksum`,
`url`,
`created_at`,
`updated_at`
FROM `_movies_old`;
INSERT INTO `scraped_items`
(
`id`,
`title`,
`description`,
`url`,
`date`,
`rating`,
`tags`,
`models`,
`episode`,
`gallery_filename`,
`gallery_url`,
`video_filename`,
`video_url`,
`studio_id`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`title`,
`description`,
`url`,
`date`,
`rating`,
`tags`,
`models`,
`episode`,
`gallery_filename`,
`gallery_url`,
`video_filename`,
`video_url`,
`studio_id`,
`created_at`,
`updated_at`
FROM `_scraped_items_old`;
-- these tables are a direct copy
INSERT INTO `galleries` SELECT * from `_galleries_old`;
INSERT INTO `performers_scenes` SELECT * from `_performers_scenes_old`;
INSERT INTO `scene_markers` SELECT * from `_scene_markers_old`;
INSERT INTO `scene_markers_tags` SELECT * from `_scene_markers_tags_old`;
INSERT INTO `scenes_tags` SELECT * from `_scenes_tags_old`;
INSERT INTO `movies_scenes` SELECT * from `_movies_scenes_old`;
-- populate covers in separate table
CREATE TABLE `scenes_cover` (
`scene_id` integer,
`cover` blob not null,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);
CREATE UNIQUE INDEX `index_scene_covers_on_scene_id` on `scenes_cover` (`scene_id`);
INSERT INTO `scenes_cover`
(
`scene_id`,
`cover`
)
SELECT `id`, `cover` from `_scenes_old` where `cover` is not null;
-- put performer images in separate table
CREATE TABLE `performers_image` (
`performer_id` integer,
`image` blob not null,
foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE
);
CREATE UNIQUE INDEX `index_performer_image_on_performer_id` on `performers_image` (`performer_id`);
INSERT INTO `performers_image`
(
`performer_id`,
`image`
)
SELECT `id`, `image` from `_performers_old` where `image` is not null;
-- put studio images in separate table
CREATE TABLE `studios_image` (
`studio_id` integer,
`image` blob not null,
foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE
);
CREATE UNIQUE INDEX `index_studio_image_on_studio_id` on `studios_image` (`studio_id`);
INSERT INTO `studios_image`
(
`studio_id`,
`image`
)
SELECT `id`, `image` from `_studios_old` where `image` is not null;
-- put movie images in separate table
CREATE TABLE `movies_images` (
`movie_id` integer,
`front_image` blob not null,
`back_image` blob,
foreign key(`movie_id`) references `movies`(`id`) on delete CASCADE
);
CREATE UNIQUE INDEX `index_movie_images_on_movie_id` on `movies_images` (`movie_id`);
INSERT INTO `movies_images`
(
`movie_id`,
`front_image`,
`back_image`
)
SELECT `id`, `front_image`, `back_image` from `_movies_old` where `front_image` is not null;
-- drop old tables
DROP TABLE `_scenes_old`;
DROP TABLE `_studios_old`;
DROP TABLE `_performers_old`;
DROP TABLE `_movies_old`;
DROP TABLE `_galleries_old`;
DROP TABLE `_performers_scenes_old`;
DROP TABLE `_scene_markers_old`;
DROP TABLE `_scene_markers_tags_old`;
DROP TABLE `_scenes_tags_old`;
DROP TABLE `_movies_scenes_old`;
DROP TABLE `_scraped_items_old`;
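With the blobs moved out to side tables, reads become simple keyed lookups. A sketch of what a cover fetch amounts to using sqlx directly (the real query builders also accept an optional transaction):

package main

import (
	"database/sql"
	"fmt"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3"
)

// getSceneCover reads the cover blob from the scenes_cover table
// created above. sql.ErrNoRows simply means no cover is set.
func getSceneCover(db *sqlx.DB, sceneID int) ([]byte, error) {
	var cover []byte
	err := db.Get(&cover, "SELECT cover FROM scenes_cover WHERE scene_id = ?", sceneID)
	if err == sql.ErrNoRows {
		return nil, nil
	}
	return cover, err
}

func main() {
	db := sqlx.MustOpen("sqlite3", "stash.sqlite")
	defer db.Close()
	cover, err := getSceneCover(db, 1)
	fmt.Println(len(cover), err)
}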

View File

@@ -0,0 +1,7 @@
CREATE TABLE `tags_image` (
`tag_id` integer,
`image` blob not null,
foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE
);
CREATE UNIQUE INDEX `index_tag_image_on_tag_id` on `tags_image` (`tag_id`);

View File

@@ -0,0 +1,219 @@
-- need to change scenes.checksum to be nullable
ALTER TABLE `scenes` rename to `_scenes_old`;
CREATE TABLE `scenes` (
`id` integer not null primary key autoincrement,
`path` varchar(510) not null,
-- nullable
`checksum` varchar(255),
-- add oshash
`oshash` varchar(255),
`title` varchar(255),
`details` text,
`url` varchar(255),
`date` date,
`rating` tinyint,
`size` varchar(255),
`duration` float,
`video_codec` varchar(255),
`audio_codec` varchar(255),
`width` tinyint,
`height` tinyint,
`framerate` float,
`bitrate` integer,
`studio_id` integer,
`o_counter` tinyint not null default 0,
`format` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL,
-- add check to ensure at least one hash is set
CHECK (`checksum` is not null or `oshash` is not null)
);
DROP INDEX IF EXISTS `scenes_path_unique`;
DROP INDEX IF EXISTS `scenes_checksum_unique`;
DROP INDEX IF EXISTS `index_scenes_on_studio_id`;
CREATE UNIQUE INDEX `scenes_path_unique` on `scenes` (`path`);
CREATE UNIQUE INDEX `scenes_checksum_unique` on `scenes` (`checksum`);
CREATE UNIQUE INDEX `scenes_oshash_unique` on `scenes` (`oshash`);
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);
-- recreate the tables referencing scenes to correct their references
ALTER TABLE `galleries` rename to `_galleries_old`;
ALTER TABLE `performers_scenes` rename to `_performers_scenes_old`;
ALTER TABLE `scene_markers` rename to `_scene_markers_old`;
ALTER TABLE `scene_markers_tags` rename to `_scene_markers_tags_old`;
ALTER TABLE `scenes_tags` rename to `_scenes_tags_old`;
ALTER TABLE `movies_scenes` rename to `_movies_scenes_old`;
ALTER TABLE `scenes_cover` rename to `_scenes_cover_old`;
CREATE TABLE `galleries` (
`id` integer not null primary key autoincrement,
`path` varchar(510) not null,
`checksum` varchar(255) not null,
`scene_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX IF EXISTS `index_galleries_on_scene_id`;
DROP INDEX IF EXISTS `galleries_path_unique`;
DROP INDEX IF EXISTS `galleries_checksum_unique`;
CREATE INDEX `index_galleries_on_scene_id` on `galleries` (`scene_id`);
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
CREATE TABLE `performers_scenes` (
`performer_id` integer,
`scene_id` integer,
foreign key(`performer_id`) references `performers`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX `index_performers_scenes_on_scene_id`;
DROP INDEX `index_performers_scenes_on_performer_id`;
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
CREATE TABLE `scene_markers` (
`id` integer not null primary key autoincrement,
`title` varchar(255) not null,
`seconds` float not null,
`primary_tag_id` integer not null,
`scene_id` integer,
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`primary_tag_id`) references `tags`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
DROP INDEX `index_scene_markers_on_scene_id`;
DROP INDEX `index_scene_markers_on_primary_tag_id`;
CREATE INDEX `index_scene_markers_on_scene_id` on `scene_markers` (`scene_id`);
CREATE INDEX `index_scene_markers_on_primary_tag_id` on `scene_markers` (`primary_tag_id`);
CREATE TABLE `scene_markers_tags` (
`scene_marker_id` integer,
`tag_id` integer,
foreign key(`scene_marker_id`) references `scene_markers`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`)
);
DROP INDEX `index_scene_markers_tags_on_tag_id`;
DROP INDEX `index_scene_markers_tags_on_scene_marker_id`;
CREATE INDEX `index_scene_markers_tags_on_tag_id` on `scene_markers_tags` (`tag_id`);
CREATE INDEX `index_scene_markers_tags_on_scene_marker_id` on `scene_markers_tags` (`scene_marker_id`);
CREATE TABLE `scenes_tags` (
`scene_id` integer,
`tag_id` integer,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`)
);
DROP INDEX `index_scenes_tags_on_tag_id`;
DROP INDEX `index_scenes_tags_on_scene_id`;
CREATE INDEX `index_scenes_tags_on_tag_id` on `scenes_tags` (`tag_id`);
CREATE INDEX `index_scenes_tags_on_scene_id` on `scenes_tags` (`scene_id`);
CREATE TABLE `movies_scenes` (
`movie_id` integer,
`scene_id` integer,
`scene_index` tinyint,
foreign key(`movie_id`) references `movies`(`id`) on delete cascade,
foreign key(`scene_id`) references `scenes`(`id`) on delete cascade
);
DROP INDEX `index_movies_scenes_on_movie_id`;
DROP INDEX `index_movies_scenes_on_scene_id`;
CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
CREATE TABLE `scenes_cover` (
`scene_id` integer,
`cover` blob not null,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);
DROP INDEX `index_scene_covers_on_scene_id`;
CREATE UNIQUE INDEX `index_scene_covers_on_scene_id` on `scenes_cover` (`scene_id`);
-- now populate from the old tables
-- these tables have changed, so the full column list is required
INSERT INTO `scenes`
(
`id`,
`path`,
`checksum`,
`title`,
`details`,
`url`,
`date`,
`rating`,
`size`,
`duration`,
`video_codec`,
`audio_codec`,
`width`,
`height`,
`framerate`,
`bitrate`,
`studio_id`,
`o_counter`,
`format`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`path`,
`checksum`,
`title`,
`details`,
`url`,
`date`,
`rating`,
`size`,
`duration`,
`video_codec`,
`audio_codec`,
`width`,
`height`,
`framerate`,
`bitrate`,
`studio_id`,
`o_counter`,
`format`,
`created_at`,
`updated_at`
FROM `_scenes_old`;
-- these tables are a direct copy
INSERT INTO `galleries` SELECT * from `_galleries_old`;
INSERT INTO `performers_scenes` SELECT * from `_performers_scenes_old`;
INSERT INTO `scene_markers` SELECT * from `_scene_markers_old`;
INSERT INTO `scene_markers_tags` SELECT * from `_scene_markers_tags_old`;
INSERT INTO `scenes_tags` SELECT * from `_scenes_tags_old`;
INSERT INTO `movies_scenes` SELECT * from `_movies_scenes_old`;
INSERT INTO `scenes_cover` SELECT * from `_scenes_cover_old`;
-- drop old tables
DROP TABLE `_scenes_old`;
DROP TABLE `_galleries_old`;
DROP TABLE `_performers_scenes_old`;
DROP TABLE `_scene_markers_old`;
DROP TABLE `_scene_markers_tags_old`;
DROP TABLE `_scenes_tags_old`;
DROP TABLE `_movies_scenes_old`;
DROP TABLE `_scenes_cover_old`;

View File

@@ -0,0 +1,3 @@
ALTER TABLE studios
ADD COLUMN parent_id INTEGER DEFAULT NULL CHECK ( id IS NOT parent_id ) REFERENCES studios(id) on delete set null;
CREATE INDEX index_studios_on_parent_id on studios (parent_id);
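The new self-referencing parent_id column makes parent/child studio lookups straightforward. A database/sql sketch; the childStudios helper and the name column are assumptions, only parent_id comes from the migration:

// childStudios lists the direct children of a studio via the new
// parent_id column; the name column is an assumption.
func childStudios(db *sql.DB, studioID int) ([]string, error) {
    rows, err := db.Query("SELECT name FROM studios WHERE parent_id = ?", studioID)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var names []string
    for rows.Next() {
        var name string
        if err := rows.Scan(&name); err != nil {
            return nil, err
        }
        names = append(names, name)
    }
    return names, rows.Err()
}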

View File

@@ -3,12 +3,13 @@ package database
import (
"bytes"
"fmt"
"github.com/gobuffalo/packr/v2"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/source"
"io"
"io/ioutil"
"os"
"github.com/gobuffalo/packr/v2"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/source"
)
type Packr2Source struct {
@@ -72,7 +73,7 @@ func (s *Packr2Source) ReadUp(version uint) (r io.ReadCloser, identifier string,
if migration, ok := s.Migrations.Up(version); !ok {
return nil, "", os.ErrNotExist
} else {
b := s.Box.Bytes(migration.Raw)
b, _ := s.Box.Find(migration.Raw)
return ioutil.NopCloser(bytes.NewBuffer(b)),
migration.Identifier,
nil
@@ -83,7 +84,7 @@ func (s *Packr2Source) ReadDown(version uint) (r io.ReadCloser, identifier strin
if migration, ok := s.Migrations.Down(version); !ok {
return nil, "", migrate.ErrNilVersion
} else {
b := s.Box.Bytes(migration.Raw)
b, _ := s.Box.Find(migration.Raw)
return ioutil.NopCloser(bytes.NewBuffer(b)),
migration.Identifier,
nil
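Box.Find replaces the deprecated Box.Bytes and also reports an error, which the code above discards. A sketch of ReadUp with that error propagated instead, written against the same package; the error wrapping is an assumption, not the committed behaviour:

// ReadUp, sketched with the Find error propagated rather than discarded.
func (s *Packr2Source) ReadUp(version uint) (io.ReadCloser, string, error) {
    migration, ok := s.Migrations.Up(version)
    if !ok {
        return nil, "", os.ErrNotExist
    }
    b, err := s.Box.Find(migration.Raw)
    if err != nil {
        return nil, "", fmt.Errorf("reading migration %s: %w", migration.Raw, err)
    }
    return ioutil.NopCloser(bytes.NewBuffer(b)), migration.Identifier, nil
}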

View File

@@ -2,7 +2,6 @@ package ffmpeg
import (
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
@@ -101,6 +100,7 @@ func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
}
buf := make([]byte, 80)
lastProgress := 0.0
var errBuilder strings.Builder
for {
n, err := stderr.Read(buf)
@@ -109,7 +109,11 @@ func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
time := GetTimeFromRegex(data)
if time > 0 && probeResult.Duration > 0 {
progress := time / probeResult.Duration
logger.Infof("Progress %.2f", progress)
if progress > lastProgress+0.01 {
logger.Infof("Progress %.2f", progress)
lastProgress = progress
}
}
errBuilder.WriteString(data)
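The lastProgress guard above throttles logging to roughly one line per percentage point of progress instead of one line per stderr read. The same pattern in isolation, with a hypothetical 100-second duration:

package main

import "fmt"

func main() {
    lastProgress := 0.0
    for _, time := range []float64{1, 2, 3, 30, 31, 62, 90} {
        progress := time / 100.0 // duration assumed to be 100 seconds
        // log only when progress has advanced by more than one percent
        if progress > lastProgress+0.01 {
            fmt.Printf("Progress %.2f\n", progress)
            lastProgress = progress
        }
    }
    // prints 0.02, 0.30, 0.62 and 0.90 only
}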
@@ -133,21 +137,3 @@ func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
return stdoutString, nil
}
func (e *Encoder) stream(probeResult VideoFile, args []string) (io.ReadCloser, *os.Process, error) {
cmd := exec.Command(e.Path, args...)
stdout, err := cmd.StdoutPipe()
if nil != err {
logger.Error("FFMPEG stdout not available: " + err.Error())
}
if err = cmd.Start(); err != nil {
return nil, nil, err
}
registerRunningEncoder(probeResult.Path, cmd.Process)
go waitAndDeregister(probeResult.Path, cmd)
return stdout, cmd.Process, nil
}

View File

@@ -8,18 +8,58 @@ import (
)
type ScenePreviewChunkOptions struct {
Time int
StartTime float64
Duration float64
Width int
OutputPath string
}
func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions, preset string) {
func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions, preset string, fallback bool) error {
var fastSeek float64
var slowSeek float64
fallbackMinSlowSeek := 20.0
args := []string{
"-v", "error",
"-xerror",
"-ss", strconv.Itoa(options.Time),
"-i", probeResult.Path,
"-t", "0.75",
}
// Non-fallback: enable xerror.
// "-xerror" causes ffmpeg to fail on warnings, often the preview is fine but could be broken.
if !fallback {
args = append(args, "-xerror")
fastSeek = options.StartTime
slowSeek = 0
} else {
// In fallback mode, disable "-xerror" and use a combination of fast and slow seeks instead of fast seek alone.
// With avi/wmv files, ffmpeg commonly fails to pick the right start point for decoding when
// using fast seek only. Forcing ffmpeg to decode more avoids the "blocky green artifact" issue.
if options.StartTime > fallbackMinSlowSeek {
// Handle seeks longer than fallbackMinSlowSeek with fast/slow seeks
// Allow for at least fallbackMinSlowSeek seconds of slow seek
fastSeek = options.StartTime - fallbackMinSlowSeek
slowSeek = fallbackMinSlowSeek
} else {
// Handle seeks shorter than fallbackMinSlowSeek with only slow seeks.
slowSeek = options.StartTime
fastSeek = 0
}
}
if fastSeek > 0 {
args = append(args, "-ss")
args = append(args, strconv.FormatFloat(fastSeek, 'f', 2, 64))
}
args = append(args, "-i")
args = append(args, probeResult.Path)
if slowSeek > 0 {
args = append(args, "-ss")
args = append(args, strconv.FormatFloat(slowSeek, 'f', 2, 64))
}
args2 := []string{
"-t", strconv.FormatFloat(options.Duration, 'f', 2, 64),
"-max_muxing_queue_size", "1024", // https://trac.ffmpeg.org/ticket/6375
"-y",
"-c:v", "libx264",
@@ -35,10 +75,14 @@ func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePre
"-strict", "-2",
options.OutputPath,
}
_, _ = e.run(probeResult, args)
finalArgs := append(args, args2...)
_, err := e.run(probeResult, finalArgs)
return err
}
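In fallback mode the seek is split in two: one -ss before -i (fast input seek) and one after it (slow output seek that actually decodes frames). A standalone sketch of just that arithmetic; the input path and seek values are placeholders:

package main

import (
    "fmt"
    "strconv"
)

// seekArgs mirrors the fast/slow seek split above for a given start time.
func seekArgs(startTime float64, fallback bool) []string {
    const fallbackMinSlowSeek = 20.0
    var fastSeek, slowSeek float64
    if !fallback {
        fastSeek = startTime // fast seek only, placed before -i
    } else if startTime > fallbackMinSlowSeek {
        fastSeek = startTime - fallbackMinSlowSeek
        slowSeek = fallbackMinSlowSeek // decode the last 20 seconds
    } else {
        slowSeek = startTime // short seeks are decoded in full
    }

    var args []string
    if fastSeek > 0 {
        args = append(args, "-ss", strconv.FormatFloat(fastSeek, 'f', 2, 64))
    }
    args = append(args, "-i", "input.wmv") // placeholder path
    if slowSeek > 0 {
        args = append(args, "-ss", strconv.FormatFloat(slowSeek, 'f', 2, 64))
    }
    return args
}

func main() {
    fmt.Println(seekArgs(35, true))  // [-ss 15.00 -i input.wmv -ss 20.00]
    fmt.Println(seekArgs(35, false)) // [-ss 35.00 -i input.wmv]
}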
func (e *Encoder) ScenePreviewVideoChunkCombine(probeResult VideoFile, concatFilePath string, outputPath string) {
func (e *Encoder) ScenePreviewVideoChunkCombine(probeResult VideoFile, concatFilePath string, outputPath string) error {
args := []string{
"-v", "error",
"-f", "concat",
@@ -47,7 +91,8 @@ func (e *Encoder) ScenePreviewVideoChunkCombine(probeResult VideoFile, concatFil
"-c", "copy",
outputPath,
}
_, _ = e.run(probeResult, args)
_, err := e.run(probeResult, args)
return err
}
func (e *Encoder) ScenePreviewVideoToImage(probeResult VideoFile, width int, videoPreviewPath string, outputPath string) error {

View File

@@ -21,7 +21,7 @@ func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) e
"-v", options.Verbosity,
"-ss", fmt.Sprintf("%v", options.Time),
"-y",
"-i", probeResult.Path, // TODO: Wrap in quotes?
"-i", probeResult.Path,
"-vframes", "1",
"-q:v", fmt.Sprintf("%v", options.Quality),
"-vf", fmt.Sprintf("scale=%v:-1", options.Width),

View File

@@ -1,8 +1,6 @@
package ffmpeg
import (
"io"
"os"
"strconv"
"github.com/stashapp/stash/pkg/models"
@@ -111,77 +109,3 @@ func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) {
}
_, _ = e.run(probeResult, args)
}
func (e *Encoder) StreamTranscode(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
scale := calculateTranscodeScale(probeResult, maxTranscodeSize)
args := []string{}
if startTime != "" {
args = append(args, "-ss", startTime)
}
args = append(args,
"-i", probeResult.Path,
"-c:v", "libvpx-vp9",
"-vf", "scale="+scale,
"-deadline", "realtime",
"-cpu-used", "5",
"-row-mt", "1",
"-crf", "30",
"-b:v", "0",
"-f", "webm",
"pipe:",
)
return e.stream(probeResult, args)
}
//transcode the video, remove the audio
//in some videos where the audio codec is not supported by ffmpeg
//ffmpeg fails if you try to transcode the audio
func (e *Encoder) StreamTranscodeVideo(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
scale := calculateTranscodeScale(probeResult, maxTranscodeSize)
args := []string{}
if startTime != "" {
args = append(args, "-ss", startTime)
}
args = append(args,
"-i", probeResult.Path,
"-an",
"-c:v", "libvpx-vp9",
"-vf", "scale="+scale,
"-deadline", "realtime",
"-cpu-used", "5",
"-row-mt", "1",
"-crf", "30",
"-b:v", "0",
"-f", "webm",
"pipe:",
)
return e.stream(probeResult, args)
}
//it is very common in MKVs to have just the audio codec unsupported
//copy the video stream, transcode the audio and serve as Matroska
func (e *Encoder) StreamMkvTranscodeAudio(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
args := []string{}
if startTime != "" {
args = append(args, "-ss", startTime)
}
args = append(args,
"-i", probeResult.Path,
"-c:v", "copy",
"-c:a", "libopus",
"-b:a", "96k",
"-vbr", "on",
"-f", "matroska",
"pipe:",
)
return e.stream(probeResult, args)
}

View File

@@ -12,7 +12,6 @@ import (
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
)
type Container string
@@ -47,11 +46,17 @@ const (
Hevc string = "hevc"
Vp8 string = "vp8"
Vp9 string = "vp9"
Mkv string = "mkv" // only used from the browser to indicate mkv support
Hls string = "hls" // only used from the browser to indicate hls support
MimeWebm string = "video/webm"
MimeMkv string = "video/x-matroska"
MimeMp4 string = "video/mp4"
MimeHLS string = "application/vnd.apple.mpegurl"
MimeMpegts string = "video/MP2T"
)
var ValidCodecs = []string{H264, H265, Vp8, Vp9}
// only support H264 by default, since Safari does not support VP8/VP9
var DefaultSupportedCodecs = []string{H264, H265}
var validForH264Mkv = []Container{Mp4, Matroska}
var validForH264 = []Container{Mp4}
@@ -102,15 +107,8 @@ func MatchContainer(format string, filePath string) Container { // match ffprobe
return container
}
func IsValidCodec(codecName string) bool {
forceHEVC := config.GetForceHEVC()
if forceHEVC {
if codecName == Hevc {
return true
}
}
for _, c := range ValidCodecs {
func IsValidCodec(codecName string, supportedCodecs []string) bool {
for _, c := range supportedCodecs {
if c == codecName {
return true
}
@@ -158,30 +156,31 @@ func IsValidForContainer(format Container, validContainers []Container) bool {
}
// extend stream validation check to take the container into account
func IsValidCombo(codecName string, format Container) bool {
forceMKV := config.GetForceMKV()
forceHEVC := config.GetForceHEVC()
func IsValidCombo(codecName string, format Container, supportedVideoCodecs []string) bool {
supportMKV := IsValidCodec(Mkv, supportedVideoCodecs)
supportHEVC := IsValidCodec(Hevc, supportedVideoCodecs)
switch codecName {
case H264:
if forceMKV {
if supportMKV {
return IsValidForContainer(format, validForH264Mkv)
}
return IsValidForContainer(format, validForH264)
case H265:
if forceMKV {
if supportMKV {
return IsValidForContainer(format, validForH265Mkv)
}
return IsValidForContainer(format, validForH265)
case Vp8:
return IsValidForContainer(format, validForVp8)
case Vp9:
if forceMKV {
if supportMKV {
return IsValidForContainer(format, validForVp9Mkv)
}
return IsValidForContainer(format, validForVp9)
case Hevc:
if forceHEVC {
if forceMKV {
if supportHEVC {
if supportMKV {
return IsValidForContainer(format, validForHevcMkv)
}
return IsValidForContainer(format, validForHevc)
@@ -190,6 +189,13 @@ func IsValidCombo(codecName string, format Container) bool {
return false
}
func IsStreamable(videoCodec string, audioCodec AudioCodec, container Container) bool {
supportedVideoCodecs := DefaultSupportedCodecs
// check if the video codec matches the supported codecs
return IsValidCodec(videoCodec, supportedVideoCodecs) && IsValidCombo(videoCodec, container, supportedVideoCodecs) && IsValidAudioForContainer(audioCodec, container)
}
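With supported codecs now passed in explicitly, a caller decides between direct streaming and transcoding with a check along these lines; H264, Mp4 and IsStreamable come from this file, while audioCodec is assumed to come from an earlier ffprobe run:

// hypothetical pre-stream check, within package ffmpeg
if IsStreamable(H264, audioCodec, Mp4) {
    // serve the file bytes directly
} else {
    // fall back to a transcode stream
}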
type VideoFile struct {
JSON FFProbeJSON
AudioStream *FFProbeStream

42
pkg/ffmpeg/hls.go Normal file
View File

@@ -0,0 +1,42 @@
package ffmpeg
import (
"fmt"
"io"
"strings"
)
const hlsSegmentLength = 10.0
func WriteHLSPlaylist(probeResult VideoFile, baseUrl string, w io.Writer) {
fmt.Fprint(w, "#EXTM3U\n")
fmt.Fprint(w, "#EXT-X-VERSION:3\n")
fmt.Fprint(w, "#EXT-X-MEDIA-SEQUENCE:0\n")
fmt.Fprint(w, "#EXT-X-ALLOW-CACHE:YES\n")
fmt.Fprintf(w, "#EXT-X-TARGETDURATION:%d\n", int(hlsSegmentLength))
fmt.Fprint(w, "#EXT-X-PLAYLIST-TYPE:VOD\n")
duration := probeResult.Duration
leftover := duration
upTo := 0.0
i := strings.LastIndex(baseUrl, ".m3u8")
tsURL := baseUrl
// guard against a baseUrl without the ".m3u8" suffix, which would
// otherwise panic on the slice below
if i >= 0 {
tsURL = baseUrl[0:i] + ".ts"
}
for leftover > 0 {
thisLength := hlsSegmentLength
if leftover < thisLength {
thisLength = leftover
}
fmt.Fprintf(w, "#EXTINF: %f,\n", thisLength)
fmt.Fprintf(w, "%s?start=%f\n", tsURL, upTo)
leftover -= thisLength
upTo += thisLength
}
fmt.Fprint(w, "#EXT-X-ENDLIST\n")
}
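A playlist like this is normally written straight into an HTTP response. A hypothetical handler wiring; only WriteHLSPlaylist, VideoFile and MimeHLS are taken from the diff:

package api

import (
    "net/http"

    "github.com/stashapp/stash/pkg/ffmpeg"
)

// playlistHandler serves the HLS playlist for an already-probed file.
func playlistHandler(probeResult ffmpeg.VideoFile) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        w.Header().Set("Content-Type", ffmpeg.MimeHLS)
        // the request path must end in ".m3u8"; segment URLs are
        // derived from it by swapping the extension to ".ts"
        ffmpeg.WriteHLSPlaylist(probeResult, r.URL.Path, w)
    }
}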

245
pkg/ffmpeg/stream.go Normal file
View File

@@ -0,0 +1,245 @@
package ffmpeg
import (
"io"
"io/ioutil"
"net/http"
"os"
"os/exec"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
const CopyStreamCodec = "copy"
type Stream struct {
Stdout io.ReadCloser
Process *os.Process
options TranscodeStreamOptions
mimeType string
}
func (s *Stream) Serve(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", s.mimeType)
w.WriteHeader(http.StatusOK)
logger.Infof("[stream] transcoding video file to %s", s.mimeType)
// handle if client closes the connection
notify := r.Context().Done()
go func() {
<-notify
s.Process.Kill()
}()
_, err := io.Copy(w, s.Stdout)
if err != nil {
logger.Errorf("[stream] error serving transcoded video file: %s", err.Error())
}
}
type Codec struct {
Codec string
format string
MimeType string
extraArgs []string
hls bool
}
var CodecHLS = Codec{
Codec: "libx264",
format: "mpegts",
MimeType: MimeMpegts,
extraArgs: []string{
"-acodec", "aac",
"-pix_fmt", "yuv420p",
"-preset", "veryfast",
"-crf", "25",
},
hls: true,
}
var CodecH264 = Codec{
Codec: "libx264",
format: "mp4",
MimeType: MimeMp4,
extraArgs: []string{
"-movflags", "frag_keyframe",
"-pix_fmt", "yuv420p",
"-preset", "veryfast",
"-crf", "25",
},
}
var CodecVP9 = Codec{
Codec: "libvpx-vp9",
format: "webm",
MimeType: MimeWebm,
extraArgs: []string{
"-deadline", "realtime",
"-cpu-used", "5",
"-row-mt", "1",
"-crf", "30",
"-b:v", "0",
},
}
var CodecVP8 = Codec{
Codec: "libvpx",
format: "webm",
MimeType: MimeWebm,
extraArgs: []string{
"-deadline", "realtime",
"-cpu-used", "5",
"-crf", "12",
"-b:v", "3M",
"-pix_fmt", "yuv420p",
},
}
var CodecHEVC = Codec{
Codec: "libx265",
format: "mp4",
MimeType: MimeMp4,
extraArgs: []string{
"-movflags", "frag_keyframe",
"-preset", "veryfast",
"-crf", "30",
},
}
// it is very common for MKVs to have only the audio codec unsupported;
// copy the video stream, transcode the audio, and serve as Matroska
var CodecMKVAudio = Codec{
Codec: CopyStreamCodec,
format: "matroska",
MimeType: MimeMkv,
extraArgs: []string{
"-c:a", "libopus",
"-b:a", "96k",
"-vbr", "on",
},
}
type TranscodeStreamOptions struct {
ProbeResult VideoFile
Codec Codec
StartTime string
MaxTranscodeSize models.StreamingResolutionEnum
// transcode the video, remove the audio
// in some videos where the audio codec is not supported by ffmpeg
// ffmpeg fails if you try to transcode the audio
VideoOnly bool
}
func GetTranscodeStreamOptions(probeResult VideoFile, videoCodec Codec, audioCodec AudioCodec) TranscodeStreamOptions {
options := TranscodeStreamOptions{
ProbeResult: probeResult,
Codec: videoCodec,
}
if audioCodec == MissingUnsupported {
// ffmpeg fails if it tries to transcode an unsupported audio codec
options.VideoOnly = true
}
return options
}
func (o TranscodeStreamOptions) getStreamArgs() []string {
args := []string{
"-hide_banner",
"-v", "error",
}
if o.StartTime != "" {
args = append(args, "-ss", o.StartTime)
}
if o.Codec.hls {
// we only serve a fixed segment length
args = append(args, "-t", strconv.Itoa(int(hlsSegmentLength)))
}
args = append(args,
"-i", o.ProbeResult.Path,
)
if o.VideoOnly {
args = append(args, "-an")
}
args = append(args,
"-c:v", o.Codec.Codec,
)
// don't set scale when copying video stream
if o.Codec.Codec != CopyStreamCodec {
scale := calculateTranscodeScale(o.ProbeResult, o.MaxTranscodeSize)
args = append(args,
"-vf", "scale="+scale,
)
}
if len(o.Codec.extraArgs) > 0 {
args = append(args, o.Codec.extraArgs...)
}
args = append(args,
// this is needed for 5-channel ac3 files
"-ac", "2",
"-f", o.Codec.format,
"pipe:",
)
return args
}
func (e *Encoder) GetTranscodeStream(options TranscodeStreamOptions) (*Stream, error) {
return e.stream(options.ProbeResult, options)
}
func (e *Encoder) stream(probeResult VideoFile, options TranscodeStreamOptions) (*Stream, error) {
args := options.getStreamArgs()
cmd := exec.Command(e.Path, args...)
logger.Debugf("Streaming via: %s", strings.Join(cmd.Args, " "))
stdout, err := cmd.StdoutPipe()
if nil != err {
logger.Error("FFMPEG stdout not available: " + err.Error())
return nil, err
}
stderr, err := cmd.StderrPipe()
if nil != err {
logger.Error("FFMPEG stderr not available: " + err.Error())
return nil, err
}
if err = cmd.Start(); err != nil {
return nil, err
}
registerRunningEncoder(probeResult.Path, cmd.Process)
go waitAndDeregister(probeResult.Path, cmd)
// stderr must be consumed or the process deadlocks
go func() {
stderrData, _ := ioutil.ReadAll(stderr)
stderrString := string(stderrData)
if len(stderrString) > 0 {
logger.Debugf("[stream] ffmpeg stderr: %s", stderrString)
}
}()
ret := &Stream{
Stdout: stdout,
Process: cmd.Process,
options: options,
mimeType: options.Codec.MimeType,
}
return ret, nil
}
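Putting the pieces of this file together, a transcode endpoint might look like this hypothetical handler; the encoder, probe result and "start" query parameter are assumptions:

package api

import (
    "net/http"

    "github.com/stashapp/stash/pkg/ffmpeg"
)

// streamTranscoded builds a VP9 transcode stream and serves it.
func streamTranscoded(encoder ffmpeg.Encoder, probeResult ffmpeg.VideoFile,
    audioCodec ffmpeg.AudioCodec, w http.ResponseWriter, r *http.Request) {
    options := ffmpeg.GetTranscodeStreamOptions(probeResult, ffmpeg.CodecVP9, audioCodec)
    options.StartTime = r.URL.Query().Get("start") // may be empty

    stream, err := encoder.GetTranscodeStream(options)
    if err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
    // Serve streams the transcoded bytes and kills ffmpeg if the
    // client disconnects
    stream.Serve(w, r)
}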

View File

@@ -65,6 +65,8 @@ func logLevelFromString(level string) logrus.Level {
ret = logrus.WarnLevel
} else if level == "Error" {
ret = logrus.ErrorLevel
} else if level == "Trace" {
ret = logrus.TraceLevel
}
return ret
@@ -178,6 +180,15 @@ func Trace(args ...interface{}) {
logger.Trace(args...)
}
func Tracef(format string, args ...interface{}) {
logger.Tracef(format, args...)
l := &LogItem{
Type: "trace",
Message: fmt.Sprintf(format, args...),
}
addLogItem(l)
}
func Debug(args ...interface{}) {
logger.Debug(args...)
l := &LogItem{

70
pkg/manager/checksum.go Normal file
View File

@@ -0,0 +1,70 @@
package manager
import (
"errors"
"github.com/spf13/viper"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
)
func setInitialMD5Config() {
// if there are no scene files in the database, default the
// VideoFileNamingAlgorithm config setting to oshash and calculateMD5 to
// false; otherwise default them to md5 and true for backwards compatibility
sqb := models.NewSceneQueryBuilder()
count, err := sqb.Count()
if err != nil {
logger.Errorf("Error while counting scenes: %s", err.Error())
return
}
usingMD5 := count != 0
defaultAlgorithm := models.HashAlgorithmOshash
if usingMD5 {
defaultAlgorithm = models.HashAlgorithmMd5
}
viper.SetDefault(config.VideoFileNamingAlgorithm, defaultAlgorithm)
viper.SetDefault(config.CalculateMD5, usingMD5)
if err := config.Write(); err != nil {
logger.Errorf("Error while writing configuration file: %s", err.Error())
}
}
// ValidateVideoFileNamingAlgorithm validates changing the
// VideoFileNamingAlgorithm configuration flag.
//
// If setting VideoFileNamingAlgorithm to MD5, then this function will ensure
// that all checksum values are set on all scenes.
//
// Likewise, if VideoFileNamingAlgorithm is set to oshash, then this function
// will ensure that all oshash values are set on all scenes.
func ValidateVideoFileNamingAlgorithm(newValue models.HashAlgorithm) error {
// if algorithm is being set to MD5, then all checksums must be present
qb := models.NewSceneQueryBuilder()
if newValue == models.HashAlgorithmMd5 {
missingMD5, err := qb.CountMissingChecksum()
if err != nil {
return err
}
if missingMD5 > 0 {
return errors.New("some checksums are missing on scenes. Run Scan with calculateMD5 set to true")
}
} else if newValue == models.HashAlgorithmOshash {
missingOSHash, err := qb.CountMissingOSHash()
if err != nil {
return err
}
if missingOSHash > 0 {
return errors.New("some oshash values are missing on scenes. Run Scan to populate")
}
}
return nil
}
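A caller would run this validation before persisting a new value. A hypothetical guard, within the same package; only ValidateVideoFileNamingAlgorithm and models.HashAlgorithm are taken from the diff:

// setNamingAlgorithm refuses the change while any scene lacks the hash
// the new algorithm requires; persistence is left as a stub.
func setNamingAlgorithm(newValue models.HashAlgorithm) error {
    if err := ValidateVideoFileNamingAlgorithm(newValue); err != nil {
        return err
    }
    // ... persist newValue to the configuration here ...
    return nil
}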

Some files were not shown because too many files have changed in this diff