Mirror of https://github.com/stashapp/stash.git (synced 2025-12-17 20:34:37 +03:00)
Updated dependencies
19
vendor/github.com/99designs/gqlgen/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright (c) 2018 Adam Scarr

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
104
vendor/github.com/99designs/gqlgen/complexity/complexity.go
generated
vendored
Normal file
@@ -0,0 +1,104 @@
package complexity

import (
    "github.com/99designs/gqlgen/graphql"
    "github.com/vektah/gqlparser/ast"
)

func Calculate(es graphql.ExecutableSchema, op *ast.OperationDefinition, vars map[string]interface{}) int {
    walker := complexityWalker{
        es:     es,
        schema: es.Schema(),
        vars:   vars,
    }
    return walker.selectionSetComplexity(op.SelectionSet)
}

type complexityWalker struct {
    es     graphql.ExecutableSchema
    schema *ast.Schema
    vars   map[string]interface{}
}

func (cw complexityWalker) selectionSetComplexity(selectionSet ast.SelectionSet) int {
    var complexity int
    for _, selection := range selectionSet {
        switch s := selection.(type) {
        case *ast.Field:
            fieldDefinition := cw.schema.Types[s.Definition.Type.Name()]
            var childComplexity int
            switch fieldDefinition.Kind {
            case ast.Object, ast.Interface, ast.Union:
                childComplexity = cw.selectionSetComplexity(s.SelectionSet)
            }

            args := s.ArgumentMap(cw.vars)
            var fieldComplexity int
            if s.ObjectDefinition.Kind == ast.Interface {
                fieldComplexity = cw.interfaceFieldComplexity(s.ObjectDefinition, s.Name, childComplexity, args)
            } else {
                fieldComplexity = cw.fieldComplexity(s.ObjectDefinition.Name, s.Name, childComplexity, args)
            }
            complexity = safeAdd(complexity, fieldComplexity)

        case *ast.FragmentSpread:
            complexity = safeAdd(complexity, cw.selectionSetComplexity(s.Definition.SelectionSet))

        case *ast.InlineFragment:
            complexity = safeAdd(complexity, cw.selectionSetComplexity(s.SelectionSet))
        }
    }
    return complexity
}

func (cw complexityWalker) interfaceFieldComplexity(def *ast.Definition, field string, childComplexity int, args map[string]interface{}) int {
    // Interfaces don't have their own separate field costs, so they have to assume the worst case.
    // We iterate over all implementors and choose the most expensive one.
    maxComplexity := 0
    implementors := cw.schema.GetPossibleTypes(def)
    for _, t := range implementors {
        fieldComplexity := cw.fieldComplexity(t.Name, field, childComplexity, args)
        if fieldComplexity > maxComplexity {
            maxComplexity = fieldComplexity
        }
    }
    return maxComplexity
}

func (cw complexityWalker) fieldComplexity(object, field string, childComplexity int, args map[string]interface{}) int {
    if customComplexity, ok := cw.es.Complexity(object, field, childComplexity, args); ok && customComplexity >= childComplexity {
        return customComplexity
    }
    // default complexity calculation
    return safeAdd(1, childComplexity)
}

const maxInt = int(^uint(0) >> 1)

// safeAdd is a saturating add of a and b that ignores negative operands.
// If a + b would overflow through normal Go addition,
// it returns the maximum integer value instead.
//
// Adding complexities with this function prevents attackers from intentionally
// overflowing the complexity calculation to allow overly-complex queries.
//
// It also helps mitigate the impact of custom complexities that accidentally
// return negative values.
func safeAdd(a, b int) int {
    // Ignore negative operands.
    if a < 0 {
        if b < 0 {
            return 1
        }
        return b
    } else if b < 0 {
        return a
    }

    c := a + b
    if c < a {
        // Set c to maximum integer instead of overflowing.
        c = maxInt
    }
    return c
}
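
Aside (not part of the vendored file above): a self-contained sketch of the saturating addition that the safeAdd comment describes. The clamping rules are re-implemented under the hypothetical name saturatingAdd so the behaviour can be run in isolation.

// Illustrative only; mirrors the documented behaviour of safeAdd above.
package main

import "fmt"

const maxInt = int(^uint(0) >> 1)

// saturatingAdd ignores negative operands and clamps to maxInt on overflow.
func saturatingAdd(a, b int) int {
    if a < 0 {
        if b < 0 {
            return 1
        }
        return b
    } else if b < 0 {
        return a
    }
    c := a + b
    if c < a { // addition wrapped around
        return maxInt
    }
    return c
}

func main() {
    fmt.Println(saturatingAdd(2, 3))      // 5
    fmt.Println(saturatingAdd(-7, 3))     // 3: negative operand is ignored
    fmt.Println(saturatingAdd(maxInt, 1)) // maxInt: clamped instead of overflowing
}
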
30
vendor/github.com/99designs/gqlgen/graphql/bool.go
generated
vendored
Normal file
@@ -0,0 +1,30 @@
package graphql

import (
    "fmt"
    "io"
    "strings"
)

func MarshalBoolean(b bool) Marshaler {
    return WriterFunc(func(w io.Writer) {
        if b {
            w.Write(trueLit)
        } else {
            w.Write(falseLit)
        }
    })
}

func UnmarshalBoolean(v interface{}) (bool, error) {
    switch v := v.(type) {
    case string:
        return strings.ToLower(v) == "true", nil
    case int:
        return v != 0, nil
    case bool:
        return v, nil
    default:
        return false, fmt.Errorf("%T is not a bool", v)
    }
}
253
vendor/github.com/99designs/gqlgen/graphql/context.go
generated
vendored
Normal file
@@ -0,0 +1,253 @@
package graphql

import (
    "context"
    "fmt"
    "sync"

    "github.com/vektah/gqlparser/ast"
    "github.com/vektah/gqlparser/gqlerror"
)

type Resolver func(ctx context.Context) (res interface{}, err error)
type FieldMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error)
type RequestMiddleware func(ctx context.Context, next func(ctx context.Context) []byte) []byte

type RequestContext struct {
    RawQuery  string
    Variables map[string]interface{}
    Doc       *ast.QueryDocument

    ComplexityLimit      int
    OperationComplexity  int
    DisableIntrospection bool

    // ErrorPresenter will be used to generate the error
    // message from errors given to Error().
    ErrorPresenter      ErrorPresenterFunc
    Recover             RecoverFunc
    ResolverMiddleware  FieldMiddleware
    DirectiveMiddleware FieldMiddleware
    RequestMiddleware   RequestMiddleware
    Tracer              Tracer

    errorsMu     sync.Mutex
    Errors       gqlerror.List
    extensionsMu sync.Mutex
    Extensions   map[string]interface{}
}

func DefaultResolverMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
    return next(ctx)
}

func DefaultDirectiveMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
    return next(ctx)
}

func DefaultRequestMiddleware(ctx context.Context, next func(ctx context.Context) []byte) []byte {
    return next(ctx)
}

func NewRequestContext(doc *ast.QueryDocument, query string, variables map[string]interface{}) *RequestContext {
    return &RequestContext{
        Doc:                 doc,
        RawQuery:            query,
        Variables:           variables,
        ResolverMiddleware:  DefaultResolverMiddleware,
        DirectiveMiddleware: DefaultDirectiveMiddleware,
        RequestMiddleware:   DefaultRequestMiddleware,
        Recover:             DefaultRecover,
        ErrorPresenter:      DefaultErrorPresenter,
        Tracer:              &NopTracer{},
    }
}

type key string

const (
    request  key = "request_context"
    resolver key = "resolver_context"
)

func GetRequestContext(ctx context.Context) *RequestContext {
    val := ctx.Value(request)
    if val == nil {
        return nil
    }

    return val.(*RequestContext)
}

func WithRequestContext(ctx context.Context, rc *RequestContext) context.Context {
    return context.WithValue(ctx, request, rc)
}

type ResolverContext struct {
    Parent *ResolverContext
    // The name of the type this field belongs to
    Object string
    // These are the args after processing, they can be mutated in middleware to change what the resolver will get.
    Args map[string]interface{}
    // The raw field
    Field CollectedField
    // The index of array in path.
    Index *int
    // The result object of resolver
    Result interface{}
}

func (r *ResolverContext) Path() []interface{} {
    var path []interface{}
    for it := r; it != nil; it = it.Parent {
        if it.Index != nil {
            path = append(path, *it.Index)
        } else if it.Field.Field != nil {
            path = append(path, it.Field.Alias)
        }
    }

    // because we are walking up the chain, all the elements are backwards, do an inplace flip.
    for i := len(path)/2 - 1; i >= 0; i-- {
        opp := len(path) - 1 - i
        path[i], path[opp] = path[opp], path[i]
    }

    return path
}

func GetResolverContext(ctx context.Context) *ResolverContext {
    val, _ := ctx.Value(resolver).(*ResolverContext)
    return val
}

func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Context {
    rc.Parent = GetResolverContext(ctx)
    return context.WithValue(ctx, resolver, rc)
}

// This is just a convenient wrapper method for CollectFields
func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField {
    resctx := GetResolverContext(ctx)
    return CollectFields(ctx, resctx.Field.Selections, satisfies)
}

// Errorf sends an error string to the client, passing it through the formatter.
func (c *RequestContext) Errorf(ctx context.Context, format string, args ...interface{}) {
    c.errorsMu.Lock()
    defer c.errorsMu.Unlock()

    c.Errors = append(c.Errors, c.ErrorPresenter(ctx, fmt.Errorf(format, args...)))
}

// Error sends an error to the client, passing it through the formatter.
func (c *RequestContext) Error(ctx context.Context, err error) {
    c.errorsMu.Lock()
    defer c.errorsMu.Unlock()

    c.Errors = append(c.Errors, c.ErrorPresenter(ctx, err))
}

// HasError returns true if the current field has already errored
func (c *RequestContext) HasError(rctx *ResolverContext) bool {
    c.errorsMu.Lock()
    defer c.errorsMu.Unlock()
    path := rctx.Path()

    for _, err := range c.Errors {
        if equalPath(err.Path, path) {
            return true
        }
    }
    return false
}

// GetErrors returns a list of errors that occurred in the current field
func (c *RequestContext) GetErrors(rctx *ResolverContext) gqlerror.List {
    c.errorsMu.Lock()
    defer c.errorsMu.Unlock()
    path := rctx.Path()

    var errs gqlerror.List
    for _, err := range c.Errors {
        if equalPath(err.Path, path) {
            errs = append(errs, err)
        }
    }
    return errs
}

func equalPath(a []interface{}, b []interface{}) bool {
    if len(a) != len(b) {
        return false
    }

    for i := 0; i < len(a); i++ {
        if a[i] != b[i] {
            return false
        }
    }

    return true
}

// AddError is a convenience method for adding an error to the current response
func AddError(ctx context.Context, err error) {
    GetRequestContext(ctx).Error(ctx, err)
}

// AddErrorf is a convenience method for adding an error to the current response
func AddErrorf(ctx context.Context, format string, args ...interface{}) {
    GetRequestContext(ctx).Errorf(ctx, format, args...)
}

// RegisterExtension registers an extension, returns error if extension has already been registered
func (c *RequestContext) RegisterExtension(key string, value interface{}) error {
    c.extensionsMu.Lock()
    defer c.extensionsMu.Unlock()

    if c.Extensions == nil {
        c.Extensions = make(map[string]interface{})
    }

    if _, ok := c.Extensions[key]; ok {
        return fmt.Errorf("extension already registered for key %s", key)
    }

    c.Extensions[key] = value
    return nil
}

// ChainFieldMiddleware add chain by FieldMiddleware
func ChainFieldMiddleware(handleFunc ...FieldMiddleware) FieldMiddleware {
    n := len(handleFunc)

    if n > 1 {
        lastI := n - 1
        return func(ctx context.Context, next Resolver) (interface{}, error) {
            var (
                chainHandler Resolver
                curI         int
            )
            chainHandler = func(currentCtx context.Context) (interface{}, error) {
                if curI == lastI {
                    return next(currentCtx)
                }
                curI++
                res, err := handleFunc[curI](currentCtx, chainHandler)
                curI--
                return res, err

            }
            return handleFunc[0](ctx, chainHandler)
        }
    }

    if n == 1 {
        return handleFunc[0]
    }

    return func(ctx context.Context, next Resolver) (interface{}, error) {
        return next(ctx)
    }
}
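
Aside (not part of the vendored file above): a minimal sketch of composing two field middlewares with the exported graphql.ChainFieldMiddleware. The logging middlewares and the final resolver are illustrative stand-ins.

package main

import (
    "context"
    "fmt"

    "github.com/99designs/gqlgen/graphql"
)

func main() {
    logA := func(ctx context.Context, next graphql.Resolver) (interface{}, error) {
        fmt.Println("before A")
        res, err := next(ctx)
        fmt.Println("after A")
        return res, err
    }
    logB := func(ctx context.Context, next graphql.Resolver) (interface{}, error) {
        fmt.Println("before B")
        return next(ctx)
    }

    // The chain runs logA, then logB, then the final resolver.
    chain := graphql.ChainFieldMiddleware(logA, logB)
    res, _ := chain(context.Background(), func(ctx context.Context) (interface{}, error) {
        return "field value", nil
    })
    fmt.Println(res)
}
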
31
vendor/github.com/99designs/gqlgen/graphql/error.go
generated
vendored
Normal file
@@ -0,0 +1,31 @@
package graphql

import (
    "context"

    "github.com/vektah/gqlparser/gqlerror"
)

type ErrorPresenterFunc func(context.Context, error) *gqlerror.Error

type ExtendedError interface {
    Extensions() map[string]interface{}
}

func DefaultErrorPresenter(ctx context.Context, err error) *gqlerror.Error {
    if gqlerr, ok := err.(*gqlerror.Error); ok {
        gqlerr.Path = GetResolverContext(ctx).Path()
        return gqlerr
    }

    var extensions map[string]interface{}
    if ee, ok := err.(ExtendedError); ok {
        extensions = ee.Extensions()
    }

    return &gqlerror.Error{
        Message:    err.Error(),
        Path:       GetResolverContext(ctx).Path(),
        Extensions: extensions,
    }
}
135
vendor/github.com/99designs/gqlgen/graphql/exec.go
generated
vendored
Normal file
@@ -0,0 +1,135 @@
package graphql

import (
    "context"
    "fmt"

    "github.com/vektah/gqlparser/ast"
)

type ExecutableSchema interface {
    Schema() *ast.Schema

    Complexity(typeName, fieldName string, childComplexity int, args map[string]interface{}) (int, bool)
    Query(ctx context.Context, op *ast.OperationDefinition) *Response
    Mutation(ctx context.Context, op *ast.OperationDefinition) *Response
    Subscription(ctx context.Context, op *ast.OperationDefinition) func() *Response
}

func CollectFields(ctx context.Context, selSet ast.SelectionSet, satisfies []string) []CollectedField {
    return collectFields(GetRequestContext(ctx), selSet, satisfies, map[string]bool{})
}

func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []string, visited map[string]bool) []CollectedField {
    var groupedFields []CollectedField

    for _, sel := range selSet {
        switch sel := sel.(type) {
        case *ast.Field:
            if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
                continue
            }
            f := getOrCreateField(&groupedFields, sel.Alias, func() CollectedField {
                return CollectedField{Field: sel}
            })

            f.Selections = append(f.Selections, sel.SelectionSet...)
        case *ast.InlineFragment:
            if !shouldIncludeNode(sel.Directives, reqCtx.Variables) || !instanceOf(sel.TypeCondition, satisfies) {
                continue
            }
            for _, childField := range collectFields(reqCtx, sel.SelectionSet, satisfies, visited) {
                f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
                f.Selections = append(f.Selections, childField.Selections...)
            }

        case *ast.FragmentSpread:
            if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
                continue
            }
            fragmentName := sel.Name
            if _, seen := visited[fragmentName]; seen {
                continue
            }
            visited[fragmentName] = true

            fragment := reqCtx.Doc.Fragments.ForName(fragmentName)
            if fragment == nil {
                // should never happen, validator has already run
                panic(fmt.Errorf("missing fragment %s", fragmentName))
            }

            if !instanceOf(fragment.TypeCondition, satisfies) {
                continue
            }

            for _, childField := range collectFields(reqCtx, fragment.SelectionSet, satisfies, visited) {
                f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
                f.Selections = append(f.Selections, childField.Selections...)
            }

        default:
            panic(fmt.Errorf("unsupported %T", sel))
        }
    }

    return groupedFields
}

type CollectedField struct {
    *ast.Field

    Selections ast.SelectionSet
}

func instanceOf(val string, satisfies []string) bool {
    for _, s := range satisfies {
        if val == s {
            return true
        }
    }
    return false
}

func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
    for i, cf := range *c {
        if cf.Alias == name {
            return &(*c)[i]
        }
    }

    f := creator()

    *c = append(*c, f)
    return &(*c)[len(*c)-1]
}

func shouldIncludeNode(directives ast.DirectiveList, variables map[string]interface{}) bool {
    skip, include := false, true

    if d := directives.ForName("skip"); d != nil {
        skip = resolveIfArgument(d, variables)
    }

    if d := directives.ForName("include"); d != nil {
        include = resolveIfArgument(d, variables)
    }

    return !skip && include
}

func resolveIfArgument(d *ast.Directive, variables map[string]interface{}) bool {
    arg := d.Arguments.ForName("if")
    if arg == nil {
        panic(fmt.Sprintf("%s: argument 'if' not defined", d.Name))
    }
    value, err := arg.Value.Value(variables)
    if err != nil {
        panic(err)
    }
    ret, ok := value.(bool)
    if !ok {
        panic(fmt.Sprintf("%s: argument 'if' is not a boolean", d.Name))
    }
    return ret
}
63
vendor/github.com/99designs/gqlgen/graphql/fieldset.go
generated
vendored
Normal file
@@ -0,0 +1,63 @@
package graphql

import (
    "io"
    "sync"
)

type FieldSet struct {
    fields  []CollectedField
    Values  []Marshaler
    delayed []delayedResult
}

type delayedResult struct {
    i int
    f func() Marshaler
}

func NewFieldSet(fields []CollectedField) *FieldSet {
    return &FieldSet{
        fields: fields,
        Values: make([]Marshaler, len(fields)),
    }
}

func (m *FieldSet) Concurrently(i int, f func() Marshaler) {
    m.delayed = append(m.delayed, delayedResult{i: i, f: f})
}

func (m *FieldSet) Dispatch() {
    if len(m.delayed) == 1 {
        // only one concurrent task, no need to spawn a goroutine or deal create waitgroups
        d := m.delayed[0]
        m.Values[d.i] = d.f()
    } else if len(m.delayed) > 1 {
        // more than one concurrent task, use the main goroutine to do one, only spawn goroutines for the others

        var wg sync.WaitGroup
        for _, d := range m.delayed[1:] {
            wg.Add(1)
            go func(d delayedResult) {
                m.Values[d.i] = d.f()
                wg.Done()
            }(d)
        }

        m.Values[m.delayed[0].i] = m.delayed[0].f()
        wg.Wait()
    }
}

func (m *FieldSet) MarshalGQL(writer io.Writer) {
    writer.Write(openBrace)
    for i, field := range m.fields {
        if i != 0 {
            writer.Write(comma)
        }
        writeQuotedString(writer, field.Alias)
        writer.Write(colon)
        m.Values[i].MarshalGQL(writer)
    }
    writer.Write(closeBrace)
}
31
vendor/github.com/99designs/gqlgen/graphql/float.go
generated
vendored
Normal file
@@ -0,0 +1,31 @@
package graphql

import (
    "encoding/json"
    "fmt"
    "io"
    "strconv"
)

func MarshalFloat(f float64) Marshaler {
    return WriterFunc(func(w io.Writer) {
        io.WriteString(w, fmt.Sprintf("%g", f))
    })
}

func UnmarshalFloat(v interface{}) (float64, error) {
    switch v := v.(type) {
    case string:
        return strconv.ParseFloat(v, 64)
    case int:
        return float64(v), nil
    case int64:
        return float64(v), nil
    case float64:
        return v, nil
    case json.Number:
        return strconv.ParseFloat(string(v), 64)
    default:
        return 0, fmt.Errorf("%T is not an float", v)
    }
}
36
vendor/github.com/99designs/gqlgen/graphql/id.go
generated
vendored
Normal file
@@ -0,0 +1,36 @@
package graphql

import (
    "encoding/json"
    "fmt"
    "io"
    "strconv"
)

func MarshalID(s string) Marshaler {
    return WriterFunc(func(w io.Writer) {
        io.WriteString(w, strconv.Quote(s))
    })
}
func UnmarshalID(v interface{}) (string, error) {
    switch v := v.(type) {
    case string:
        return v, nil
    case json.Number:
        return string(v), nil
    case int:
        return strconv.Itoa(v), nil
    case float64:
        return fmt.Sprintf("%f", v), nil
    case bool:
        if v {
            return "true", nil
        } else {
            return "false", nil
        }
    case nil:
        return "null", nil
    default:
        return "", fmt.Errorf("%T is not a string", v)
    }
}
29
vendor/github.com/99designs/gqlgen/graphql/int.go
generated
vendored
Normal file
@@ -0,0 +1,29 @@
package graphql

import (
    "encoding/json"
    "fmt"
    "io"
    "strconv"
)

func MarshalInt(i int) Marshaler {
    return WriterFunc(func(w io.Writer) {
        io.WriteString(w, strconv.Itoa(i))
    })
}

func UnmarshalInt(v interface{}) (int, error) {
    switch v := v.(type) {
    case string:
        return strconv.Atoi(v)
    case int:
        return v, nil
    case int64:
        return int(v), nil
    case json.Number:
        return strconv.Atoi(string(v))
    default:
        return 0, fmt.Errorf("%T is not an int", v)
    }
}
72
vendor/github.com/99designs/gqlgen/graphql/introspection/introspection.go
generated
vendored
Normal file
@@ -0,0 +1,72 @@
// introspection implements the spec defined in https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#schema-introspection
package introspection

import "github.com/vektah/gqlparser/ast"

type (
    Directive struct {
        Name        string
        Description string
        Locations   []string
        Args        []InputValue
    }

    EnumValue struct {
        Name        string
        Description string
        deprecation *ast.Directive
    }

    Field struct {
        Name        string
        Description string
        Type        *Type
        Args        []InputValue
        deprecation *ast.Directive
    }

    InputValue struct {
        Name         string
        Description  string
        DefaultValue *string
        Type         *Type
    }
)

func WrapSchema(schema *ast.Schema) *Schema {
    return &Schema{schema: schema}
}

func (f *EnumValue) IsDeprecated() bool {
    return f.deprecation != nil
}

func (f *EnumValue) DeprecationReason() *string {
    if f.deprecation == nil {
        return nil
    }

    reason := f.deprecation.Arguments.ForName("reason")
    if reason == nil {
        return nil
    }

    return &reason.Value.Raw
}

func (f *Field) IsDeprecated() bool {
    return f.deprecation != nil
}

func (f *Field) DeprecationReason() *string {
    if f.deprecation == nil {
        return nil
    }

    reason := f.deprecation.Arguments.ForName("reason")
    if reason == nil {
        return nil
    }

    return &reason.Value.Raw
}
104
vendor/github.com/99designs/gqlgen/graphql/introspection/query.go
generated
vendored
Normal file
@@ -0,0 +1,104 @@
package introspection

// Query is the query generated by graphiql to determine type information
const Query = `
query IntrospectionQuery {
  __schema {
    queryType {
      name
    }
    mutationType {
      name
    }
    subscriptionType {
      name
    }
    types {
      ...FullType
    }
    directives {
      name
      description
      locations
      args {
        ...InputValue
      }
    }
  }
}

fragment FullType on __Type {
  kind
  name
  description
  fields(includeDeprecated: true) {
    name
    description
    args {
      ...InputValue
    }
    type {
      ...TypeRef
    }
    isDeprecated
    deprecationReason
  }
  inputFields {
    ...InputValue
  }
  interfaces {
    ...TypeRef
  }
  enumValues(includeDeprecated: true) {
    name
    description
    isDeprecated
    deprecationReason
  }
  possibleTypes {
    ...TypeRef
  }
}

fragment InputValue on __InputValue {
  name
  description
  type {
    ...TypeRef
  }
  defaultValue
}

fragment TypeRef on __Type {
  kind
  name
  ofType {
    kind
    name
    ofType {
      kind
      name
      ofType {
        kind
        name
        ofType {
          kind
          name
          ofType {
            kind
            name
            ofType {
              kind
              name
              ofType {
                kind
                name
              }
            }
          }
        }
      }
    }
  }
}
`
68
vendor/github.com/99designs/gqlgen/graphql/introspection/schema.go
generated
vendored
Normal file
@@ -0,0 +1,68 @@
package introspection

import (
    "strings"

    "github.com/vektah/gqlparser/ast"
)

type Schema struct {
    schema *ast.Schema
}

func (s *Schema) Types() []Type {
    var types []Type
    for _, typ := range s.schema.Types {
        if strings.HasPrefix(typ.Name, "__") {
            continue
        }
        types = append(types, *WrapTypeFromDef(s.schema, typ))
    }
    return types
}

func (s *Schema) QueryType() *Type {
    return WrapTypeFromDef(s.schema, s.schema.Query)
}

func (s *Schema) MutationType() *Type {
    return WrapTypeFromDef(s.schema, s.schema.Mutation)
}

func (s *Schema) SubscriptionType() *Type {
    return WrapTypeFromDef(s.schema, s.schema.Subscription)
}

func (s *Schema) Directives() []Directive {
    var res []Directive

    for _, d := range s.schema.Directives {
        res = append(res, s.directiveFromDef(d))
    }

    return res
}

func (s *Schema) directiveFromDef(d *ast.DirectiveDefinition) Directive {
    var locs []string
    for _, loc := range d.Locations {
        locs = append(locs, string(loc))
    }

    var args []InputValue
    for _, arg := range d.Arguments {
        args = append(args, InputValue{
            Name:         arg.Name,
            Description:  arg.Description,
            DefaultValue: defaultValue(arg.DefaultValue),
            Type:         WrapTypeFromType(s.schema, arg.Type),
        })
    }

    return Directive{
        Name:        d.Name,
        Description: d.Description,
        Locations:   locs,
        Args:        args,
    }
}
172
vendor/github.com/99designs/gqlgen/graphql/introspection/type.go
generated
vendored
Normal file
@@ -0,0 +1,172 @@
package introspection

import (
    "strings"

    "github.com/vektah/gqlparser/ast"
)

type Type struct {
    schema *ast.Schema
    def    *ast.Definition
    typ    *ast.Type
}

func WrapTypeFromDef(s *ast.Schema, def *ast.Definition) *Type {
    if def == nil {
        return nil
    }
    return &Type{schema: s, def: def}
}

func WrapTypeFromType(s *ast.Schema, typ *ast.Type) *Type {
    if typ == nil {
        return nil
    }

    if !typ.NonNull && typ.NamedType != "" {
        return &Type{schema: s, def: s.Types[typ.NamedType]}
    }
    return &Type{schema: s, typ: typ}
}

func (t *Type) Kind() string {
    if t.typ != nil {
        if t.typ.NonNull {
            return "NON_NULL"
        }

        if t.typ.Elem != nil {
            return "LIST"
        }
    } else {
        return string(t.def.Kind)
    }

    panic("UNKNOWN")
}

func (t *Type) Name() *string {
    if t.def == nil {
        return nil
    }
    return &t.def.Name
}

func (t *Type) Description() string {
    if t.def == nil {
        return ""
    }
    return t.def.Description
}

func (t *Type) Fields(includeDeprecated bool) []Field {
    if t.def == nil || (t.def.Kind != ast.Object && t.def.Kind != ast.Interface) {
        return nil
    }
    var fields []Field
    for _, f := range t.def.Fields {
        if strings.HasPrefix(f.Name, "__") {
            continue
        }

        var args []InputValue
        for _, arg := range f.Arguments {
            args = append(args, InputValue{
                Type:         WrapTypeFromType(t.schema, arg.Type),
                Name:         arg.Name,
                Description:  arg.Description,
                DefaultValue: defaultValue(arg.DefaultValue),
            })
        }

        fields = append(fields, Field{
            Name:        f.Name,
            Description: f.Description,
            Args:        args,
            Type:        WrapTypeFromType(t.schema, f.Type),
            deprecation: f.Directives.ForName("deprecated"),
        })
    }
    return fields
}

func (t *Type) InputFields() []InputValue {
    if t.def == nil || t.def.Kind != ast.InputObject {
        return nil
    }

    var res []InputValue
    for _, f := range t.def.Fields {
        res = append(res, InputValue{
            Name:         f.Name,
            Description:  f.Description,
            Type:         WrapTypeFromType(t.schema, f.Type),
            DefaultValue: defaultValue(f.DefaultValue),
        })
    }
    return res
}

func defaultValue(value *ast.Value) *string {
    if value == nil {
        return nil
    }
    val := value.String()
    return &val
}

func (t *Type) Interfaces() []Type {
    if t.def == nil || t.def.Kind != ast.Object {
        return nil
    }

    var res []Type
    for _, intf := range t.def.Interfaces {
        res = append(res, *WrapTypeFromDef(t.schema, t.schema.Types[intf]))
    }

    return res
}

func (t *Type) PossibleTypes() []Type {
    if t.def == nil || (t.def.Kind != ast.Interface && t.def.Kind != ast.Union) {
        return nil
    }

    var res []Type
    for _, pt := range t.schema.GetPossibleTypes(t.def) {
        res = append(res, *WrapTypeFromDef(t.schema, pt))
    }
    return res
}

func (t *Type) EnumValues(includeDeprecated bool) []EnumValue {
    if t.def == nil || t.def.Kind != ast.Enum {
        return nil
    }

    var res []EnumValue
    for _, val := range t.def.EnumValues {
        res = append(res, EnumValue{
            Name:        val.Name,
            Description: val.Description,
            deprecation: val.Directives.ForName("deprecated"),
        })
    }
    return res
}

func (t *Type) OfType() *Type {
    if t.typ == nil {
        return nil
    }
    if t.typ.NonNull {
        // fake non null nodes
        cpy := *t.typ
        cpy.NonNull = false

        return WrapTypeFromType(t.schema, &cpy)
    }
    return WrapTypeFromType(t.schema, t.typ.Elem)
}
52
vendor/github.com/99designs/gqlgen/graphql/jsonw.go
generated
vendored
Normal file
@@ -0,0 +1,52 @@
package graphql

import (
    "io"
)

var nullLit = []byte(`null`)
var trueLit = []byte(`true`)
var falseLit = []byte(`false`)
var openBrace = []byte(`{`)
var closeBrace = []byte(`}`)
var openBracket = []byte(`[`)
var closeBracket = []byte(`]`)
var colon = []byte(`:`)
var comma = []byte(`,`)

var Null = &lit{nullLit}
var True = &lit{trueLit}
var False = &lit{falseLit}

type Marshaler interface {
    MarshalGQL(w io.Writer)
}

type Unmarshaler interface {
    UnmarshalGQL(v interface{}) error
}

type WriterFunc func(writer io.Writer)

func (f WriterFunc) MarshalGQL(w io.Writer) {
    f(w)
}

type Array []Marshaler

func (a Array) MarshalGQL(writer io.Writer) {
    writer.Write(openBracket)
    for i, val := range a {
        if i != 0 {
            writer.Write(comma)
        }
        val.MarshalGQL(writer)
    }
    writer.Write(closeBracket)
}

type lit struct{ b []byte }

func (l lit) MarshalGQL(w io.Writer) {
    w.Write(l.b)
}
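
Aside (not part of the vendored file above): a small sketch of how these Marshaler building blocks compose; running it writes ["hi",true,null] to stdout without building an intermediate buffer.

package main

import (
    "os"

    "github.com/99designs/gqlgen/graphql"
)

func main() {
    value := graphql.Array{
        graphql.MarshalString("hi"),
        graphql.True,
        graphql.Null,
    }
    value.MarshalGQL(os.Stdout) // each element streams its own JSON fragment
}
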
24
vendor/github.com/99designs/gqlgen/graphql/map.go
generated
vendored
Normal file
@@ -0,0 +1,24 @@
package graphql

import (
    "encoding/json"
    "fmt"
    "io"
)

func MarshalMap(val map[string]interface{}) Marshaler {
    return WriterFunc(func(w io.Writer) {
        err := json.NewEncoder(w).Encode(val)
        if err != nil {
            panic(err)
        }
    })
}

func UnmarshalMap(v interface{}) (map[string]interface{}, error) {
    if m, ok := v.(map[string]interface{}); ok {
        return m, nil
    }

    return nil, fmt.Errorf("%T is not a map", v)
}
14
vendor/github.com/99designs/gqlgen/graphql/oneshot.go
generated
vendored
Normal file
@@ -0,0 +1,14 @@
package graphql

func OneShot(resp *Response) func() *Response {
    var oneshot bool

    return func() *Response {
        if oneshot {
            return nil
        }
        oneshot = true

        return resp
    }
}
19
vendor/github.com/99designs/gqlgen/graphql/recovery.go
generated
vendored
Normal file
@@ -0,0 +1,19 @@
package graphql

import (
    "context"
    "errors"
    "fmt"
    "os"
    "runtime/debug"
)

type RecoverFunc func(ctx context.Context, err interface{}) (userMessage error)

func DefaultRecover(ctx context.Context, err interface{}) error {
    fmt.Fprintln(os.Stderr, err)
    fmt.Fprintln(os.Stderr)
    debug.PrintStack()

    return errors.New("internal system error")
}
24
vendor/github.com/99designs/gqlgen/graphql/response.go
generated
vendored
Normal file
@@ -0,0 +1,24 @@
package graphql

import (
    "context"
    "encoding/json"
    "fmt"

    "github.com/vektah/gqlparser/gqlerror"
)

// Errors are intentionally serialized first based on the advice in
// https://github.com/facebook/graphql/commit/7b40390d48680b15cb93e02d46ac5eb249689876#diff-757cea6edf0288677a9eea4cfc801d87R107
// and https://github.com/facebook/graphql/pull/384
type Response struct {
    Errors     gqlerror.List          `json:"errors,omitempty"`
    Data       json.RawMessage        `json:"data"`
    Extensions map[string]interface{} `json:"extensions,omitempty"`
}

func ErrorResponse(ctx context.Context, messagef string, args ...interface{}) *Response {
    return &Response{
        Errors: gqlerror.List{{Message: fmt.Sprintf(messagef, args...)}},
    }
}
68
vendor/github.com/99designs/gqlgen/graphql/string.go
generated
vendored
Normal file
@@ -0,0 +1,68 @@
package graphql

import (
    "fmt"
    "io"
    "strconv"
)

const encodeHex = "0123456789ABCDEF"

func MarshalString(s string) Marshaler {
    return WriterFunc(func(w io.Writer) {
        writeQuotedString(w, s)
    })
}

func writeQuotedString(w io.Writer, s string) {
    start := 0
    io.WriteString(w, `"`)

    for i, c := range s {
        if c < 0x20 || c == '\\' || c == '"' {
            io.WriteString(w, s[start:i])

            switch c {
            case '\t':
                io.WriteString(w, `\t`)
            case '\r':
                io.WriteString(w, `\r`)
            case '\n':
                io.WriteString(w, `\n`)
            case '\\':
                io.WriteString(w, `\\`)
            case '"':
                io.WriteString(w, `\"`)
            default:
                io.WriteString(w, `\u00`)
                w.Write([]byte{encodeHex[c>>4], encodeHex[c&0xf]})
            }

            start = i + 1
        }
    }

    io.WriteString(w, s[start:])
    io.WriteString(w, `"`)
}

func UnmarshalString(v interface{}) (string, error) {
    switch v := v.(type) {
    case string:
        return v, nil
    case int:
        return strconv.Itoa(v), nil
    case float64:
        return fmt.Sprintf("%f", v), nil
    case bool:
        if v {
            return "true", nil
        } else {
            return "false", nil
        }
    case nil:
        return "null", nil
    default:
        return "", fmt.Errorf("%T is not a string", v)
    }
}
21
vendor/github.com/99designs/gqlgen/graphql/time.go
generated
vendored
Normal file
@@ -0,0 +1,21 @@
package graphql

import (
    "errors"
    "io"
    "strconv"
    "time"
)

func MarshalTime(t time.Time) Marshaler {
    return WriterFunc(func(w io.Writer) {
        io.WriteString(w, strconv.Quote(t.Format(time.RFC3339)))
    })
}

func UnmarshalTime(v interface{}) (time.Time, error) {
    if tmpStr, ok := v.(string); ok {
        return time.Parse(time.RFC3339, tmpStr)
    }
    return time.Time{}, errors.New("time should be RFC3339 formatted string")
}
58
vendor/github.com/99designs/gqlgen/graphql/tracer.go
generated
vendored
Normal file
@@ -0,0 +1,58 @@
package graphql

import (
    "context"
)

var _ Tracer = (*NopTracer)(nil)

type Tracer interface {
    StartOperationParsing(ctx context.Context) context.Context
    EndOperationParsing(ctx context.Context)
    StartOperationValidation(ctx context.Context) context.Context
    EndOperationValidation(ctx context.Context)
    StartOperationExecution(ctx context.Context) context.Context
    StartFieldExecution(ctx context.Context, field CollectedField) context.Context
    StartFieldResolverExecution(ctx context.Context, rc *ResolverContext) context.Context
    StartFieldChildExecution(ctx context.Context) context.Context
    EndFieldExecution(ctx context.Context)
    EndOperationExecution(ctx context.Context)
}

type NopTracer struct{}

func (NopTracer) StartOperationParsing(ctx context.Context) context.Context {
    return ctx
}

func (NopTracer) EndOperationParsing(ctx context.Context) {
}

func (NopTracer) StartOperationValidation(ctx context.Context) context.Context {
    return ctx
}

func (NopTracer) EndOperationValidation(ctx context.Context) {
}

func (NopTracer) StartOperationExecution(ctx context.Context) context.Context {
    return ctx
}

func (NopTracer) StartFieldExecution(ctx context.Context, field CollectedField) context.Context {
    return ctx
}

func (NopTracer) StartFieldResolverExecution(ctx context.Context, rc *ResolverContext) context.Context {
    return ctx
}

func (NopTracer) StartFieldChildExecution(ctx context.Context) context.Context {
    return ctx
}

func (NopTracer) EndFieldExecution(ctx context.Context) {
}

func (NopTracer) EndOperationExecution(ctx context.Context) {
}
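
Aside (not part of the vendored file above): a sketch of a custom tracer that embeds NopTracer so only the hook of interest needs an implementation; such a tracer could be installed via the handler.Tracer option that appears later in this commit.

package tracing

import (
    "context"
    "log"

    "github.com/99designs/gqlgen/graphql"
)

type fieldLogger struct {
    graphql.NopTracer // supplies no-op implementations for the rest of the interface
}

func (fieldLogger) StartFieldExecution(ctx context.Context, field graphql.CollectedField) context.Context {
    log.Printf("resolving field %s", field.Name)
    return ctx
}

var _ graphql.Tracer = fieldLogger{} // compile-time interface check
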
3
vendor/github.com/99designs/gqlgen/graphql/version.go
generated
vendored
Normal file
@@ -0,0 +1,3 @@
package graphql

const Version = "dev"
57
vendor/github.com/99designs/gqlgen/handler/context.go
generated
vendored
Normal file
@@ -0,0 +1,57 @@
package handler

import "context"

type key string

const (
    initpayload key = "ws_initpayload_context"
)

// InitPayload is a structure that is parsed from the websocket init message payload. TO use
// request headers for non-websocket, instead wrap the graphql handler in a middleware.
type InitPayload map[string]interface{}

// GetString safely gets a string value from the payload. It returns an empty string if the
// payload is nil or the value isn't set.
func (payload InitPayload) GetString(key string) string {
    if payload == nil {
        return ""
    }

    if value, ok := payload[key]; ok {
        res, _ := value.(string)
        return res
    }

    return ""
}

// Authorization is a short hand for getting the Authorization header from the
// payload.
func (payload InitPayload) Authorization() string {
    if value := payload.GetString("Authorization"); value != "" {
        return value
    }

    if value := payload.GetString("authorization"); value != "" {
        return value
    }

    return ""
}

func withInitPayload(ctx context.Context, payload InitPayload) context.Context {
    return context.WithValue(ctx, initpayload, payload)
}

// GetInitPayload gets a map of the data sent with the connection_init message, which is used by
// graphql clients as a stand-in for HTTP headers.
func GetInitPayload(ctx context.Context) InitPayload {
    payload, ok := ctx.Value(initpayload).(InitPayload)
    if !ok {
        return nil
    }

    return payload
}
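
Aside (not part of the vendored file above): a sketch of checking the websocket init payload from application code; validateToken is a hypothetical application-side helper.

package example

import (
    "context"
    "errors"

    "github.com/99designs/gqlgen/handler"
)

// validateToken is a stand-in for whatever token check the application performs.
func validateToken(token string) bool { return token != "" }

func requireAuth(ctx context.Context) error {
    payload := handler.GetInitPayload(ctx) // nil when the request did not come in over a websocket
    if payload == nil {
        return errors.New("no websocket init payload")
    }
    if !validateToken(payload.Authorization()) {
        return errors.New("invalid token")
    }
    return nil
}
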
467
vendor/github.com/99designs/gqlgen/handler/graphql.go
generated
vendored
Normal file
@@ -0,0 +1,467 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/99designs/gqlgen/complexity"
|
||||
"github.com/99designs/gqlgen/graphql"
|
||||
"github.com/gorilla/websocket"
|
||||
"github.com/hashicorp/golang-lru"
|
||||
"github.com/vektah/gqlparser/ast"
|
||||
"github.com/vektah/gqlparser/gqlerror"
|
||||
"github.com/vektah/gqlparser/parser"
|
||||
"github.com/vektah/gqlparser/validator"
|
||||
)
|
||||
|
||||
type params struct {
|
||||
Query string `json:"query"`
|
||||
OperationName string `json:"operationName"`
|
||||
Variables map[string]interface{} `json:"variables"`
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
cacheSize int
|
||||
upgrader websocket.Upgrader
|
||||
recover graphql.RecoverFunc
|
||||
errorPresenter graphql.ErrorPresenterFunc
|
||||
resolverHook graphql.FieldMiddleware
|
||||
requestHook graphql.RequestMiddleware
|
||||
tracer graphql.Tracer
|
||||
complexityLimit int
|
||||
disableIntrospection bool
|
||||
}
|
||||
|
||||
func (c *Config) newRequestContext(es graphql.ExecutableSchema, doc *ast.QueryDocument, op *ast.OperationDefinition, query string, variables map[string]interface{}) *graphql.RequestContext {
|
||||
reqCtx := graphql.NewRequestContext(doc, query, variables)
|
||||
reqCtx.DisableIntrospection = c.disableIntrospection
|
||||
|
||||
if hook := c.recover; hook != nil {
|
||||
reqCtx.Recover = hook
|
||||
}
|
||||
|
||||
if hook := c.errorPresenter; hook != nil {
|
||||
reqCtx.ErrorPresenter = hook
|
||||
}
|
||||
|
||||
if hook := c.resolverHook; hook != nil {
|
||||
reqCtx.ResolverMiddleware = hook
|
||||
}
|
||||
|
||||
if hook := c.requestHook; hook != nil {
|
||||
reqCtx.RequestMiddleware = hook
|
||||
}
|
||||
|
||||
if hook := c.tracer; hook != nil {
|
||||
reqCtx.Tracer = hook
|
||||
}
|
||||
|
||||
if c.complexityLimit > 0 {
|
||||
reqCtx.ComplexityLimit = c.complexityLimit
|
||||
operationComplexity := complexity.Calculate(es, op, variables)
|
||||
reqCtx.OperationComplexity = operationComplexity
|
||||
}
|
||||
|
||||
return reqCtx
|
||||
}
|
||||
|
||||
type Option func(cfg *Config)
|
||||
|
||||
func WebsocketUpgrader(upgrader websocket.Upgrader) Option {
|
||||
return func(cfg *Config) {
|
||||
cfg.upgrader = upgrader
|
||||
}
|
||||
}
|
||||
|
||||
func RecoverFunc(recover graphql.RecoverFunc) Option {
|
||||
return func(cfg *Config) {
|
||||
cfg.recover = recover
|
||||
}
|
||||
}
|
||||
|
||||
// ErrorPresenter transforms errors found while resolving into errors that will be returned to the user. It provides
|
||||
// a good place to add any extra fields, like error.type, that might be desired by your frontend. Check the default
|
||||
// implementation in graphql.DefaultErrorPresenter for an example.
|
||||
func ErrorPresenter(f graphql.ErrorPresenterFunc) Option {
|
||||
return func(cfg *Config) {
|
||||
cfg.errorPresenter = f
|
||||
}
|
||||
}
|
||||
|
||||
// IntrospectionEnabled = false will forbid clients from calling introspection endpoints. Can be useful in prod when you dont
|
||||
// want clients introspecting the full schema.
|
||||
func IntrospectionEnabled(enabled bool) Option {
|
||||
return func(cfg *Config) {
|
||||
cfg.disableIntrospection = !enabled
|
||||
}
|
||||
}
|
||||
|
||||
// ComplexityLimit sets a maximum query complexity that is allowed to be executed.
|
||||
// If a query is submitted that exceeds the limit, a 422 status code will be returned.
|
||||
func ComplexityLimit(limit int) Option {
|
||||
return func(cfg *Config) {
|
||||
cfg.complexityLimit = limit
|
||||
}
|
||||
}
|
||||
|
||||
// ResolverMiddleware allows you to define a function that will be called around every resolver,
|
||||
// useful for logging.
|
||||
func ResolverMiddleware(middleware graphql.FieldMiddleware) Option {
|
||||
return func(cfg *Config) {
|
||||
if cfg.resolverHook == nil {
|
||||
cfg.resolverHook = middleware
|
||||
return
|
||||
}
|
||||
|
||||
lastResolve := cfg.resolverHook
|
||||
cfg.resolverHook = func(ctx context.Context, next graphql.Resolver) (res interface{}, err error) {
|
||||
return lastResolve(ctx, func(ctx context.Context) (res interface{}, err error) {
|
||||
return middleware(ctx, next)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RequestMiddleware allows you to define a function that will be called around the root request,
|
||||
// after the query has been parsed. This is useful for logging
|
||||
func RequestMiddleware(middleware graphql.RequestMiddleware) Option {
|
||||
return func(cfg *Config) {
|
||||
if cfg.requestHook == nil {
|
||||
cfg.requestHook = middleware
|
||||
return
|
||||
}
|
||||
|
||||
lastResolve := cfg.requestHook
|
||||
cfg.requestHook = func(ctx context.Context, next func(ctx context.Context) []byte) []byte {
|
||||
return lastResolve(ctx, func(ctx context.Context) []byte {
|
||||
return middleware(ctx, next)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Tracer allows you to add a request/resolver tracer that will be called around the root request,
|
||||
// calling resolver. This is useful for tracing
|
||||
func Tracer(tracer graphql.Tracer) Option {
|
||||
return func(cfg *Config) {
|
||||
if cfg.tracer == nil {
|
||||
cfg.tracer = tracer
|
||||
|
||||
} else {
|
||||
lastResolve := cfg.tracer
|
||||
cfg.tracer = &tracerWrapper{
|
||||
tracer1: lastResolve,
|
||||
tracer2: tracer,
|
||||
}
|
||||
}
|
||||
|
||||
opt := RequestMiddleware(func(ctx context.Context, next func(ctx context.Context) []byte) []byte {
|
||||
ctx = tracer.StartOperationExecution(ctx)
|
||||
resp := next(ctx)
|
||||
tracer.EndOperationExecution(ctx)
|
||||
|
||||
return resp
|
||||
})
|
||||
opt(cfg)
|
||||
}
|
||||
}
|
||||
|
||||
type tracerWrapper struct {
|
||||
tracer1 graphql.Tracer
|
||||
tracer2 graphql.Tracer
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) StartOperationParsing(ctx context.Context) context.Context {
|
||||
ctx = tw.tracer1.StartOperationParsing(ctx)
|
||||
ctx = tw.tracer2.StartOperationParsing(ctx)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) EndOperationParsing(ctx context.Context) {
|
||||
tw.tracer2.EndOperationParsing(ctx)
|
||||
tw.tracer1.EndOperationParsing(ctx)
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) StartOperationValidation(ctx context.Context) context.Context {
|
||||
ctx = tw.tracer1.StartOperationValidation(ctx)
|
||||
ctx = tw.tracer2.StartOperationValidation(ctx)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) EndOperationValidation(ctx context.Context) {
|
||||
tw.tracer2.EndOperationValidation(ctx)
|
||||
tw.tracer1.EndOperationValidation(ctx)
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) StartOperationExecution(ctx context.Context) context.Context {
|
||||
ctx = tw.tracer1.StartOperationExecution(ctx)
|
||||
ctx = tw.tracer2.StartOperationExecution(ctx)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) StartFieldExecution(ctx context.Context, field graphql.CollectedField) context.Context {
|
||||
ctx = tw.tracer1.StartFieldExecution(ctx, field)
|
||||
ctx = tw.tracer2.StartFieldExecution(ctx, field)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) StartFieldResolverExecution(ctx context.Context, rc *graphql.ResolverContext) context.Context {
|
||||
ctx = tw.tracer1.StartFieldResolverExecution(ctx, rc)
|
||||
ctx = tw.tracer2.StartFieldResolverExecution(ctx, rc)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) StartFieldChildExecution(ctx context.Context) context.Context {
|
||||
ctx = tw.tracer1.StartFieldChildExecution(ctx)
|
||||
ctx = tw.tracer2.StartFieldChildExecution(ctx)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) EndFieldExecution(ctx context.Context) {
|
||||
tw.tracer2.EndFieldExecution(ctx)
|
||||
tw.tracer1.EndFieldExecution(ctx)
|
||||
}
|
||||
|
||||
func (tw *tracerWrapper) EndOperationExecution(ctx context.Context) {
|
||||
tw.tracer2.EndOperationExecution(ctx)
|
||||
tw.tracer1.EndOperationExecution(ctx)
|
||||
}
|
||||
|
||||
// CacheSize sets the maximum size of the query cache.
|
||||
// If size is less than or equal to 0, the cache is disabled.
|
||||
func CacheSize(size int) Option {
|
||||
return func(cfg *Config) {
|
||||
cfg.cacheSize = size
|
||||
}
|
||||
}
|
||||
|
||||
const DefaultCacheSize = 1000
|
||||
|
||||
func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc {
|
||||
cfg := &Config{
|
||||
cacheSize: DefaultCacheSize,
|
||||
upgrader: websocket.Upgrader{
|
||||
ReadBufferSize: 1024,
|
||||
WriteBufferSize: 1024,
|
||||
},
|
||||
}
|
||||
|
||||
for _, option := range options {
|
||||
option(cfg)
|
||||
}
|
||||
|
||||
var cache *lru.Cache
|
||||
if cfg.cacheSize > 0 {
|
||||
var err error
|
||||
cache, err = lru.New(cfg.cacheSize)
|
||||
if err != nil {
|
||||
// An error is only returned for non-positive cache size
|
||||
// and we already checked for that.
|
||||
panic("unexpected error creating cache: " + err.Error())
|
||||
}
|
||||
}
|
||||
if cfg.tracer == nil {
|
||||
cfg.tracer = &graphql.NopTracer{}
|
||||
}
|
||||
|
||||
handler := &graphqlHandler{
|
||||
cfg: cfg,
|
||||
cache: cache,
|
||||
exec: exec,
|
||||
}
|
||||
|
||||
return handler.ServeHTTP
|
||||
}

var _ http.Handler = (*graphqlHandler)(nil)

type graphqlHandler struct {
	cfg   *Config
	cache *lru.Cache
	exec  graphql.ExecutableSchema
}

func (gh *graphqlHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if r.Method == http.MethodOptions {
		w.Header().Set("Allow", "OPTIONS, GET, POST")
		w.WriteHeader(http.StatusOK)
		return
	}

	if strings.Contains(r.Header.Get("Upgrade"), "websocket") {
		connectWs(gh.exec, w, r, gh.cfg, gh.cache)
		return
	}

	var reqParams params
	switch r.Method {
	case http.MethodGet:
		reqParams.Query = r.URL.Query().Get("query")
		reqParams.OperationName = r.URL.Query().Get("operationName")

		if variables := r.URL.Query().Get("variables"); variables != "" {
			if err := jsonDecode(strings.NewReader(variables), &reqParams.Variables); err != nil {
				sendErrorf(w, http.StatusBadRequest, "variables could not be decoded")
				return
			}
		}
	case http.MethodPost:
		if err := jsonDecode(r.Body, &reqParams); err != nil {
			sendErrorf(w, http.StatusBadRequest, "json body could not be decoded: "+err.Error())
			return
		}
	default:
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	w.Header().Set("Content-Type", "application/json")

	ctx := r.Context()

	var doc *ast.QueryDocument
	var cacheHit bool
	if gh.cache != nil {
		val, ok := gh.cache.Get(reqParams.Query)
		if ok {
			doc = val.(*ast.QueryDocument)
			cacheHit = true
		}
	}

	ctx, doc, gqlErr := gh.parseOperation(ctx, &parseOperationArgs{
		Query:     reqParams.Query,
		CachedDoc: doc,
	})
	if gqlErr != nil {
		sendError(w, http.StatusUnprocessableEntity, gqlErr)
		return
	}

	ctx, op, vars, listErr := gh.validateOperation(ctx, &validateOperationArgs{
		Doc:           doc,
		OperationName: reqParams.OperationName,
		CacheHit:      cacheHit,
		R:             r,
		Variables:     reqParams.Variables,
	})
	if len(listErr) != 0 {
		sendError(w, http.StatusUnprocessableEntity, listErr...)
		return
	}

	if gh.cache != nil && !cacheHit {
		gh.cache.Add(reqParams.Query, doc)
	}

	reqCtx := gh.cfg.newRequestContext(gh.exec, doc, op, reqParams.Query, vars)
	ctx = graphql.WithRequestContext(ctx, reqCtx)

	defer func() {
		if err := recover(); err != nil {
			userErr := reqCtx.Recover(ctx, err)
			sendErrorf(w, http.StatusUnprocessableEntity, userErr.Error())
		}
	}()

	if reqCtx.ComplexityLimit > 0 && reqCtx.OperationComplexity > reqCtx.ComplexityLimit {
		sendErrorf(w, http.StatusUnprocessableEntity, "operation has complexity %d, which exceeds the limit of %d", reqCtx.OperationComplexity, reqCtx.ComplexityLimit)
		return
	}

	switch op.Operation {
	case ast.Query:
		b, err := json.Marshal(gh.exec.Query(ctx, op))
		if err != nil {
			panic(err)
		}
		w.Write(b)
	case ast.Mutation:
		b, err := json.Marshal(gh.exec.Mutation(ctx, op))
		if err != nil {
			panic(err)
		}
		w.Write(b)
	default:
		sendErrorf(w, http.StatusBadRequest, "unsupported operation type")
	}
}

type parseOperationArgs struct {
	Query     string
	CachedDoc *ast.QueryDocument
}

func (gh *graphqlHandler) parseOperation(ctx context.Context, args *parseOperationArgs) (context.Context, *ast.QueryDocument, *gqlerror.Error) {
	ctx = gh.cfg.tracer.StartOperationParsing(ctx)
	defer func() { gh.cfg.tracer.EndOperationParsing(ctx) }()

	if args.CachedDoc != nil {
		return ctx, args.CachedDoc, nil
	}

	doc, gqlErr := parser.ParseQuery(&ast.Source{Input: args.Query})
	if gqlErr != nil {
		return ctx, nil, gqlErr
	}

	return ctx, doc, nil
}

type validateOperationArgs struct {
	Doc           *ast.QueryDocument
	OperationName string
	CacheHit      bool
	R             *http.Request
	Variables     map[string]interface{}
}

func (gh *graphqlHandler) validateOperation(ctx context.Context, args *validateOperationArgs) (context.Context, *ast.OperationDefinition, map[string]interface{}, gqlerror.List) {
	ctx = gh.cfg.tracer.StartOperationValidation(ctx)
	defer func() { gh.cfg.tracer.EndOperationValidation(ctx) }()

	if !args.CacheHit {
		listErr := validator.Validate(gh.exec.Schema(), args.Doc)
		if len(listErr) != 0 {
			return ctx, nil, nil, listErr
		}
	}

	op := args.Doc.Operations.ForName(args.OperationName)
	if op == nil {
		return ctx, nil, nil, gqlerror.List{gqlerror.Errorf("operation %s not found", args.OperationName)}
	}

	if op.Operation != ast.Query && args.R.Method == http.MethodGet {
		return ctx, nil, nil, gqlerror.List{gqlerror.Errorf("GET requests only allow query operations")}
	}

	vars, err := validator.VariableValues(gh.exec.Schema(), op, args.Variables)
	if err != nil {
		return ctx, nil, nil, gqlerror.List{err}
	}

	return ctx, op, vars, nil
}

func jsonDecode(r io.Reader, val interface{}) error {
	dec := json.NewDecoder(r)
	dec.UseNumber()
	return dec.Decode(val)
}

func sendError(w http.ResponseWriter, code int, errors ...*gqlerror.Error) {
	w.WriteHeader(code)
	b, err := json.Marshal(&graphql.Response{Errors: errors})
	if err != nil {
		panic(err)
	}
	w.Write(b)
}

func sendErrorf(w http.ResponseWriter, code int, format string, args ...interface{}) {
	sendError(w, code, &gqlerror.Error{Message: fmt.Sprintf(format, args...)})
}
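For context, a hedged in-package test sketch of the two request shapes ServeHTTP accepts, GET with URL parameters and POST with a JSON body. The file name and test are assumptions, not part of the vendored code; it leans on the executableSchemaStub defined in stub.go further down.

```Go
package handler

import (
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
)

// Assumed test: drives the GET and POST paths of ServeHTTP with the stub schema.
func TestServeHTTPMethods(t *testing.T) {
	h := GraphQL(&executableSchemaStub{})

	// GET: the query is passed as a URL parameter.
	r := httptest.NewRequest("GET", "/query?query={me{name}}", nil)
	w := httptest.NewRecorder()
	h(w, r)
	if w.Code != http.StatusOK {
		t.Fatalf("GET returned %d", w.Code)
	}

	// POST: the query is passed as a JSON body.
	r = httptest.NewRequest("POST", "/query", strings.NewReader(`{"query":"{ me { name } }"}`))
	w = httptest.NewRecorder()
	h(w, r)
	if w.Code != http.StatusOK {
		t.Fatalf("POST returned %d", w.Code)
	}
}
```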
54
vendor/github.com/99designs/gqlgen/handler/playground.go
generated
vendored
Normal file
@@ -0,0 +1,54 @@
package handler

import (
	"html/template"
	"net/http"
)

var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
<html>
<head>
<meta charset=utf-8/>
<meta name="viewport" content="user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui">
<link rel="shortcut icon" href="https://graphcool-playground.netlify.com/favicon.png">
<link rel="stylesheet" href="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/css/index.css"/>
<link rel="shortcut icon" href="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/favicon.png"/>
<script src="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/js/middleware.js"></script>
<title>{{.title}}</title>
</head>
<body>
<style type="text/css">
html { font-family: "Open Sans", sans-serif; overflow: hidden; }
body { margin: 0; background: #172a3a; }
</style>
<div id="root"/>
<script type="text/javascript">
window.addEventListener('load', function (event) {
	const root = document.getElementById('root');
	root.classList.add('playgroundIn');
	const wsProto = location.protocol == 'https:' ? 'wss:' : 'ws:'
	GraphQLPlayground.init(root, {
		endpoint: location.protocol + '//' + location.host + '{{.endpoint}}',
		subscriptionsEndpoint: wsProto + '//' + location.host + '{{.endpoint }}',
		settings: {
			'request.credentials': 'same-origin'
		}
	})
})
</script>
</body>
</html>
`))

func Playground(title string, endpoint string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		err := page.Execute(w, map[string]string{
			"title":    title,
			"endpoint": endpoint,
			"version":  "1.7.8",
		})
		if err != nil {
			panic(err)
		}
	}
}
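A minimal server sketch showing how Playground is typically paired with the GraphQL handler and the CacheSize option above. The generated package, its NewExecutableSchema constructor, the routes, and the port are assumptions for illustration, not part of this vendored code.

```Go
package main

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/handler"

	// Hypothetical package: gqlgen's code generator normally emits a
	// NewExecutableSchema constructor for the application's schema.
	"example.com/myapp/graph/generated"
)

func main() {
	es := generated.NewExecutableSchema(generated.Config{})

	// Playground serves the HTML page above and points the browser at /query;
	// CacheSize overrides the default query cache of 1000 parsed documents.
	http.Handle("/", handler.Playground("GraphQL playground", "/query"))
	http.Handle("/query", handler.GraphQL(es, handler.CacheSize(2000)))

	log.Fatal(http.ListenAndServe(":8080", nil))
}
```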
51
vendor/github.com/99designs/gqlgen/handler/stub.go
generated
vendored
Normal file
@@ -0,0 +1,51 @@
package handler

import (
	"context"

	"github.com/99designs/gqlgen/graphql"
	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

type executableSchemaStub struct {
	NextResp chan struct{}
}

var _ graphql.ExecutableSchema = &executableSchemaStub{}

func (e *executableSchemaStub) Schema() *ast.Schema {
	return gqlparser.MustLoadSchema(&ast.Source{Input: `
		schema { query: Query }
		type Query {
			me: User!
			user(id: Int): User!
		}
		type User { name: String! }
	`})
}

func (e *executableSchemaStub) Complexity(typeName, field string, childComplexity int, args map[string]interface{}) (int, bool) {
	return 0, false
}

func (e *executableSchemaStub) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
	return &graphql.Response{Data: []byte(`{"name":"test"}`)}
}

func (e *executableSchemaStub) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
	return graphql.ErrorResponse(ctx, "mutations are not supported")
}

func (e *executableSchemaStub) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
	return func() *graphql.Response {
		select {
		case <-ctx.Done():
			return nil
		case <-e.NextResp:
			return &graphql.Response{
				Data: []byte(`{"name":"test"}`),
			}
		}
	}
}
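A small hedged test sketch (assumed file name, in package handler) showing the design of the stub: the NextResp channel gates each subscription response, so a test can step the stream one message at a time.

```Go
package handler

import (
	"context"
	"testing"
)

// Assumed test: each send on NextResp releases exactly one subscription response.
func TestStubSubscription(t *testing.T) {
	es := &executableSchemaStub{NextResp: make(chan struct{})}
	next := es.Subscription(context.Background(), nil)

	go func() { es.NextResp <- struct{}{} }()
	if resp := next(); resp == nil || string(resp.Data) != `{"name":"test"}` {
		t.Fatalf("unexpected subscription response: %+v", resp)
	}
}
```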
286
vendor/github.com/99designs/gqlgen/handler/websocket.go
generated
vendored
Normal file
@@ -0,0 +1,286 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"sync"
|
||||
|
||||
"github.com/99designs/gqlgen/graphql"
|
||||
"github.com/gorilla/websocket"
|
||||
"github.com/hashicorp/golang-lru"
|
||||
"github.com/vektah/gqlparser"
|
||||
"github.com/vektah/gqlparser/ast"
|
||||
"github.com/vektah/gqlparser/gqlerror"
|
||||
"github.com/vektah/gqlparser/validator"
|
||||
)
|
||||
|
||||
const (
|
||||
connectionInitMsg = "connection_init" // Client -> Server
|
||||
connectionTerminateMsg = "connection_terminate" // Client -> Server
|
||||
startMsg = "start" // Client -> Server
|
||||
stopMsg = "stop" // Client -> Server
|
||||
connectionAckMsg = "connection_ack" // Server -> Client
|
||||
connectionErrorMsg = "connection_error" // Server -> Client
|
||||
dataMsg = "data" // Server -> Client
|
||||
errorMsg = "error" // Server -> Client
|
||||
completeMsg = "complete" // Server -> Client
|
||||
//connectionKeepAliveMsg = "ka" // Server -> Client TODO: keepalives
|
||||
)
|
||||
|
||||
type operationMessage struct {
|
||||
Payload json.RawMessage `json:"payload,omitempty"`
|
||||
ID string `json:"id,omitempty"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
type wsConnection struct {
|
||||
ctx context.Context
|
||||
conn *websocket.Conn
|
||||
exec graphql.ExecutableSchema
|
||||
active map[string]context.CancelFunc
|
||||
mu sync.Mutex
|
||||
cfg *Config
|
||||
cache *lru.Cache
|
||||
|
||||
initPayload InitPayload
|
||||
}
|
||||
|
||||
func connectWs(exec graphql.ExecutableSchema, w http.ResponseWriter, r *http.Request, cfg *Config, cache *lru.Cache) {
|
||||
ws, err := cfg.upgrader.Upgrade(w, r, http.Header{
|
||||
"Sec-Websocket-Protocol": []string{"graphql-ws"},
|
||||
})
|
||||
if err != nil {
|
||||
log.Printf("unable to upgrade %T to websocket %s: ", w, err.Error())
|
||||
sendErrorf(w, http.StatusBadRequest, "unable to upgrade")
|
||||
return
|
||||
}
|
||||
|
||||
conn := wsConnection{
|
||||
active: map[string]context.CancelFunc{},
|
||||
exec: exec,
|
||||
conn: ws,
|
||||
ctx: r.Context(),
|
||||
cfg: cfg,
|
||||
cache: cache,
|
||||
}
|
||||
|
||||
if !conn.init() {
|
||||
return
|
||||
}
|
||||
|
||||
conn.run()
|
||||
}
|
||||
|
||||
func (c *wsConnection) init() bool {
|
||||
message := c.readOp()
|
||||
if message == nil {
|
||||
c.close(websocket.CloseProtocolError, "decoding error")
|
||||
return false
|
||||
}
|
||||
|
||||
switch message.Type {
|
||||
case connectionInitMsg:
|
||||
if len(message.Payload) > 0 {
|
||||
c.initPayload = make(InitPayload)
|
||||
err := json.Unmarshal(message.Payload, &c.initPayload)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
c.write(&operationMessage{Type: connectionAckMsg})
|
||||
case connectionTerminateMsg:
|
||||
c.close(websocket.CloseNormalClosure, "terminated")
|
||||
return false
|
||||
default:
|
||||
c.sendConnectionError("unexpected message %s", message.Type)
|
||||
c.close(websocket.CloseProtocolError, "unexpected message")
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (c *wsConnection) write(msg *operationMessage) {
|
||||
c.mu.Lock()
|
||||
c.conn.WriteJSON(msg)
|
||||
c.mu.Unlock()
|
||||
}
|
||||
|
||||
func (c *wsConnection) run() {
|
||||
for {
|
||||
message := c.readOp()
|
||||
if message == nil {
|
||||
return
|
||||
}
|
||||
|
||||
switch message.Type {
|
||||
case startMsg:
|
||||
if !c.subscribe(message) {
|
||||
return
|
||||
}
|
||||
case stopMsg:
|
||||
c.mu.Lock()
|
||||
closer := c.active[message.ID]
|
||||
c.mu.Unlock()
|
||||
if closer == nil {
|
||||
c.sendError(message.ID, gqlerror.Errorf("%s is not running, cannot stop", message.ID))
|
||||
continue
|
||||
}
|
||||
|
||||
closer()
|
||||
case connectionTerminateMsg:
|
||||
c.close(websocket.CloseNormalClosure, "terminated")
|
||||
return
|
||||
default:
|
||||
c.sendConnectionError("unexpected message %s", message.Type)
|
||||
c.close(websocket.CloseProtocolError, "unexpected message")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *wsConnection) subscribe(message *operationMessage) bool {
|
||||
var reqParams params
|
||||
if err := jsonDecode(bytes.NewReader(message.Payload), &reqParams); err != nil {
|
||||
c.sendConnectionError("invalid json")
|
||||
return false
|
||||
}
|
||||
|
||||
var (
|
||||
doc *ast.QueryDocument
|
||||
cacheHit bool
|
||||
)
|
||||
if c.cache != nil {
|
||||
val, ok := c.cache.Get(reqParams.Query)
|
||||
if ok {
|
||||
doc = val.(*ast.QueryDocument)
|
||||
cacheHit = true
|
||||
}
|
||||
}
|
||||
if !cacheHit {
|
||||
var qErr gqlerror.List
|
||||
doc, qErr = gqlparser.LoadQuery(c.exec.Schema(), reqParams.Query)
|
||||
if qErr != nil {
|
||||
c.sendError(message.ID, qErr...)
|
||||
return true
|
||||
}
|
||||
if c.cache != nil {
|
||||
c.cache.Add(reqParams.Query, doc)
|
||||
}
|
||||
}
|
||||
|
||||
op := doc.Operations.ForName(reqParams.OperationName)
|
||||
if op == nil {
|
||||
c.sendError(message.ID, gqlerror.Errorf("operation %s not found", reqParams.OperationName))
|
||||
return true
|
||||
}
|
||||
|
||||
vars, err := validator.VariableValues(c.exec.Schema(), op, reqParams.Variables)
|
||||
if err != nil {
|
||||
c.sendError(message.ID, err)
|
||||
return true
|
||||
}
|
||||
reqCtx := c.cfg.newRequestContext(c.exec, doc, op, reqParams.Query, vars)
|
||||
ctx := graphql.WithRequestContext(c.ctx, reqCtx)
|
||||
|
||||
if c.initPayload != nil {
|
||||
ctx = withInitPayload(ctx, c.initPayload)
|
||||
}
|
||||
|
||||
if op.Operation != ast.Subscription {
|
||||
var result *graphql.Response
|
||||
if op.Operation == ast.Query {
|
||||
result = c.exec.Query(ctx, op)
|
||||
} else {
|
||||
result = c.exec.Mutation(ctx, op)
|
||||
}
|
||||
|
||||
c.sendData(message.ID, result)
|
||||
c.write(&operationMessage{ID: message.ID, Type: completeMsg})
|
||||
return true
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
c.mu.Lock()
|
||||
c.active[message.ID] = cancel
|
||||
c.mu.Unlock()
|
||||
go func() {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
userErr := reqCtx.Recover(ctx, r)
|
||||
c.sendError(message.ID, &gqlerror.Error{Message: userErr.Error()})
|
||||
}
|
||||
}()
|
||||
next := c.exec.Subscription(ctx, op)
|
||||
for result := next(); result != nil; result = next() {
|
||||
c.sendData(message.ID, result)
|
||||
}
|
||||
|
||||
c.write(&operationMessage{ID: message.ID, Type: completeMsg})
|
||||
|
||||
c.mu.Lock()
|
||||
delete(c.active, message.ID)
|
||||
c.mu.Unlock()
|
||||
cancel()
|
||||
}()
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (c *wsConnection) sendData(id string, response *graphql.Response) {
|
||||
b, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
c.sendError(id, gqlerror.Errorf("unable to encode json response: %s", err.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
c.write(&operationMessage{Type: dataMsg, ID: id, Payload: b})
|
||||
}
|
||||
|
||||
func (c *wsConnection) sendError(id string, errors ...*gqlerror.Error) {
|
||||
var errs []error
|
||||
for _, err := range errors {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
b, err := json.Marshal(errs)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
c.write(&operationMessage{Type: errorMsg, ID: id, Payload: b})
|
||||
}
|
||||
|
||||
func (c *wsConnection) sendConnectionError(format string, args ...interface{}) {
|
||||
b, err := json.Marshal(&gqlerror.Error{Message: fmt.Sprintf(format, args...)})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
c.write(&operationMessage{Type: connectionErrorMsg, Payload: b})
|
||||
}
|
||||
|
||||
func (c *wsConnection) readOp() *operationMessage {
|
||||
_, r, err := c.conn.NextReader()
|
||||
if err != nil {
|
||||
c.sendConnectionError("invalid json")
|
||||
return nil
|
||||
}
|
||||
message := operationMessage{}
|
||||
if err := jsonDecode(r, &message); err != nil {
|
||||
c.sendConnectionError("invalid json")
|
||||
return nil
|
||||
}
|
||||
|
||||
return &message
|
||||
}
|
||||
|
||||
func (c *wsConnection) close(closeCode int, message string) {
|
||||
c.mu.Lock()
|
||||
_ = c.conn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(closeCode, message))
|
||||
c.mu.Unlock()
|
||||
_ = c.conn.Close()
|
||||
}
|
||||
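A hedged client-side sketch of the graphql-ws flow this file implements: connection_init followed by connection_ack, then start followed by data and complete messages. The endpoint URL and the subscription query are assumptions for illustration.

```Go
package main

import (
	"encoding/json"
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

// Mirrors the operationMessage shape used by the server above.
type wsMessage struct {
	Payload json.RawMessage `json:"payload,omitempty"`
	ID      string          `json:"id,omitempty"`
	Type    string          `json:"type"`
}

func main() {
	// Assumed endpoint: wherever the GraphQL handler is mounted.
	conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:8080/query", http.Header{
		"Sec-WebSocket-Protocol": []string{"graphql-ws"},
	})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// connection_init must come first; the server replies with connection_ack.
	if err := conn.WriteJSON(wsMessage{Type: "connection_init"}); err != nil {
		log.Fatal(err)
	}

	// Assumed subscription; the payload shape matches the params struct on the server.
	payload, _ := json.Marshal(map[string]string{"query": `subscription { messageAdded { text } }`})
	if err := conn.WriteJSON(wsMessage{Type: "start", ID: "1", Payload: payload}); err != nil {
		log.Fatal(err)
	}

	// Read data messages until the server sends complete for this id.
	for {
		var msg wsMessage
		if err := conn.ReadJSON(&msg); err != nil {
			log.Fatal(err)
		}
		log.Printf("%s: %s", msg.Type, msg.Payload)
		if msg.Type == "complete" {
			return
		}
	}
}
```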
1
vendor/github.com/PuerkitoBio/goquery/.gitattributes
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
testdata/* linguist-vendored
|
||||
16
vendor/github.com/PuerkitoBio/goquery/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
# editor temporary files
|
||||
*.sublime-*
|
||||
.DS_Store
|
||||
*.swp
|
||||
#*.*#
|
||||
tags
|
||||
|
||||
# direnv config
|
||||
.env*
|
||||
|
||||
# test binaries
|
||||
*.test
|
||||
|
||||
# coverage and profile outputs
|
||||
*.out
|
||||
|
||||
16
vendor/github.com/PuerkitoBio/goquery/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.1
|
||||
- 1.2.x
|
||||
- 1.3.x
|
||||
- 1.4.x
|
||||
- 1.5.x
|
||||
- 1.6.x
|
||||
- 1.7.x
|
||||
- 1.8.x
|
||||
- 1.9.x
|
||||
- "1.10.x"
|
||||
- 1.11.x
|
||||
- tip
|
||||
|
||||
12
vendor/github.com/PuerkitoBio/goquery/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
Copyright (c) 2012-2016, Martin Angers & Contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
179
vendor/github.com/PuerkitoBio/goquery/README.md
generated
vendored
Normal file
@@ -0,0 +1,179 @@
|
||||
# goquery - a little like that j-thing, only in Go
|
||||
[](http://travis-ci.org/PuerkitoBio/goquery) [](http://godoc.org/github.com/PuerkitoBio/goquery) [](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge)
|
||||
|
||||
goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off.
|
||||
|
||||
Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this.
|
||||
|
||||
Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...).
|
||||
|
||||
## Table of Contents
|
||||
|
||||
* [Installation](#installation)
|
||||
* [Changelog](#changelog)
|
||||
* [API](#api)
|
||||
* [Examples](#examples)
|
||||
* [Related Projects](#related-projects)
|
||||
* [Support](#support)
|
||||
* [License](#license)
|
||||
|
||||
## Installation
|
||||
|
||||
Please note that because of the net/html dependency, goquery requires Go1.1+.
|
||||
|
||||
$ go get github.com/PuerkitoBio/goquery
|
||||
|
||||
(optional) To run unit tests:
|
||||
|
||||
$ cd $GOPATH/src/github.com/PuerkitoBio/goquery
|
||||
$ go test
|
||||
|
||||
(optional) To run benchmarks (warning: it runs for a few minutes):
|
||||
|
||||
$ cd $GOPATH/src/github.com/PuerkitoBio/goquery
|
||||
$ go test -bench=".*"
|
||||
|
||||
## Changelog
|
||||
|
||||
**Note that goquery's API is now stable, and will not break.**
|
||||
|
||||
* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505).
|
||||
* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
|
||||
* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
|
||||
* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue).
|
||||
* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins).
|
||||
* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv).
|
||||
* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb).
|
||||
* **2016-08-28 (v1.0.1)** : Optimize performance for large documents.
|
||||
* **2016-07-27 (v1.0.0)** : Tag version 1.0.0.
|
||||
* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object.
|
||||
* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see godoc for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`).
|
||||
* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr].
|
||||
* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone].
|
||||
* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone].
|
||||
* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used.
|
||||
* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s.
|
||||
* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader.
|
||||
* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response.
|
||||
* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility.
|
||||
* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out).
|
||||
* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method.
|
||||
* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases).
|
||||
* **v0.1.0** : Initial release.
|
||||
|
||||
## API
|
||||
|
||||
goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate.
|
||||
|
||||
jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention:
|
||||
|
||||
* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`)
|
||||
* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`)
|
||||
* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`)
|
||||
* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`)
|
||||
* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`)
|
||||
* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`)
|
||||
|
||||
Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour).
|
||||
|
||||
The complete [godoc reference documentation can be found here][doc].
|
||||
|
||||
Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
|
||||
|
||||
* `Find("~")` returns an empty selection because the selector string doesn't match anything.
|
||||
* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
|
||||
* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything.
|
||||
* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element.
|
||||
|
||||
## Examples
|
||||
|
||||
See some tips and tricks in the [wiki][].
|
||||
|
||||
Adapted from example_test.go:
|
||||
|
||||
```Go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
)
|
||||
|
||||
func ExampleScrape() {
|
||||
// Request the HTML page.
|
||||
res, err := http.Get("http://metalsucks.net")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != 200 {
|
||||
log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
|
||||
}
|
||||
|
||||
// Load the HTML document
|
||||
doc, err := goquery.NewDocumentFromReader(res.Body)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Find the review items
|
||||
doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) {
|
||||
// For each item found, get the band and title
|
||||
band := s.Find("a").Text()
|
||||
title := s.Find("i").Text()
|
||||
fmt.Printf("Review %d: %s - %s\n", i, band, title)
|
||||
})
|
||||
}
|
||||
|
||||
func main() {
|
||||
ExampleScrape()
|
||||
}
|
||||
```
|
||||
|
||||
## Related Projects
|
||||
|
||||
- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
|
||||
- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
|
||||
- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
|
||||
- [asciimoo/colly](https://github.com/asciimoo/colly), a lightning fast and elegant Scraping Framework
|
||||
- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.
|
||||
- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping.
|
||||
|
||||
## Support
|
||||
|
||||
There are a number of ways you can support the project:
|
||||
|
||||
* Use it, star it, build something with it, spread the word!
|
||||
- If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
|
||||
* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
|
||||
    - Please search existing issues before opening a new one - it may have already been addressed.
|
||||
* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
|
||||
- Make sure new code is tested.
|
||||
- Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.
|
||||
|
||||
If you desperately want to send money my way, I have a BuyMeACoffee.com page:
|
||||
|
||||
<a href="https://www.buymeacoffee.com/mna" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
|
||||
|
||||
## License
|
||||
|
||||
The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic].
|
||||
|
||||
[jquery]: http://jquery.com/
|
||||
[go]: http://golang.org/
|
||||
[cascadia]: https://github.com/andybalholm/cascadia
|
||||
[cascadiacli]: https://github.com/suntong/cascadia
|
||||
[bsd]: http://opensource.org/licenses/BSD-3-Clause
|
||||
[golic]: http://golang.org/LICENSE
|
||||
[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE
|
||||
[doc]: http://godoc.org/github.com/PuerkitoBio/goquery
|
||||
[index]: http://api.jquery.com/index/
|
||||
[gonet]: https://github.com/golang/net/
|
||||
[html]: http://godoc.org/golang.org/x/net/html
|
||||
[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks
|
||||
[thatguystone]: https://github.com/thatguystone
|
||||
[piotr]: https://github.com/piotrkowalczuk
|
||||
[goq]: https://github.com/andrewstuart/goq
|
||||
124
vendor/github.com/PuerkitoBio/goquery/array.go
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
package goquery
|
||||
|
||||
import (
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
const (
|
||||
maxUint = ^uint(0)
|
||||
maxInt = int(maxUint >> 1)
|
||||
|
||||
// ToEnd is a special index value that can be used as end index in a call
|
||||
// to Slice so that all elements are selected until the end of the Selection.
|
||||
// It is equivalent to passing (*Selection).Length().
|
||||
ToEnd = maxInt
|
||||
)
|
||||
|
||||
// First reduces the set of matched elements to the first in the set.
|
||||
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
|
||||
func (s *Selection) First() *Selection {
|
||||
return s.Eq(0)
|
||||
}
|
||||
|
||||
// Last reduces the set of matched elements to the last in the set.
|
||||
// It returns a new Selection object, and an empty Selection object if
|
||||
// the selection is empty.
|
||||
func (s *Selection) Last() *Selection {
|
||||
return s.Eq(-1)
|
||||
}
|
||||
|
||||
// Eq reduces the set of matched elements to the one at the specified index.
|
||||
// If a negative index is given, it counts backwards starting at the end of the
|
||||
// set. It returns a new Selection object, and an empty Selection object if the
|
||||
// index is invalid.
|
||||
func (s *Selection) Eq(index int) *Selection {
|
||||
if index < 0 {
|
||||
index += len(s.Nodes)
|
||||
}
|
||||
|
||||
if index >= len(s.Nodes) || index < 0 {
|
||||
return newEmptySelection(s.document)
|
||||
}
|
||||
|
||||
return s.Slice(index, index+1)
|
||||
}
|
||||
|
||||
// Slice reduces the set of matched elements to a subset specified by a range
|
||||
// of indices. The start index is 0-based and indicates the index of the first
|
||||
// element to select. The end index is 0-based and indicates the index at which
|
||||
// the elements stop being selected (the end index is not selected).
|
||||
//
|
||||
// The indices may be negative, in which case they represent an offset from the
|
||||
// end of the selection.
|
||||
//
|
||||
// The special value ToEnd may be specified as end index, in which case all elements
|
||||
// until the end are selected. This works both for a positive and negative start
|
||||
// index.
|
||||
func (s *Selection) Slice(start, end int) *Selection {
|
||||
if start < 0 {
|
||||
start += len(s.Nodes)
|
||||
}
|
||||
if end == ToEnd {
|
||||
end = len(s.Nodes)
|
||||
} else if end < 0 {
|
||||
end += len(s.Nodes)
|
||||
}
|
||||
return pushStack(s, s.Nodes[start:end])
|
||||
}
|
||||
|
||||
// Get retrieves the underlying node at the specified index.
|
||||
// Get without parameter is not implemented, since the node array is available
|
||||
// on the Selection object.
|
||||
func (s *Selection) Get(index int) *html.Node {
|
||||
if index < 0 {
|
||||
index += len(s.Nodes) // Negative index gets from the end
|
||||
}
|
||||
return s.Nodes[index]
|
||||
}
|
||||
|
||||
// Index returns the position of the first element within the Selection object
|
||||
// relative to its sibling elements.
|
||||
func (s *Selection) Index() int {
|
||||
if len(s.Nodes) > 0 {
|
||||
return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length()
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// IndexSelector returns the position of the first element within the
|
||||
// Selection object relative to the elements matched by the selector, or -1 if
|
||||
// not found.
|
||||
func (s *Selection) IndexSelector(selector string) int {
|
||||
if len(s.Nodes) > 0 {
|
||||
sel := s.document.Find(selector)
|
||||
return indexInSlice(sel.Nodes, s.Nodes[0])
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// IndexMatcher returns the position of the first element within the
|
||||
// Selection object relative to the elements matched by the matcher, or -1 if
|
||||
// not found.
|
||||
func (s *Selection) IndexMatcher(m Matcher) int {
|
||||
if len(s.Nodes) > 0 {
|
||||
sel := s.document.FindMatcher(m)
|
||||
return indexInSlice(sel.Nodes, s.Nodes[0])
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// IndexOfNode returns the position of the specified node within the Selection
|
||||
// object, or -1 if not found.
|
||||
func (s *Selection) IndexOfNode(node *html.Node) int {
|
||||
return indexInSlice(s.Nodes, node)
|
||||
}
|
||||
|
||||
// IndexOfSelection returns the position of the first node in the specified
|
||||
// Selection object within this Selection object, or -1 if not found.
|
||||
func (s *Selection) IndexOfSelection(sel *Selection) int {
|
||||
if sel != nil && len(sel.Nodes) > 0 {
|
||||
return indexInSlice(s.Nodes, sel.Nodes[0])
|
||||
}
|
||||
return -1
|
||||
}
|
||||
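A minimal sketch of the positional helpers defined above (First, Eq with a negative index, and Slice with ToEnd). The inline HTML fragment is made up for illustration.

```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<ul><li>a</li><li>b</li><li>c</li><li>d</li></ul>`))
	if err != nil {
		log.Fatal(err)
	}

	items := doc.Find("li")
	fmt.Println(items.First().Text())                    // "a", same as items.Eq(0)
	fmt.Println(items.Eq(-1).Text())                     // "d", negative index counts from the end
	fmt.Println(items.Slice(-2, goquery.ToEnd).Length()) // 2: the last two items
}
```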
123
vendor/github.com/PuerkitoBio/goquery/doc.go
generated
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
// Copyright (c) 2012-2016, Martin Angers & Contributors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without modification,
|
||||
// are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright notice,
|
||||
// this list of conditions and the following disclaimer in the documentation and/or
|
||||
// other materials provided with the distribution.
|
||||
// * Neither the name of the author nor the names of its contributors may be used to
|
||||
// endorse or promote products derived from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
|
||||
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
|
||||
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
|
||||
// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
/*
|
||||
Package goquery implements features similar to jQuery, including the chainable
|
||||
syntax, to manipulate and query an HTML document.
|
||||
|
||||
It brings a syntax and a set of features similar to jQuery to the Go language.
|
||||
It is based on Go's net/html package and the CSS Selector library cascadia.
|
||||
Since the net/html parser returns nodes, and not a full-featured DOM
|
||||
tree, jQuery's stateful manipulation functions (like height(), css(), detach())
|
||||
have been left off.
|
||||
|
||||
Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is
|
||||
the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML.
|
||||
See the repository's wiki for various options on how to do this.
|
||||
|
||||
Syntax-wise, it is as close as possible to jQuery, with the same method names when
|
||||
possible, and that warm and fuzzy chainable interface. jQuery being the
|
||||
ultra-popular library that it is, writing a similar HTML-manipulating
|
||||
library was better to follow its API than to start anew (in the same spirit as
|
||||
Go's fmt package), even though some of its methods are less than intuitive (looking
|
||||
at you, index()...).
|
||||
|
||||
It is hosted on GitHub, along with additional documentation in the README.md
|
||||
file: https://github.com/puerkitobio/goquery
|
||||
|
||||
Please note that because of the net/html dependency, goquery requires Go1.1+.
|
||||
|
||||
The various methods are split into files based on the category of behavior.
|
||||
The three dots (...) indicate that various "overloads" are available.
|
||||
|
||||
* array.go : array-like positional manipulation of the selection.
|
||||
- Eq()
|
||||
- First()
|
||||
- Get()
|
||||
- Index...()
|
||||
- Last()
|
||||
- Slice()
|
||||
|
||||
* expand.go : methods that expand or augment the selection's set.
|
||||
- Add...()
|
||||
- AndSelf()
|
||||
- Union(), which is an alias for AddSelection()
|
||||
|
||||
* filter.go : filtering methods, that reduce the selection's set.
|
||||
- End()
|
||||
- Filter...()
|
||||
- Has...()
|
||||
- Intersection(), which is an alias of FilterSelection()
|
||||
- Not...()
|
||||
|
||||
* iteration.go : methods to loop over the selection's nodes.
|
||||
- Each()
|
||||
- EachWithBreak()
|
||||
- Map()
|
||||
|
||||
* manipulation.go : methods for modifying the document
|
||||
- After...()
|
||||
- Append...()
|
||||
- Before...()
|
||||
- Clone()
|
||||
- Empty()
|
||||
- Prepend...()
|
||||
- Remove...()
|
||||
- ReplaceWith...()
|
||||
- Unwrap()
|
||||
- Wrap...()
|
||||
- WrapAll...()
|
||||
- WrapInner...()
|
||||
|
||||
* property.go : methods that inspect and get the node's properties values.
|
||||
- Attr*(), RemoveAttr(), SetAttr()
|
||||
- AddClass(), HasClass(), RemoveClass(), ToggleClass()
|
||||
- Html()
|
||||
- Length()
|
||||
- Size(), which is an alias for Length()
|
||||
- Text()
|
||||
|
||||
* query.go : methods that query, or reflect, a node's identity.
|
||||
- Contains()
|
||||
- Is...()
|
||||
|
||||
* traversal.go : methods to traverse the HTML document tree.
|
||||
- Children...()
|
||||
- Contents()
|
||||
- Find...()
|
||||
- Next...()
|
||||
- Parent[s]...()
|
||||
- Prev...()
|
||||
- Siblings...()
|
||||
|
||||
* type.go : definition of the types exposed by goquery.
|
||||
- Document
|
||||
- Selection
|
||||
- Matcher
|
||||
|
||||
* utilities.go : definition of helper functions (and not methods on a *Selection)
|
||||
that are not part of jQuery, but are useful to goquery.
|
||||
- NodeName
|
||||
- OuterHtml
|
||||
*/
|
||||
package goquery
|
||||
70
vendor/github.com/PuerkitoBio/goquery/expand.go
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
package goquery
|
||||
|
||||
import "golang.org/x/net/html"
|
||||
|
||||
// Add adds the selector string's matching nodes to those in the current
|
||||
// selection and returns a new Selection object.
|
||||
// The selector string is run in the context of the document of the current
|
||||
// Selection object.
|
||||
func (s *Selection) Add(selector string) *Selection {
|
||||
return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...)
|
||||
}
|
||||
|
||||
// AddMatcher adds the matcher's matching nodes to those in the current
|
||||
// selection and returns a new Selection object.
|
||||
// The matcher is run in the context of the document of the current
|
||||
// Selection object.
|
||||
func (s *Selection) AddMatcher(m Matcher) *Selection {
|
||||
return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...)
|
||||
}
|
||||
|
||||
// AddSelection adds the specified Selection object's nodes to those in the
|
||||
// current selection and returns a new Selection object.
|
||||
func (s *Selection) AddSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.AddNodes()
|
||||
}
|
||||
return s.AddNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// Union is an alias for AddSelection.
|
||||
func (s *Selection) Union(sel *Selection) *Selection {
|
||||
return s.AddSelection(sel)
|
||||
}
|
||||
|
||||
// AddNodes adds the specified nodes to those in the
|
||||
// current selection and returns a new Selection object.
|
||||
func (s *Selection) AddNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil))
|
||||
}
|
||||
|
||||
// AndSelf adds the previous set of elements on the stack to the current set.
|
||||
// It returns a new Selection object containing the current Selection combined
|
||||
// with the previous one.
|
||||
// Deprecated: This function has been deprecated and is now an alias for AddBack().
|
||||
func (s *Selection) AndSelf() *Selection {
|
||||
return s.AddBack()
|
||||
}
|
||||
|
||||
// AddBack adds the previous set of elements on the stack to the current set.
|
||||
// It returns a new Selection object containing the current Selection combined
|
||||
// with the previous one.
|
||||
func (s *Selection) AddBack() *Selection {
|
||||
return s.AddSelection(s.prevSel)
|
||||
}
|
||||
|
||||
// AddBackFiltered reduces the previous set of elements on the stack to those that
|
||||
// match the selector string, and adds them to the current set.
|
||||
// It returns a new Selection object containing the current Selection combined
|
||||
// with the filtered previous one
|
||||
func (s *Selection) AddBackFiltered(selector string) *Selection {
|
||||
return s.AddSelection(s.prevSel.Filter(selector))
|
||||
}
|
||||
|
||||
// AddBackMatcher reduces the previous set of elements on the stack to those that match
|
||||
// the matcher, and adds them to the current set.
|
||||
// It returns a new Selection object containing the current Selection combined
|
||||
// with the filtered previous one
|
||||
func (s *Selection) AddBackMatcher(m Matcher) *Selection {
|
||||
return s.AddSelection(s.prevSel.FilterMatcher(m))
|
||||
}
|
||||
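A short sketch of Add and AddBack under an assumed HTML fragment: Filter narrows the working set, AddBack merges the pre-filter set back in, and Add unions in a fresh selector run against the whole document.

```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<div><p class="a">one</p><p>two</p><span>three</span></div>`))
	if err != nil {
		log.Fatal(err)
	}

	// Filter keeps only p.a; AddBack restores the previous set (both <p> elements).
	sel := doc.Find("p").Filter(".a").AddBack()
	fmt.Println(sel.Length()) // 2

	// Add matches "span" against the document root and unions it with the <p> set.
	fmt.Println(doc.Find("p").Add("span").Length()) // 3
}
```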
163
vendor/github.com/PuerkitoBio/goquery/filter.go
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
package goquery
|
||||
|
||||
import "golang.org/x/net/html"
|
||||
|
||||
// Filter reduces the set of matched elements to those that match the selector string.
|
||||
// It returns a new Selection object for this subset of matching elements.
|
||||
func (s *Selection) Filter(selector string) *Selection {
|
||||
return s.FilterMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// FilterMatcher reduces the set of matched elements to those that match
|
||||
// the given matcher. It returns a new Selection object for this subset
|
||||
// of matching elements.
|
||||
func (s *Selection) FilterMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, winnow(s, m, true))
|
||||
}
|
||||
|
||||
// Not removes elements from the Selection that match the selector string.
|
||||
// It returns a new Selection object with the matching elements removed.
|
||||
func (s *Selection) Not(selector string) *Selection {
|
||||
return s.NotMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// NotMatcher removes elements from the Selection that match the given matcher.
|
||||
// It returns a new Selection object with the matching elements removed.
|
||||
func (s *Selection) NotMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, winnow(s, m, false))
|
||||
}
|
||||
|
||||
// FilterFunction reduces the set of matched elements to those that pass the function's test.
|
||||
// It returns a new Selection object for this subset of elements.
|
||||
func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection {
|
||||
return pushStack(s, winnowFunction(s, f, true))
|
||||
}
|
||||
|
||||
// NotFunction removes elements from the Selection that pass the function's test.
|
||||
// It returns a new Selection object with the matching elements removed.
|
||||
func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection {
|
||||
return pushStack(s, winnowFunction(s, f, false))
|
||||
}
|
||||
|
||||
// FilterNodes reduces the set of matched elements to those that match the specified nodes.
|
||||
// It returns a new Selection object for this subset of elements.
|
||||
func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, winnowNodes(s, nodes, true))
|
||||
}
|
||||
|
||||
// NotNodes removes elements from the Selection that match the specified nodes.
|
||||
// It returns a new Selection object with the matching elements removed.
|
||||
func (s *Selection) NotNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, winnowNodes(s, nodes, false))
|
||||
}
|
||||
|
||||
// FilterSelection reduces the set of matched elements to those that match a
|
||||
// node in the specified Selection object.
|
||||
// It returns a new Selection object for this subset of elements.
|
||||
func (s *Selection) FilterSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return pushStack(s, winnowNodes(s, nil, true))
|
||||
}
|
||||
return pushStack(s, winnowNodes(s, sel.Nodes, true))
|
||||
}
|
||||
|
||||
// NotSelection removes elements from the Selection that match a node in the specified
|
||||
// Selection object. It returns a new Selection object with the matching elements removed.
|
||||
func (s *Selection) NotSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return pushStack(s, winnowNodes(s, nil, false))
|
||||
}
|
||||
return pushStack(s, winnowNodes(s, sel.Nodes, false))
|
||||
}
|
||||
|
||||
// Intersection is an alias for FilterSelection.
|
||||
func (s *Selection) Intersection(sel *Selection) *Selection {
|
||||
return s.FilterSelection(sel)
|
||||
}
|
||||
|
||||
// Has reduces the set of matched elements to those that have a descendant
|
||||
// that matches the selector.
|
||||
// It returns a new Selection object with the matching elements.
|
||||
func (s *Selection) Has(selector string) *Selection {
|
||||
return s.HasSelection(s.document.Find(selector))
|
||||
}
|
||||
|
||||
// HasMatcher reduces the set of matched elements to those that have a descendant
|
||||
// that matches the matcher.
|
||||
// It returns a new Selection object with the matching elements.
|
||||
func (s *Selection) HasMatcher(m Matcher) *Selection {
|
||||
return s.HasSelection(s.document.FindMatcher(m))
|
||||
}
|
||||
|
||||
// HasNodes reduces the set of matched elements to those that have a
|
||||
// descendant that matches one of the nodes.
|
||||
// It returns a new Selection object with the matching elements.
|
||||
func (s *Selection) HasNodes(nodes ...*html.Node) *Selection {
|
||||
return s.FilterFunction(func(_ int, sel *Selection) bool {
|
||||
// Add all nodes that contain one of the specified nodes
|
||||
for _, n := range nodes {
|
||||
if sel.Contains(n) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// HasSelection reduces the set of matched elements to those that have a
|
||||
// descendant that matches one of the nodes of the specified Selection object.
|
||||
// It returns a new Selection object with the matching elements.
|
||||
func (s *Selection) HasSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.HasNodes()
|
||||
}
|
||||
return s.HasNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// End ends the most recent filtering operation in the current chain and
|
||||
// returns the set of matched elements to its previous state.
|
||||
func (s *Selection) End() *Selection {
|
||||
if s.prevSel != nil {
|
||||
return s.prevSel
|
||||
}
|
||||
return newEmptySelection(s.document)
|
||||
}
|
||||
|
||||
// Filter based on the matcher, and the indicator to keep (Filter) or
|
||||
// to get rid of (Not) the matching elements.
|
||||
func winnow(sel *Selection, m Matcher, keep bool) []*html.Node {
|
||||
// Optimize if keep is requested
|
||||
if keep {
|
||||
return m.Filter(sel.Nodes)
|
||||
}
|
||||
// Use grep
|
||||
return grep(sel, func(i int, s *Selection) bool {
|
||||
return !m.Match(s.Get(0))
|
||||
})
|
||||
}
|
||||
|
||||
// Filter based on an array of nodes, and the indicator to keep (Filter) or
|
||||
// to get rid of (Not) the matching elements.
|
||||
func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node {
|
||||
if len(nodes)+len(sel.Nodes) < minNodesForSet {
|
||||
return grep(sel, func(i int, s *Selection) bool {
|
||||
return isInSlice(nodes, s.Get(0)) == keep
|
||||
})
|
||||
}
|
||||
|
||||
set := make(map[*html.Node]bool)
|
||||
for _, n := range nodes {
|
||||
set[n] = true
|
||||
}
|
||||
return grep(sel, func(i int, s *Selection) bool {
|
||||
return set[s.Get(0)] == keep
|
||||
})
|
||||
}
|
||||
|
||||
// Filter based on a function test, and the indicator to keep (Filter) or
|
||||
// to get rid of (Not) the matching elements.
|
||||
func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node {
|
||||
return grep(sel, func(i int, s *Selection) bool {
|
||||
return f(i, s) == keep
|
||||
})
|
||||
}
|
||||
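A brief sketch of the filtering variants defined above (Filter, Not, Has, FilterFunction) against an assumed HTML fragment.

```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<ul><li class="done">a</li><li>b</li><li><a href="#">c</a></li></ul>`))
	if err != nil {
		log.Fatal(err)
	}
	items := doc.Find("li")

	fmt.Println(items.Filter(".done").Length()) // 1: keep elements matching the selector
	fmt.Println(items.Not(".done").Length())    // 2: drop elements matching the selector
	fmt.Println(items.Has("a").Length())        // 1: keep elements with a matching descendant
	fmt.Println(items.FilterFunction(func(i int, s *goquery.Selection) bool {
		return strings.TrimSpace(s.Text()) == "b"
	}).Length()) // 1: keep elements passing the function's test
}
```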
6
vendor/github.com/PuerkitoBio/goquery/go.mod
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
module github.com/PuerkitoBio/goquery
|
||||
|
||||
require (
|
||||
github.com/andybalholm/cascadia v1.0.0
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a
|
||||
)
|
||||
5
vendor/github.com/PuerkitoBio/goquery/go.sum
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
|
||||
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a h1:gOpx8G595UYyvj8UK4+OFyY4rx037g3fmfhe5SasG3U=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
39
vendor/github.com/PuerkitoBio/goquery/iteration.go
generated
vendored
Normal file
@@ -0,0 +1,39 @@
package goquery

// Each iterates over a Selection object, executing a function for each
// matched element. It returns the current Selection object. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Each(f func(int, *Selection)) *Selection {
	for i, n := range s.Nodes {
		f(i, newSingleSelection(n, s.document))
	}
	return s
}

// EachWithBreak iterates over a Selection object, executing a function for each
// matched element. It is identical to Each except that it is possible to break
// out of the loop by returning false in the callback function. It returns the
// current Selection object.
func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
	for i, n := range s.Nodes {
		if !f(i, newSingleSelection(n, s.document)) {
			return s
		}
	}
	return s
}

// Map passes each element in the current matched set through a function,
// producing a slice of string holding the returned values. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
	for i, n := range s.Nodes {
		result = append(result, f(i, newSingleSelection(n, s.document)))
	}

	return result
}
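A small usage sketch of Each and Map from calling code (assuming the NewDocumentFromReader constructor defined elsewhere in goquery; the markup is illustrative only):

// example_iteration_usage.go (illustrative only)
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(`<p>one</p><p>two</p>`))
	if err != nil {
		panic(err)
	}

	// Each visits every matched element with a single-node Selection.
	doc.Find("p").Each(func(i int, s *goquery.Selection) {
		fmt.Printf("#%d: %s\n", i, s.Text())
	})

	// Map collects one string per matched element.
	texts := doc.Find("p").Map(func(i int, s *goquery.Selection) string {
		return s.Text()
	})
	fmt.Println(texts) // [one two]
}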
574
vendor/github.com/PuerkitoBio/goquery/manipulation.go
generated
vendored
Normal file
@@ -0,0 +1,574 @@
|
||||
package goquery
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// After applies the selector from the root document and inserts the matched elements
|
||||
// after the elements in the set of matched elements.
|
||||
//
|
||||
// If one of the matched elements in the selection is not currently in the
|
||||
// document, it's impossible to insert nodes after it, so it will be ignored.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) After(selector string) *Selection {
|
||||
return s.AfterMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// AfterMatcher applies the matcher from the root document and inserts the matched elements
|
||||
// after the elements in the set of matched elements.
|
||||
//
|
||||
// If one of the matched elements in the selection is not currently in the
|
||||
// document, it's impossible to insert nodes after it, so it will be ignored.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AfterMatcher(m Matcher) *Selection {
|
||||
return s.AfterNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// AfterSelection inserts the elements in the selection after each element in the set of matched
|
||||
// elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AfterSelection(sel *Selection) *Selection {
|
||||
return s.AfterNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// AfterHtml parses the html and inserts it after the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AfterHtml(html string) *Selection {
|
||||
return s.AfterNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// AfterNodes inserts the nodes after each element in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AfterNodes(ns ...*html.Node) *Selection {
|
||||
return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
|
||||
if sn.Parent != nil {
|
||||
sn.Parent.InsertBefore(n, sn.NextSibling)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Append appends the elements specified by the selector to the end of each element
|
||||
// in the set of matched elements, following those rules:
|
||||
//
|
||||
// 1) The selector is applied to the root document.
|
||||
//
|
||||
// 2) Elements that are part of the document will be moved to the new location.
|
||||
//
|
||||
// 3) If there are multiple locations to append to, cloned nodes will be
|
||||
// appended to all target locations except the last one, which will be moved
|
||||
// as noted in (2).
|
||||
func (s *Selection) Append(selector string) *Selection {
|
||||
return s.AppendMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// AppendMatcher appends the elements specified by the matcher to the end of each element
|
||||
// in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AppendMatcher(m Matcher) *Selection {
|
||||
return s.AppendNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// AppendSelection appends the elements in the selection to the end of each element
|
||||
// in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AppendSelection(sel *Selection) *Selection {
|
||||
return s.AppendNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// AppendHtml parses the html and appends it to the set of matched elements.
|
||||
func (s *Selection) AppendHtml(html string) *Selection {
|
||||
return s.AppendNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// AppendNodes appends the specified nodes to each node in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AppendNodes(ns ...*html.Node) *Selection {
|
||||
return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
|
||||
sn.AppendChild(n)
|
||||
})
|
||||
}
|
||||
|
||||
// Before inserts the matched elements before each element in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) Before(selector string) *Selection {
|
||||
return s.BeforeMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// BeforeMatcher inserts the matched elements before each element in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) BeforeMatcher(m Matcher) *Selection {
|
||||
return s.BeforeNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// BeforeSelection inserts the elements in the selection before each element in the set of matched
|
||||
// elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) BeforeSelection(sel *Selection) *Selection {
|
||||
return s.BeforeNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// BeforeHtml parses the html and inserts it before the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) BeforeHtml(html string) *Selection {
|
||||
return s.BeforeNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// BeforeNodes inserts the nodes before each element in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection {
|
||||
return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
|
||||
if sn.Parent != nil {
|
||||
sn.Parent.InsertBefore(n, sn)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Clone creates a deep copy of the set of matched nodes. The new nodes will not be
|
||||
// attached to the document.
|
||||
func (s *Selection) Clone() *Selection {
|
||||
ns := newEmptySelection(s.document)
|
||||
ns.Nodes = cloneNodes(s.Nodes)
|
||||
return ns
|
||||
}
|
||||
|
||||
// Empty removes all children nodes from the set of matched elements.
|
||||
// It returns the children nodes in a new Selection.
|
||||
func (s *Selection) Empty() *Selection {
|
||||
var nodes []*html.Node
|
||||
|
||||
for _, n := range s.Nodes {
|
||||
for c := n.FirstChild; c != nil; c = n.FirstChild {
|
||||
n.RemoveChild(c)
|
||||
nodes = append(nodes, c)
|
||||
}
|
||||
}
|
||||
|
||||
return pushStack(s, nodes)
|
||||
}
|
||||
|
||||
// Prepend prepends the elements specified by the selector to each element in
|
||||
// the set of matched elements, following the same rules as Append.
|
||||
func (s *Selection) Prepend(selector string) *Selection {
|
||||
return s.PrependMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// PrependMatcher prepends the elements specified by the matcher to each
|
||||
// element in the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) PrependMatcher(m Matcher) *Selection {
|
||||
return s.PrependNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// PrependSelection prepends the elements in the selection to each element in
|
||||
// the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) PrependSelection(sel *Selection) *Selection {
|
||||
return s.PrependNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// PrependHtml parses the html and prepends it to the set of matched elements.
|
||||
func (s *Selection) PrependHtml(html string) *Selection {
|
||||
return s.PrependNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// PrependNodes prepends the specified nodes to each node in the set of
|
||||
// matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) PrependNodes(ns ...*html.Node) *Selection {
|
||||
return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
|
||||
// sn.FirstChild may be nil, in which case this functions like
|
||||
// sn.AppendChild()
|
||||
sn.InsertBefore(n, sn.FirstChild)
|
||||
})
|
||||
}
|
||||
|
||||
// Remove removes the set of matched elements from the document.
|
||||
// It returns the same selection, now consisting of nodes not in the document.
|
||||
func (s *Selection) Remove() *Selection {
|
||||
for _, n := range s.Nodes {
|
||||
if n.Parent != nil {
|
||||
n.Parent.RemoveChild(n)
|
||||
}
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// RemoveFiltered removes the set of matched elements by selector.
|
||||
// It returns the Selection of removed nodes.
|
||||
func (s *Selection) RemoveFiltered(selector string) *Selection {
|
||||
return s.RemoveMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// RemoveMatcher removes the set of matched elements.
|
||||
// It returns the Selection of removed nodes.
|
||||
func (s *Selection) RemoveMatcher(m Matcher) *Selection {
|
||||
return s.FilterMatcher(m).Remove()
|
||||
}
|
||||
|
||||
// ReplaceWith replaces each element in the set of matched elements with the
|
||||
// nodes matched by the given selector.
|
||||
// It returns the removed elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) ReplaceWith(selector string) *Selection {
|
||||
return s.ReplaceWithMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// ReplaceWithMatcher replaces each element in the set of matched elements with
|
||||
// the nodes matched by the given Matcher.
|
||||
// It returns the removed elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection {
|
||||
return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// ReplaceWithSelection replaces each element in the set of matched elements with
|
||||
// the nodes from the given Selection.
|
||||
// It returns the removed elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
|
||||
return s.ReplaceWithNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// ReplaceWithHtml replaces each element in the set of matched elements with
|
||||
// the parsed HTML.
|
||||
// It returns the removed elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) ReplaceWithHtml(html string) *Selection {
|
||||
return s.ReplaceWithNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// ReplaceWithNodes replaces each element in the set of matched elements with
|
||||
// the given nodes.
|
||||
// It returns the removed elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
|
||||
s.AfterNodes(ns...)
|
||||
return s.Remove()
|
||||
}
|
||||
|
||||
// SetHtml sets the html content of each element in the selection to
|
||||
// the specified html string.
|
||||
func (s *Selection) SetHtml(html string) *Selection {
|
||||
return setHtmlNodes(s, parseHtml(html)...)
|
||||
}
|
||||
|
||||
// SetText sets the content of each element in the selection to specified content.
|
||||
// The provided text string is escaped.
|
||||
func (s *Selection) SetText(text string) *Selection {
|
||||
return s.SetHtml(html.EscapeString(text))
|
||||
}
|
||||
|
||||
// Unwrap removes the parents of the set of matched elements, leaving the matched
|
||||
// elements (and their siblings, if any) in their place.
|
||||
// It returns the original selection.
|
||||
func (s *Selection) Unwrap() *Selection {
|
||||
s.Parent().Each(func(i int, ss *Selection) {
|
||||
// For some reason, jquery allows unwrap to remove the <head> element, so
|
||||
// allowing it here too. Same for <html>. Why it allows those elements to
|
||||
// be unwrapped while not allowing body is a mystery to me.
|
||||
if ss.Nodes[0].Data != "body" {
|
||||
ss.ReplaceWithSelection(ss.Contents())
|
||||
}
|
||||
})
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// Wrap wraps each element in the set of matched elements inside the first
|
||||
// element matched by the given selector. The matched child is cloned before
|
||||
// being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) Wrap(selector string) *Selection {
|
||||
return s.WrapMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// WrapMatcher wraps each element in the set of matched elements inside the
|
||||
// first element matched by the given matcher. The matched child is cloned
|
||||
// before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapMatcher(m Matcher) *Selection {
|
||||
return s.wrapNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// WrapSelection wraps each element in the set of matched elements inside the
|
||||
// first element in the given Selection. The element is cloned before being
|
||||
// inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapSelection(sel *Selection) *Selection {
|
||||
return s.wrapNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// WrapHtml wraps each element in the set of matched elements inside the inner-
|
||||
// most child of the given HTML.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapHtml(html string) *Selection {
|
||||
return s.wrapNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// WrapNode wraps each element in the set of matched elements inside the inner-
|
||||
// most child of the given node. The given node is copied before being inserted
|
||||
// into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapNode(n *html.Node) *Selection {
|
||||
return s.wrapNodes(n)
|
||||
}
|
||||
|
||||
func (s *Selection) wrapNodes(ns ...*html.Node) *Selection {
|
||||
s.Each(func(i int, ss *Selection) {
|
||||
ss.wrapAllNodes(ns...)
|
||||
})
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// WrapAll wraps a single HTML structure, matched by the given selector, around
|
||||
// all elements in the set of matched elements. The matched child is cloned
|
||||
// before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapAll(selector string) *Selection {
|
||||
return s.WrapAllMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher,
|
||||
// around all elements in the set of matched elements. The matched child is
|
||||
// cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapAllMatcher(m Matcher) *Selection {
|
||||
return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// WrapAllSelection wraps a single HTML structure, the first node of the given
|
||||
// Selection, around all elements in the set of matched elements. The matched
|
||||
// child is cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
|
||||
return s.wrapAllNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// WrapAllHtml wraps the given HTML structure around all elements in the set of
|
||||
// matched elements. The matched child is cloned before being inserted into the
|
||||
// document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapAllHtml(html string) *Selection {
|
||||
return s.wrapAllNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
|
||||
if len(ns) > 0 {
|
||||
return s.WrapAllNode(ns[0])
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// WrapAllNode wraps the given node around the first element in the Selection,
|
||||
// making all other nodes in the Selection children of the given node. The node
|
||||
// is cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapAllNode(n *html.Node) *Selection {
|
||||
if s.Size() == 0 {
|
||||
return s
|
||||
}
|
||||
|
||||
wrap := cloneNode(n)
|
||||
|
||||
first := s.Nodes[0]
|
||||
if first.Parent != nil {
|
||||
first.Parent.InsertBefore(wrap, first)
|
||||
first.Parent.RemoveChild(first)
|
||||
}
|
||||
|
||||
for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) {
|
||||
wrap = c
|
||||
}
|
||||
|
||||
newSingleSelection(wrap, s.document).AppendSelection(s)
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// WrapInner wraps an HTML structure, matched by the given selector, around the
|
||||
// content of element in the set of matched elements. The matched child is
|
||||
// cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapInner(selector string) *Selection {
|
||||
return s.WrapInnerMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// WrapInnerMatcher wraps an HTML structure, matched by the given selector,
|
||||
// around the content of element in the set of matched elements. The matched
|
||||
// child is cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapInnerMatcher(m Matcher) *Selection {
|
||||
return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...)
|
||||
}
|
||||
|
||||
// WrapInnerSelection wraps an HTML structure, matched by the given selector,
|
||||
// around the content of element in the set of matched elements. The matched
|
||||
// child is cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
|
||||
return s.wrapInnerNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// WrapInnerHtml wraps an HTML structure, matched by the given selector, around
|
||||
// the content of element in the set of matched elements. The matched child is
|
||||
// cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapInnerHtml(html string) *Selection {
|
||||
return s.wrapInnerNodes(parseHtml(html)...)
|
||||
}
|
||||
|
||||
// WrapInnerNode wraps an HTML structure, matched by the given selector, around
|
||||
// the content of element in the set of matched elements. The matched child is
|
||||
// cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapInnerNode(n *html.Node) *Selection {
|
||||
return s.wrapInnerNodes(n)
|
||||
}
|
||||
|
||||
func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection {
|
||||
if len(ns) == 0 {
|
||||
return s
|
||||
}
|
||||
|
||||
s.Each(func(i int, s *Selection) {
|
||||
contents := s.Contents()
|
||||
|
||||
if contents.Size() > 0 {
|
||||
contents.wrapAllNodes(ns...)
|
||||
} else {
|
||||
s.AppendNodes(cloneNode(ns[0]))
|
||||
}
|
||||
})
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func parseHtml(h string) []*html.Node {
|
||||
// Errors are only returned when the io.Reader returns any error besides
|
||||
// EOF, but strings.Reader never will
|
||||
nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode})
|
||||
if err != nil {
|
||||
panic("goquery: failed to parse HTML: " + err.Error())
|
||||
}
|
||||
return nodes
|
||||
}
|
||||
|
||||
func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection {
|
||||
for _, n := range s.Nodes {
|
||||
for c := n.FirstChild; c != nil; c = n.FirstChild {
|
||||
n.RemoveChild(c)
|
||||
}
|
||||
for _, c := range ns {
|
||||
n.AppendChild(cloneNode(c))
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// Get the first child that is an ElementNode
|
||||
func getFirstChildEl(n *html.Node) *html.Node {
|
||||
c := n.FirstChild
|
||||
for c != nil && c.Type != html.ElementNode {
|
||||
c = c.NextSibling
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// Deep copy a slice of nodes.
|
||||
func cloneNodes(ns []*html.Node) []*html.Node {
|
||||
cns := make([]*html.Node, 0, len(ns))
|
||||
|
||||
for _, n := range ns {
|
||||
cns = append(cns, cloneNode(n))
|
||||
}
|
||||
|
||||
return cns
|
||||
}
|
||||
|
||||
// Deep copy a node. The new node has clones of all the original node's
|
||||
// children but none of its parents or siblings.
|
||||
func cloneNode(n *html.Node) *html.Node {
|
||||
nn := &html.Node{
|
||||
Type: n.Type,
|
||||
DataAtom: n.DataAtom,
|
||||
Data: n.Data,
|
||||
Attr: make([]html.Attribute, len(n.Attr)),
|
||||
}
|
||||
|
||||
copy(nn.Attr, n.Attr)
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
nn.AppendChild(cloneNode(c))
|
||||
}
|
||||
|
||||
return nn
|
||||
}
|
||||
|
||||
func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
|
||||
f func(sn *html.Node, n *html.Node)) *Selection {
|
||||
|
||||
lasti := s.Size() - 1
|
||||
|
||||
// net.Html doesn't provide document fragments for insertion, so to get
|
||||
// things in the correct order with After() and Prepend(), the callback
|
||||
// needs to be called on the reverse of the nodes.
|
||||
if reverse {
|
||||
for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 {
|
||||
ns[i], ns[j] = ns[j], ns[i]
|
||||
}
|
||||
}
|
||||
|
||||
for i, sn := range s.Nodes {
|
||||
for _, n := range ns {
|
||||
if i != lasti {
|
||||
f(sn, cloneNode(n))
|
||||
} else {
|
||||
if n.Parent != nil {
|
||||
n.Parent.RemoveChild(n)
|
||||
}
|
||||
f(sn, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
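To show how a few of the manipulation methods above compose (Remove, AppendHtml, WrapHtml, Html), here is a minimal sketch under the same assumptions as the earlier examples; the markup and IDs are placeholders:

// example_manipulation_usage.go (illustrative only)
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<div id="box"><span>old</span></div>`))
	if err != nil {
		panic(err)
	}

	box := doc.Find("#box")
	box.Find("span").Remove()                          // detach the existing child
	box.AppendHtml(`<p class="note">hi</p>`)           // parse the fragment and append it
	box.Find(".note").WrapHtml(`<section></section>`)  // wrap the new node

	out, _ := box.Html() // inner HTML of #box, re-rendered with html.Render
	fmt.Println(out)
}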
275
vendor/github.com/PuerkitoBio/goquery/property.go
generated
vendored
Normal file
@@ -0,0 +1,275 @@
|
||||
package goquery
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
var rxClassTrim = regexp.MustCompile("[\t\r\n]")
|
||||
|
||||
// Attr gets the specified attribute's value for the first element in the
|
||||
// Selection. To get the value for each element individually, use a looping
|
||||
// construct such as the Each or Map methods.
|
||||
func (s *Selection) Attr(attrName string) (val string, exists bool) {
|
||||
if len(s.Nodes) == 0 {
|
||||
return
|
||||
}
|
||||
return getAttributeValue(attrName, s.Nodes[0])
|
||||
}
|
||||
|
||||
// AttrOr works like Attr but returns the default value if the attribute is not present.
|
||||
func (s *Selection) AttrOr(attrName, defaultValue string) string {
|
||||
if len(s.Nodes) == 0 {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
val, exists := getAttributeValue(attrName, s.Nodes[0])
|
||||
if !exists {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
return val
|
||||
}
|
||||
|
||||
// RemoveAttr removes the named attribute from each element in the set of matched elements.
|
||||
func (s *Selection) RemoveAttr(attrName string) *Selection {
|
||||
for _, n := range s.Nodes {
|
||||
removeAttr(n, attrName)
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// SetAttr sets the given attribute on each element in the set of matched elements.
|
||||
func (s *Selection) SetAttr(attrName, val string) *Selection {
|
||||
for _, n := range s.Nodes {
|
||||
attr := getAttributePtr(attrName, n)
|
||||
if attr == nil {
|
||||
n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val})
|
||||
} else {
|
||||
attr.Val = val
|
||||
}
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// Text gets the combined text contents of each element in the set of matched
|
||||
// elements, including their descendants.
|
||||
func (s *Selection) Text() string {
|
||||
var buf bytes.Buffer
|
||||
|
||||
// Slightly optimized vs calling Each: no single selection object created
|
||||
var f func(*html.Node)
|
||||
f = func(n *html.Node) {
|
||||
if n.Type == html.TextNode {
|
||||
// Keep newlines and spaces, like jQuery
|
||||
buf.WriteString(n.Data)
|
||||
}
|
||||
if n.FirstChild != nil {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
f(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, n := range s.Nodes {
|
||||
f(n)
|
||||
}
|
||||
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// Size is an alias for Length.
|
||||
func (s *Selection) Size() int {
|
||||
return s.Length()
|
||||
}
|
||||
|
||||
// Length returns the number of elements in the Selection object.
|
||||
func (s *Selection) Length() int {
|
||||
return len(s.Nodes)
|
||||
}
|
||||
|
||||
// Html gets the HTML contents of the first element in the set of matched
|
||||
// elements. It includes text and comment nodes.
|
||||
func (s *Selection) Html() (ret string, e error) {
|
||||
// Since there is no .innerHtml, the HTML content must be re-created from
|
||||
// the nodes using html.Render.
|
||||
var buf bytes.Buffer
|
||||
|
||||
if len(s.Nodes) > 0 {
|
||||
for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling {
|
||||
e = html.Render(&buf, c)
|
||||
if e != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
ret = buf.String()
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// AddClass adds the given class(es) to each element in the set of matched elements.
|
||||
// Multiple class names can be specified, separated by a space or via multiple arguments.
|
||||
func (s *Selection) AddClass(class ...string) *Selection {
|
||||
classStr := strings.TrimSpace(strings.Join(class, " "))
|
||||
|
||||
if classStr == "" {
|
||||
return s
|
||||
}
|
||||
|
||||
tcls := getClassesSlice(classStr)
|
||||
for _, n := range s.Nodes {
|
||||
curClasses, attr := getClassesAndAttr(n, true)
|
||||
for _, newClass := range tcls {
|
||||
if !strings.Contains(curClasses, " "+newClass+" ") {
|
||||
curClasses += newClass + " "
|
||||
}
|
||||
}
|
||||
|
||||
setClasses(n, attr, curClasses)
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// HasClass determines whether any of the matched elements are assigned the
|
||||
// given class.
|
||||
func (s *Selection) HasClass(class string) bool {
|
||||
class = " " + class + " "
|
||||
for _, n := range s.Nodes {
|
||||
classes, _ := getClassesAndAttr(n, false)
|
||||
if strings.Contains(classes, class) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// RemoveClass removes the given class(es) from each element in the set of matched elements.
|
||||
// Multiple class names can be specified, separated by a space or via multiple arguments.
|
||||
// If no class name is provided, all classes are removed.
|
||||
func (s *Selection) RemoveClass(class ...string) *Selection {
|
||||
var rclasses []string
|
||||
|
||||
classStr := strings.TrimSpace(strings.Join(class, " "))
|
||||
remove := classStr == ""
|
||||
|
||||
if !remove {
|
||||
rclasses = getClassesSlice(classStr)
|
||||
}
|
||||
|
||||
for _, n := range s.Nodes {
|
||||
if remove {
|
||||
removeAttr(n, "class")
|
||||
} else {
|
||||
classes, attr := getClassesAndAttr(n, true)
|
||||
for _, rcl := range rclasses {
|
||||
classes = strings.Replace(classes, " "+rcl+" ", " ", -1)
|
||||
}
|
||||
|
||||
setClasses(n, attr, classes)
|
||||
}
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// ToggleClass adds or removes the given class(es) for each element in the set of matched elements.
|
||||
// Multiple class names can be specified, separated by a space or via multiple arguments.
|
||||
func (s *Selection) ToggleClass(class ...string) *Selection {
|
||||
classStr := strings.TrimSpace(strings.Join(class, " "))
|
||||
|
||||
if classStr == "" {
|
||||
return s
|
||||
}
|
||||
|
||||
tcls := getClassesSlice(classStr)
|
||||
|
||||
for _, n := range s.Nodes {
|
||||
classes, attr := getClassesAndAttr(n, true)
|
||||
for _, tcl := range tcls {
|
||||
if strings.Contains(classes, " "+tcl+" ") {
|
||||
classes = strings.Replace(classes, " "+tcl+" ", " ", -1)
|
||||
} else {
|
||||
classes += tcl + " "
|
||||
}
|
||||
}
|
||||
|
||||
setClasses(n, attr, classes)
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func getAttributePtr(attrName string, n *html.Node) *html.Attribute {
|
||||
if n == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for i, a := range n.Attr {
|
||||
if a.Key == attrName {
|
||||
return &n.Attr[i]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Private function to get the specified attribute's value from a node.
|
||||
func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) {
|
||||
if a := getAttributePtr(attrName, n); a != nil {
|
||||
val = a.Val
|
||||
exists = true
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Get and normalize the "class" attribute from the node.
|
||||
func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) {
|
||||
// Applies only to element nodes
|
||||
if n.Type == html.ElementNode {
|
||||
attr = getAttributePtr("class", n)
|
||||
if attr == nil && create {
|
||||
n.Attr = append(n.Attr, html.Attribute{
|
||||
Key: "class",
|
||||
Val: "",
|
||||
})
|
||||
attr = &n.Attr[len(n.Attr)-1]
|
||||
}
|
||||
}
|
||||
|
||||
if attr == nil {
|
||||
classes = " "
|
||||
} else {
|
||||
classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ")
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func getClassesSlice(classes string) []string {
|
||||
return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ")
|
||||
}
|
||||
|
||||
func removeAttr(n *html.Node, attrName string) {
|
||||
for i, a := range n.Attr {
|
||||
if a.Key == attrName {
|
||||
n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr =
|
||||
n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1]
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setClasses(n *html.Node, attr *html.Attribute, classes string) {
|
||||
classes = strings.TrimSpace(classes)
|
||||
if classes == "" {
|
||||
removeAttr(n, "class")
|
||||
return
|
||||
}
|
||||
|
||||
attr.Val = classes
|
||||
}
|
||||
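A brief sketch of the attribute and class helpers defined above (Attr, AttrOr, AddClass, HasClass, Text); the markup is a placeholder and NewDocumentFromReader is assumed from elsewhere in the package:

// example_property_usage.go (illustrative only)
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<a href="/home" class="nav">Home</a>`))
	if err != nil {
		panic(err)
	}

	link := doc.Find("a")
	href, ok := link.Attr("href") // "/home", true
	fmt.Println(href, ok)
	fmt.Println(link.AttrOr("target", "_self")) // falls back to the default
	link.AddClass("active")
	fmt.Println(link.HasClass("active"), link.Text()) // true Home
}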
49
vendor/github.com/PuerkitoBio/goquery/query.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
package goquery

import "golang.org/x/net/html"

// Is checks the current matched set of elements against a selector and
// returns true if at least one of these elements matches.
func (s *Selection) Is(selector string) bool {
	return s.IsMatcher(compileMatcher(selector))
}

// IsMatcher checks the current matched set of elements against a matcher and
// returns true if at least one of these elements matches.
func (s *Selection) IsMatcher(m Matcher) bool {
	if len(s.Nodes) > 0 {
		if len(s.Nodes) == 1 {
			return m.Match(s.Nodes[0])
		}
		return len(m.Filter(s.Nodes)) > 0
	}

	return false
}

// IsFunction checks the current matched set of elements against a predicate and
// returns true if at least one of these elements matches.
func (s *Selection) IsFunction(f func(int, *Selection) bool) bool {
	return s.FilterFunction(f).Length() > 0
}

// IsSelection checks the current matched set of elements against a Selection object
// and returns true if at least one of these elements matches.
func (s *Selection) IsSelection(sel *Selection) bool {
	return s.FilterSelection(sel).Length() > 0
}

// IsNodes checks the current matched set of elements against the specified nodes
// and returns true if at least one of these elements matches.
func (s *Selection) IsNodes(nodes ...*html.Node) bool {
	return s.FilterNodes(nodes...).Length() > 0
}

// Contains returns true if the specified Node is within,
// at any depth, one of the nodes in the Selection object.
// It is NOT inclusive, to behave like jQuery's implementation, and
// unlike Javascript's .contains, so if the contained
// node is itself in the selection, it returns false.
func (s *Selection) Contains(n *html.Node) bool {
	return sliceContains(s.Nodes, n)
}
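A short sketch of Is and Contains; note that Contains is non-inclusive, as the comment above states. NewDocumentFromReader is assumed from elsewhere in the package and the markup is illustrative:

// example_query_usage.go (illustrative only)
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<div><p class="x">hi</p></div>`))
	if err != nil {
		panic(err)
	}

	p := doc.Find("p")
	fmt.Println(p.Is(".x")) // true: at least one element matches
	fmt.Println(doc.Find("div").Contains(p.Get(0))) // true: <p> is a descendant, not in the set
}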
698
vendor/github.com/PuerkitoBio/goquery/traversal.go
generated
vendored
Normal file
@@ -0,0 +1,698 @@
|
||||
package goquery
|
||||
|
||||
import "golang.org/x/net/html"
|
||||
|
||||
type siblingType int
|
||||
|
||||
// Sibling type, used internally when iterating over children at the same
|
||||
// level (siblings) to specify which nodes are requested.
|
||||
const (
|
||||
siblingPrevUntil siblingType = iota - 3
|
||||
siblingPrevAll
|
||||
siblingPrev
|
||||
siblingAll
|
||||
siblingNext
|
||||
siblingNextAll
|
||||
siblingNextUntil
|
||||
siblingAllIncludingNonElements
|
||||
)
|
||||
|
||||
// Find gets the descendants of each element in the current set of matched
|
||||
// elements, filtered by a selector. It returns a new Selection object
|
||||
// containing these matched elements.
|
||||
func (s *Selection) Find(selector string) *Selection {
|
||||
return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
|
||||
}
|
||||
|
||||
// FindMatcher gets the descendants of each element in the current set of matched
|
||||
// elements, filtered by the matcher. It returns a new Selection object
|
||||
// containing these matched elements.
|
||||
func (s *Selection) FindMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, findWithMatcher(s.Nodes, m))
|
||||
}
|
||||
|
||||
// FindSelection gets the descendants of each element in the current
|
||||
// Selection, filtered by a Selection. It returns a new Selection object
|
||||
// containing these matched elements.
|
||||
func (s *Selection) FindSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return pushStack(s, nil)
|
||||
}
|
||||
return s.FindNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// FindNodes gets the descendants of each element in the current
|
||||
// Selection, filtered by some nodes. It returns a new Selection object
|
||||
// containing these matched elements.
|
||||
func (s *Selection) FindNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||
if sliceContains(s.Nodes, n) {
|
||||
return []*html.Node{n}
|
||||
}
|
||||
return nil
|
||||
}))
|
||||
}
|
||||
|
||||
// Contents gets the children of each element in the Selection,
|
||||
// including text and comment nodes. It returns a new Selection object
|
||||
// containing these elements.
|
||||
func (s *Selection) Contents() *Selection {
|
||||
return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements))
|
||||
}
|
||||
|
||||
// ContentsFiltered gets the children of each element in the Selection,
|
||||
// filtered by the specified selector. It returns a new Selection
|
||||
// object containing these elements. Since selectors only act on Element nodes,
|
||||
// this function is an alias to ChildrenFiltered unless the selector is empty,
|
||||
// in which case it is an alias to Contents.
|
||||
func (s *Selection) ContentsFiltered(selector string) *Selection {
|
||||
if selector != "" {
|
||||
return s.ChildrenFiltered(selector)
|
||||
}
|
||||
return s.Contents()
|
||||
}
|
||||
|
||||
// ContentsMatcher gets the children of each element in the Selection,
|
||||
// filtered by the specified matcher. It returns a new Selection
|
||||
// object containing these elements. Since matchers only act on Element nodes,
|
||||
// this function is an alias to ChildrenMatcher.
|
||||
func (s *Selection) ContentsMatcher(m Matcher) *Selection {
|
||||
return s.ChildrenMatcher(m)
|
||||
}
|
||||
|
||||
// Children gets the child elements of each element in the Selection.
|
||||
// It returns a new Selection object containing these elements.
|
||||
func (s *Selection) Children() *Selection {
|
||||
return pushStack(s, getChildrenNodes(s.Nodes, siblingAll))
|
||||
}
|
||||
|
||||
// ChildrenFiltered gets the child elements of each element in the Selection,
|
||||
// filtered by the specified selector. It returns a new
|
||||
// Selection object containing these elements.
|
||||
func (s *Selection) ChildrenFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// ChildrenMatcher gets the child elements of each element in the Selection,
|
||||
// filtered by the specified matcher. It returns a new
|
||||
// Selection object containing these elements.
|
||||
func (s *Selection) ChildrenMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m)
|
||||
}
|
||||
|
||||
// Parent gets the parent of each element in the Selection. It returns a
|
||||
// new Selection object containing the matched elements.
|
||||
func (s *Selection) Parent() *Selection {
|
||||
return pushStack(s, getParentNodes(s.Nodes))
|
||||
}
|
||||
|
||||
// ParentFiltered gets the parent of each element in the Selection filtered by a
|
||||
// selector. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) ParentFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// ParentMatcher gets the parent of each element in the Selection filtered by a
|
||||
// matcher. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) ParentMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getParentNodes(s.Nodes), m)
|
||||
}
|
||||
|
||||
// Closest gets the first element that matches the selector by testing the
|
||||
// element itself and traversing up through its ancestors in the DOM tree.
|
||||
func (s *Selection) Closest(selector string) *Selection {
|
||||
cs := compileMatcher(selector)
|
||||
return s.ClosestMatcher(cs)
|
||||
}
|
||||
|
||||
// ClosestMatcher gets the first element that matches the matcher by testing the
|
||||
// element itself and traversing up through its ancestors in the DOM tree.
|
||||
func (s *Selection) ClosestMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
|
||||
// For each node in the selection, test the node itself, then each parent
|
||||
// until a match is found.
|
||||
for ; n != nil; n = n.Parent {
|
||||
if m.Match(n) {
|
||||
return []*html.Node{n}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}))
|
||||
}
|
||||
|
||||
// ClosestNodes gets the first element that matches one of the nodes by testing the
|
||||
// element itself and traversing up through its ancestors in the DOM tree.
|
||||
func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection {
|
||||
set := make(map[*html.Node]bool)
|
||||
for _, n := range nodes {
|
||||
set[n] = true
|
||||
}
|
||||
return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
|
||||
// For each node in the selection, test the node itself, then each parent
|
||||
// until a match is found.
|
||||
for ; n != nil; n = n.Parent {
|
||||
if set[n] {
|
||||
return []*html.Node{n}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}))
|
||||
}
|
||||
|
||||
// ClosestSelection gets the first element that matches one of the nodes in the
|
||||
// Selection by testing the element itself and traversing up through its ancestors
|
||||
// in the DOM tree.
|
||||
func (s *Selection) ClosestSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return pushStack(s, nil)
|
||||
}
|
||||
return s.ClosestNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// Parents gets the ancestors of each element in the current Selection. It
|
||||
// returns a new Selection object with the matched elements.
|
||||
func (s *Selection) Parents() *Selection {
|
||||
return pushStack(s, getParentsNodes(s.Nodes, nil, nil))
|
||||
}
|
||||
|
||||
// ParentsFiltered gets the ancestors of each element in the current
|
||||
// Selection, filtered by a selector. It returns a new Selection object with the matched elements.
|
||||
func (s *Selection) ParentsFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// ParentsMatcher gets the ancestors of each element in the current
|
||||
// Selection, filtered by a matcher. It returns a new Selection object with the matched elements.
|
||||
func (s *Selection) ParentsMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m)
|
||||
}
|
||||
|
||||
// ParentsUntil gets the ancestors of each element in the Selection, up to but
|
||||
// not including the element matched by the selector. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) ParentsUntil(selector string) *Selection {
|
||||
return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil))
|
||||
}
|
||||
|
||||
// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but
|
||||
// not including the element matched by the matcher. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, getParentsNodes(s.Nodes, m, nil))
|
||||
}
|
||||
|
||||
// ParentsUntilSelection gets the ancestors of each element in the Selection,
|
||||
// up to but not including the elements in the specified Selection. It returns a
|
||||
// new Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.Parents()
|
||||
}
|
||||
return s.ParentsUntilNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// ParentsUntilNodes gets the ancestors of each element in the Selection,
|
||||
// up to but not including the specified nodes. It returns a
|
||||
// new Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, getParentsNodes(s.Nodes, nil, nodes))
|
||||
}
|
||||
|
||||
// ParentsFilteredUntil is like ParentsUntil, with the option to filter the
|
||||
// results based on a selector string. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection {
|
||||
return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
|
||||
}
|
||||
|
||||
// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the
|
||||
// results based on a matcher. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection {
|
||||
return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter)
|
||||
}
|
||||
|
||||
// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the
|
||||
// option to filter the results based on a selector string. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
|
||||
return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel)
|
||||
}
|
||||
|
||||
// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the
|
||||
// option to filter the results based on a matcher. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.ParentsMatcher(filter)
|
||||
}
|
||||
return s.ParentsMatcherUntilNodes(filter, sel.Nodes...)
|
||||
}
|
||||
|
||||
// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the
|
||||
// option to filter the results based on a selector string. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
|
||||
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector))
|
||||
}
|
||||
|
||||
// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the
|
||||
// option to filter the results based on a matcher. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
|
||||
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter)
|
||||
}
|
||||
|
||||
// Siblings gets the siblings of each element in the Selection. It returns
|
||||
// a new Selection object containing the matched elements.
|
||||
func (s *Selection) Siblings() *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil))
|
||||
}
|
||||
|
||||
// SiblingsFiltered gets the siblings of each element in the Selection
|
||||
// filtered by a selector. It returns a new Selection object containing the
|
||||
// matched elements.
|
||||
func (s *Selection) SiblingsFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// SiblingsMatcher gets the siblings of each element in the Selection
|
||||
// filtered by a matcher. It returns a new Selection object containing the
|
||||
// matched elements.
|
||||
func (s *Selection) SiblingsMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m)
|
||||
}
|
||||
|
||||
// Next gets the immediately following sibling of each element in the
|
||||
// Selection. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) Next() *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil))
|
||||
}
|
||||
|
||||
// NextFiltered gets the immediately following sibling of each element in the
|
||||
// Selection filtered by a selector. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) NextFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// NextMatcher gets the immediately following sibling of each element in the
|
||||
// Selection filtered by a matcher. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) NextMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m)
|
||||
}
|
||||
|
||||
// NextAll gets all the following siblings of each element in the
|
||||
// Selection. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) NextAll() *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil))
|
||||
}
|
||||
|
||||
// NextAllFiltered gets all the following siblings of each element in the
|
||||
// Selection filtered by a selector. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) NextAllFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// NextAllMatcher gets all the following siblings of each element in the
|
||||
// Selection filtered by a matcher. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) NextAllMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m)
|
||||
}
|
||||
|
||||
// Prev gets the immediately preceding sibling of each element in the
|
||||
// Selection. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) Prev() *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil))
|
||||
}
|
||||
|
||||
// PrevFiltered gets the immediately preceding sibling of each element in the
|
||||
// Selection filtered by a selector. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) PrevFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// PrevMatcher gets the immediately preceding sibling of each element in the
|
||||
// Selection filtered by a matcher. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) PrevMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m)
|
||||
}
|
||||
|
||||
// PrevAll gets all the preceding siblings of each element in the
|
||||
// Selection. It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) PrevAll() *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil))
|
||||
}
|
||||
|
||||
// PrevAllFiltered gets all the preceding siblings of each element in the
|
||||
// Selection filtered by a selector. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) PrevAllFiltered(selector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector))
|
||||
}
|
||||
|
||||
// PrevAllMatcher gets all the preceding siblings of each element in the
|
||||
// Selection filtered by a matcher. It returns a new Selection object
|
||||
// containing the matched elements.
|
||||
func (s *Selection) PrevAllMatcher(m Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m)
|
||||
}
|
||||
|
||||
// NextUntil gets all following siblings of each element up to but not
|
||||
// including the element matched by the selector. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) NextUntil(selector string) *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
compileMatcher(selector), nil))
|
||||
}
|
||||
|
||||
// NextUntilMatcher gets all following siblings of each element up to but not
|
||||
// including the element matched by the matcher. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) NextUntilMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
m, nil))
|
||||
}
|
||||
|
||||
// NextUntilSelection gets all following siblings of each element up to but not
|
||||
// including the element matched by the Selection. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) NextUntilSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.NextAll()
|
||||
}
|
||||
return s.NextUntilNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// NextUntilNodes gets all following siblings of each element up to but not
|
||||
// including the element matched by the nodes. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
nil, nodes))
|
||||
}
|
||||
|
||||
// PrevUntil gets all preceding siblings of each element up to but not
|
||||
// including the element matched by the selector. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) PrevUntil(selector string) *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
compileMatcher(selector), nil))
|
||||
}
|
||||
|
||||
// PrevUntilMatcher gets all preceding siblings of each element up to but not
|
||||
// including the element matched by the matcher. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) PrevUntilMatcher(m Matcher) *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
m, nil))
|
||||
}
|
||||
|
||||
// PrevUntilSelection gets all preceding siblings of each element up to but not
|
||||
// including the element matched by the Selection. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) PrevUntilSelection(sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.PrevAll()
|
||||
}
|
||||
return s.PrevUntilNodes(sel.Nodes...)
|
||||
}
|
||||
|
||||
// PrevUntilNodes gets all preceding siblings of each element up to but not
|
||||
// including the element matched by the nodes. It returns a new Selection
|
||||
// object containing the matched elements.
|
||||
func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection {
|
||||
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
nil, nodes))
|
||||
}
|
||||
|
||||
// NextFilteredUntil is like NextUntil, with the option to filter
|
||||
// the results based on a selector string.
|
||||
// It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
|
||||
}
|
||||
|
||||
// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter
|
||||
// the results based on a matcher.
|
||||
// It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
until, nil), filter)
|
||||
}
|
||||
|
||||
// NextFilteredUntilSelection is like NextUntilSelection, with the
|
||||
// option to filter the results based on a selector string. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
|
||||
return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel)
|
||||
}
|
||||
|
||||
// NextMatcherUntilSelection is like NextUntilSelection, with the
|
||||
// option to filter the results based on a matcher. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.NextMatcher(filter)
|
||||
}
|
||||
return s.NextMatcherUntilNodes(filter, sel.Nodes...)
|
||||
}
|
||||
|
||||
// NextFilteredUntilNodes is like NextUntilNodes, with the
|
||||
// option to filter the results based on a selector string. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
nil, nodes), compileMatcher(filterSelector))
|
||||
}
|
||||
|
||||
// NextMatcherUntilNodes is like NextUntilNodes, with the
|
||||
// option to filter the results based on a matcher. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||
nil, nodes), filter)
|
||||
}
|
||||
|
||||
// PrevFilteredUntil is like PrevUntil, with the option to filter
|
||||
// the results based on a selector string.
|
||||
// It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
|
||||
}
|
||||
|
||||
// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter
|
||||
// the results based on a matcher.
|
||||
// It returns a new Selection object containing the matched elements.
|
||||
func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
until, nil), filter)
|
||||
}
|
||||
|
||||
// PrevFilteredUntilSelection is like PrevUntilSelection, with the
|
||||
// option to filter the results based on a selector string. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
|
||||
return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel)
|
||||
}
|
||||
|
||||
// PrevMatcherUntilSelection is like PrevUntilSelection, with the
|
||||
// option to filter the results based on a matcher. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
|
||||
if sel == nil {
|
||||
return s.PrevMatcher(filter)
|
||||
}
|
||||
return s.PrevMatcherUntilNodes(filter, sel.Nodes...)
|
||||
}
|
||||
|
||||
// PrevFilteredUntilNodes is like PrevUntilNodes, with the
|
||||
// option to filter the results based on a selector string. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
nil, nodes), compileMatcher(filterSelector))
|
||||
}
|
||||
|
||||
// PrevMatcherUntilNodes is like PrevUntilNodes, with the
|
||||
// option to filter the results based on a matcher. It returns a new
|
||||
// Selection object containing the matched elements.
|
||||
func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
|
||||
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||
nil, nodes), filter)
|
||||
}
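// Illustrative sketch (not part of the vendored source): the Filtered and
// Matcher variants combine an "until" boundary with a filter on the results.
// Assuming a parsed *Document named doc, this keeps only the <p> siblings
// following each selected node, stopping at the first <hr>:
//
//	paras := doc.Find(".intro").NextFilteredUntil("p", "hr")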
|
||||
|
||||
// Filter and push filters the nodes based on a matcher, and pushes the results
|
||||
// on the stack, with the srcSel as previous selection.
|
||||
func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection {
|
||||
// Create a temporary Selection with the specified nodes to filter using winnow
|
||||
sel := &Selection{nodes, srcSel.document, nil}
|
||||
// Filter based on matcher and push on stack
|
||||
return pushStack(srcSel, winnow(sel, m, true))
|
||||
}
|
||||
|
||||
// Internal implementation of Find that returns raw nodes.
|
||||
func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node {
|
||||
// Map nodes to find the matches within the children of each node
|
||||
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
|
||||
// Go down one level, because jQuery's Find selects only within descendants
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if c.Type == html.ElementNode {
|
||||
result = append(result, m.MatchAll(c)...)
|
||||
}
|
||||
}
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
// Internal implementation to get all parent nodes, stopping at the specified
|
||||
// node (or nil if no stop).
|
||||
func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node {
|
||||
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
|
||||
for p := n.Parent; p != nil; p = p.Parent {
|
||||
sel := newSingleSelection(p, nil)
|
||||
if stopm != nil {
|
||||
if sel.IsMatcher(stopm) {
|
||||
break
|
||||
}
|
||||
} else if len(stopNodes) > 0 {
|
||||
if sel.IsNodes(stopNodes...) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if p.Type == html.ElementNode {
|
||||
result = append(result, p)
|
||||
}
|
||||
}
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
// Internal implementation of sibling nodes that returns a raw slice of matches.
|
||||
func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node {
|
||||
var f func(*html.Node) bool
|
||||
|
||||
// If the requested siblings are ...Until, create the test function to
|
||||
// determine if the until condition is reached (returns true if it is)
|
||||
if st == siblingNextUntil || st == siblingPrevUntil {
|
||||
f = func(n *html.Node) bool {
|
||||
if untilm != nil {
|
||||
// Matcher-based condition
|
||||
sel := newSingleSelection(n, nil)
|
||||
return sel.IsMatcher(untilm)
|
||||
} else if len(untilNodes) > 0 {
|
||||
// Nodes-based condition
|
||||
sel := newSingleSelection(n, nil)
|
||||
return sel.IsNodes(untilNodes...)
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||
return getChildrenWithSiblingType(n.Parent, st, n, f)
|
||||
})
|
||||
}
|
||||
|
||||
// Gets the children nodes of each node in the specified slice of nodes,
|
||||
// based on the sibling type request.
|
||||
func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node {
|
||||
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||
return getChildrenWithSiblingType(n, st, nil, nil)
|
||||
})
|
||||
}
|
||||
|
||||
// Gets the children of the specified parent, based on the requested sibling
|
||||
// type, skipping a specified node if required.
|
||||
func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node,
|
||||
untilFunc func(*html.Node) bool) (result []*html.Node) {
|
||||
|
||||
// Create the iterator function
|
||||
var iter = func(cur *html.Node) (ret *html.Node) {
|
||||
// Based on the sibling type requested, iterate the right way
|
||||
for {
|
||||
switch st {
|
||||
case siblingAll, siblingAllIncludingNonElements:
|
||||
if cur == nil {
|
||||
// First iteration, start with first child of parent
|
||||
// Skip node if required
|
||||
if ret = parent.FirstChild; ret == skipNode && skipNode != nil {
|
||||
ret = skipNode.NextSibling
|
||||
}
|
||||
} else {
|
||||
// Skip node if required
|
||||
if ret = cur.NextSibling; ret == skipNode && skipNode != nil {
|
||||
ret = skipNode.NextSibling
|
||||
}
|
||||
}
|
||||
case siblingPrev, siblingPrevAll, siblingPrevUntil:
|
||||
if cur == nil {
|
||||
// Start with previous sibling of the skip node
|
||||
ret = skipNode.PrevSibling
|
||||
} else {
|
||||
ret = cur.PrevSibling
|
||||
}
|
||||
case siblingNext, siblingNextAll, siblingNextUntil:
|
||||
if cur == nil {
|
||||
// Start with next sibling of the skip node
|
||||
ret = skipNode.NextSibling
|
||||
} else {
|
||||
ret = cur.NextSibling
|
||||
}
|
||||
default:
|
||||
panic("Invalid sibling type.")
|
||||
}
|
||||
if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements {
|
||||
return
|
||||
}
|
||||
// Not a valid node, try again from this one
|
||||
cur = ret
|
||||
}
|
||||
}
|
||||
|
||||
for c := iter(nil); c != nil; c = iter(c) {
|
||||
// If this is an ...Until case, test before append (returns true
|
||||
// if the until condition is reached)
|
||||
if st == siblingNextUntil || st == siblingPrevUntil {
|
||||
if untilFunc(c) {
|
||||
return
|
||||
}
|
||||
}
|
||||
result = append(result, c)
|
||||
if st == siblingNext || st == siblingPrev {
|
||||
// Only one node was requested (immediate next or previous), so exit
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Internal implementation of parent nodes that returns a raw slice of nodes.
|
||||
func getParentNodes(nodes []*html.Node) []*html.Node {
|
||||
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||
if n.Parent != nil && n.Parent.Type == html.ElementNode {
|
||||
return []*html.Node{n.Parent}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// Internal map function used by many traversing methods. Takes the source nodes
|
||||
// to iterate on and the mapping function that returns an array of nodes.
|
||||
// Returns an array of nodes mapped by calling the callback function once for
|
||||
// each node in the source nodes.
|
||||
func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) {
|
||||
set := make(map[*html.Node]bool)
|
||||
for i, n := range nodes {
|
||||
if vals := f(i, n); len(vals) > 0 {
|
||||
result = appendWithoutDuplicates(result, vals, set)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
141
vendor/github.com/PuerkitoBio/goquery/type.go
generated
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
package goquery
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/andybalholm/cascadia"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// Document represents an HTML document to be manipulated. Unlike jQuery, which
|
||||
// is loaded as part of a DOM document, and thus acts upon its containing
|
||||
// document, GoQuery doesn't know which HTML document to act upon. So it needs
|
||||
// to be told, and that's what the Document class is for. It holds the root
|
||||
// document node to manipulate, and can make selections on this document.
|
||||
type Document struct {
|
||||
*Selection
|
||||
Url *url.URL
|
||||
rootNode *html.Node
|
||||
}
|
||||
|
||||
// NewDocumentFromNode is a Document constructor that takes a root html Node
|
||||
// as argument.
|
||||
func NewDocumentFromNode(root *html.Node) *Document {
|
||||
return newDocument(root, nil)
|
||||
}
|
||||
|
||||
// NewDocument is a Document constructor that takes a string URL as argument.
|
||||
// It loads the specified document, parses it, and stores the root Document
|
||||
// node, ready to be manipulated.
|
||||
//
|
||||
// Deprecated: Use the net/http standard library package to make the request
|
||||
// and validate the response before calling goquery.NewDocumentFromReader
|
||||
// with the response's body.
|
||||
func NewDocument(url string) (*Document, error) {
|
||||
// Load the URL
|
||||
res, e := http.Get(url)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
return NewDocumentFromResponse(res)
|
||||
}
|
||||
|
||||
// NewDocumentFromReader returns a Document from an io.Reader.
|
||||
// It returns an error as second value if the reader's data cannot be parsed
|
||||
// as html. It does not check if the reader is also an io.Closer, the
|
||||
// provided reader is never closed by this call. It is the responsibility
|
||||
// of the caller to close it if required.
|
||||
func NewDocumentFromReader(r io.Reader) (*Document, error) {
|
||||
root, e := html.Parse(r)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
return newDocument(root, nil), nil
|
||||
}
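// Illustrative sketch of the pattern the deprecation notices above point to
// (not part of the vendored source): fetch with net/http, validate the
// response yourself, then hand the body to NewDocumentFromReader. The URL is
// a placeholder.
//
//	res, err := http.Get("https://example.com")
//	if err != nil {
//		return err
//	}
//	defer res.Body.Close()
//	if res.StatusCode != http.StatusOK {
//		return fmt.Errorf("unexpected status: %s", res.Status)
//	}
//	doc, err := goquery.NewDocumentFromReader(res.Body)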
|
||||
|
||||
// NewDocumentFromResponse is another Document constructor that takes an http response as argument.
|
||||
// It loads the specified response's document, parses it, and stores the root Document
|
||||
// node, ready to be manipulated. The response's body is closed on return.
|
||||
//
|
||||
// Deprecated: Use goquery.NewDocumentFromReader with the response's body.
|
||||
func NewDocumentFromResponse(res *http.Response) (*Document, error) {
|
||||
if res == nil {
|
||||
return nil, errors.New("Response is nil")
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.Request == nil {
|
||||
return nil, errors.New("Response.Request is nil")
|
||||
}
|
||||
|
||||
// Parse the HTML into nodes
|
||||
root, e := html.Parse(res.Body)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
|
||||
// Create and fill the document
|
||||
return newDocument(root, res.Request.URL), nil
|
||||
}
|
||||
|
||||
// CloneDocument creates a deep-clone of a document.
|
||||
func CloneDocument(doc *Document) *Document {
|
||||
return newDocument(cloneNode(doc.rootNode), doc.Url)
|
||||
}
|
||||
|
||||
// Private constructor, make sure all fields are correctly filled.
|
||||
func newDocument(root *html.Node, url *url.URL) *Document {
|
||||
// Create and fill the document
|
||||
d := &Document{nil, url, root}
|
||||
d.Selection = newSingleSelection(root, d)
|
||||
return d
|
||||
}
|
||||
|
||||
// Selection represents a collection of nodes matching some criteria. The
|
||||
// initial Selection can be created by using Document.Find, and then
|
||||
// manipulated using the jQuery-like chainable syntax and methods.
|
||||
type Selection struct {
|
||||
Nodes []*html.Node
|
||||
document *Document
|
||||
prevSel *Selection
|
||||
}
|
||||
|
||||
// Helper constructor to create an empty selection
|
||||
func newEmptySelection(doc *Document) *Selection {
|
||||
return &Selection{nil, doc, nil}
|
||||
}
|
||||
|
||||
// Helper constructor to create a selection of only one node
|
||||
func newSingleSelection(node *html.Node, doc *Document) *Selection {
|
||||
return &Selection{[]*html.Node{node}, doc, nil}
|
||||
}
|
||||
|
||||
// Matcher is an interface that defines the methods to match
|
||||
// HTML nodes against a compiled selector string. Cascadia's
|
||||
// Selector implements this interface.
|
||||
type Matcher interface {
|
||||
Match(*html.Node) bool
|
||||
MatchAll(*html.Node) []*html.Node
|
||||
Filter([]*html.Node) []*html.Node
|
||||
}
|
||||
|
||||
// compileMatcher compiles the selector string s and returns
|
||||
// the corresponding Matcher. If s is an invalid selector string,
|
||||
// it returns a Matcher that fails all matches.
|
||||
func compileMatcher(s string) Matcher {
|
||||
cs, err := cascadia.Compile(s)
|
||||
if err != nil {
|
||||
return invalidMatcher{}
|
||||
}
|
||||
return cs
|
||||
}
|
||||
|
||||
// invalidMatcher is a Matcher that always fails to match.
|
||||
type invalidMatcher struct{}
|
||||
|
||||
func (invalidMatcher) Match(n *html.Node) bool { return false }
|
||||
func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil }
|
||||
func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil }
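// Illustrative sketch (not part of the vendored source): any type with the
// three methods above satisfies Matcher, so callers can plug in custom match
// logic. This hypothetical example matches element nodes by exact tag name:
//
//	type tagMatcher string
//
//	func (t tagMatcher) Match(n *html.Node) bool {
//		return n.Type == html.ElementNode && n.Data == string(t)
//	}
//
//	func (t tagMatcher) MatchAll(n *html.Node) (out []*html.Node) {
//		if t.Match(n) {
//			out = append(out, n)
//		}
//		for c := n.FirstChild; c != nil; c = c.NextSibling {
//			out = append(out, t.MatchAll(c)...)
//		}
//		return out
//	}
//
//	func (t tagMatcher) Filter(ns []*html.Node) (out []*html.Node) {
//		for _, n := range ns {
//			if t.Match(n) {
//				out = append(out, n)
//			}
//		}
//		return out
//	}
//
//	links := doc.FindMatcher(tagMatcher("a"))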
|
||||
161
vendor/github.com/PuerkitoBio/goquery/utilities.go
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
package goquery
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// used to determine if a set (map[*html.Node]bool) should be used
|
||||
// instead of iterating over a slice. The set uses more memory and
|
||||
// is slower than slice iteration for small N.
|
||||
const minNodesForSet = 1000
|
||||
|
||||
var nodeNames = []string{
|
||||
html.ErrorNode: "#error",
|
||||
html.TextNode: "#text",
|
||||
html.DocumentNode: "#document",
|
||||
html.CommentNode: "#comment",
|
||||
}
|
||||
|
||||
// NodeName returns the node name of the first element in the selection.
|
||||
// It tries to behave in a similar way as the DOM's nodeName property
|
||||
// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName).
|
||||
//
|
||||
// Go's net/html package defines the following node types, listed with
|
||||
// the corresponding returned value from this function:
|
||||
//
|
||||
// ErrorNode : #error
|
||||
// TextNode : #text
|
||||
// DocumentNode : #document
|
||||
// ElementNode : the element's tag name
|
||||
// CommentNode : #comment
|
||||
// DoctypeNode : the name of the document type
|
||||
//
|
||||
func NodeName(s *Selection) string {
|
||||
if s.Length() == 0 {
|
||||
return ""
|
||||
}
|
||||
switch n := s.Get(0); n.Type {
|
||||
case html.ElementNode, html.DoctypeNode:
|
||||
return n.Data
|
||||
default:
|
||||
if n.Type >= 0 && int(n.Type) < len(nodeNames) {
|
||||
return nodeNames[n.Type]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// OuterHtml returns the outer HTML rendering of the first item in
|
||||
// the selection - that is, the HTML including the first element's
|
||||
// tag and attributes.
|
||||
//
|
||||
// Unlike InnerHtml, this is a function and not a method on the Selection,
|
||||
// because this is not a jQuery method (in javascript-land, this is
|
||||
// a property provided by the DOM).
|
||||
func OuterHtml(s *Selection) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
|
||||
if s.Length() == 0 {
|
||||
return "", nil
|
||||
}
|
||||
n := s.Get(0)
|
||||
if err := html.Render(&buf, n); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
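// Illustrative sketch (not part of the vendored source): because OuterHtml is
// a package-level function rather than a Selection method, the selection is
// passed as an argument. Assuming a parsed *Document named doc:
//
//	markup, err := OuterHtml(doc.Find("p"))
//	if err == nil {
//		// markup holds the first matched <p>, including its tag and attributes
//	}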
|
||||
|
||||
// Loop through all container nodes to search for the target node.
|
||||
func sliceContains(container []*html.Node, contained *html.Node) bool {
|
||||
for _, n := range container {
|
||||
if nodeContains(n, contained) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Checks if the contained node is within the container node.
|
||||
func nodeContains(container *html.Node, contained *html.Node) bool {
|
||||
// Check if the parent of the contained node is the container node, traversing
|
||||
// upward until the top is reached, or the container is found.
|
||||
for contained = contained.Parent; contained != nil; contained = contained.Parent {
|
||||
if container == contained {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Checks if the target node is in the slice of nodes.
|
||||
func isInSlice(slice []*html.Node, node *html.Node) bool {
|
||||
return indexInSlice(slice, node) > -1
|
||||
}
|
||||
|
||||
// Returns the index of the target node in the slice, or -1.
|
||||
func indexInSlice(slice []*html.Node, node *html.Node) int {
|
||||
if node != nil {
|
||||
for i, n := range slice {
|
||||
if n == node {
|
||||
return i
|
||||
}
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// Appends the new nodes to the target slice, making sure no duplicate is added.
|
||||
// There is no check to the original state of the target slice, so it may still
|
||||
// contain duplicates. The target slice is returned because append() may create
|
||||
// a new underlying array. If targetSet is nil, a local set is created with the
|
||||
// target if len(target) + len(nodes) is greater than minNodesForSet.
|
||||
func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node {
|
||||
// if there are not that many nodes, don't use the map, faster to just use nested loops
|
||||
// (unless a non-nil targetSet is passed, in which case the caller knows better).
|
||||
if targetSet == nil && len(target)+len(nodes) < minNodesForSet {
|
||||
for _, n := range nodes {
|
||||
if !isInSlice(target, n) {
|
||||
target = append(target, n)
|
||||
}
|
||||
}
|
||||
return target
|
||||
}
|
||||
|
||||
// if a targetSet is passed, then assume it is reliable, otherwise create one
|
||||
// and initialize it with the current target contents.
|
||||
if targetSet == nil {
|
||||
targetSet = make(map[*html.Node]bool, len(target))
|
||||
for _, n := range target {
|
||||
targetSet[n] = true
|
||||
}
|
||||
}
|
||||
for _, n := range nodes {
|
||||
if !targetSet[n] {
|
||||
target = append(target, n)
|
||||
targetSet[n] = true
|
||||
}
|
||||
}
|
||||
|
||||
return target
|
||||
}
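// Illustrative sketch (not part of the vendored source) of how the helper
// behaves: nodes already present in target, or repeated within nodes, are
// dropped, and the (possibly reallocated) slice must be reassigned.
//
//	a := &html.Node{Type: html.ElementNode, Data: "a"}
//	b := &html.Node{Type: html.ElementNode, Data: "b"}
//	target := []*html.Node{a}
//	target = appendWithoutDuplicates(target, []*html.Node{a, b, b}, nil)
//	// target now holds [a, b]; with a nil targetSet and this few nodes the
//	// map-based path is skipped (see minNodesForSet above).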
|
||||
|
||||
// Loop through a selection, returning only those nodes that pass the predicate
|
||||
// function.
|
||||
func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) {
|
||||
for i, n := range sel.Nodes {
|
||||
if predicate(i, newSingleSelection(n, sel.document)) {
|
||||
result = append(result, n)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Creates a new Selection object based on the specified nodes, and keeps the
|
||||
// source Selection object on the stack (linked list).
|
||||
func pushStack(fromSel *Selection, nodes []*html.Node) *Selection {
|
||||
result := &Selection{nodes, fromSel.document, fromSel}
|
||||
return result
|
||||
}
|
||||
5
vendor/github.com/agnivade/levenshtein/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
coverage.txt
|
||||
fuzz/fuzz-fuzz.zip
|
||||
fuzz/corpus/corpus/*
|
||||
fuzz/corpus/suppressions/*
|
||||
fuzz/corpus/crashes/*
|
||||
7
vendor/github.com/agnivade/levenshtein/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.9.x
|
||||
- 1.10.x
|
||||
- 1.11.x
|
||||
- tip
|
||||
21
vendor/github.com/agnivade/levenshtein/License.txt
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Agniva De Sarker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
13
vendor/github.com/agnivade/levenshtein/Makefile
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
all: test install
|
||||
|
||||
install:
|
||||
go install
|
||||
|
||||
lint:
|
||||
gofmt -l -s -w . && go tool vet -all . && golint
|
||||
|
||||
test:
|
||||
go test -race -v -coverprofile=coverage.txt -covermode=atomic
|
||||
|
||||
bench:
|
||||
go test -run=XXX -bench=. -benchmem
|
||||
57
vendor/github.com/agnivade/levenshtein/README.md
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
levenshtein [](https://travis-ci.org/agnivade/levenshtein) [](https://goreportcard.com/report/github.com/agnivade/levenshtein) [](https://godoc.org/github.com/agnivade/levenshtein)
|
||||
===========
|
||||
|
||||
[Go](http://golang.org) package to calculate the [Levenshtein Distance](http://en.wikipedia.org/wiki/Levenshtein_distance)
|
||||
|
||||
The library is fully capable of working with non-ASCII strings. However, the strings are not normalized; that is left as a user-dependent concern. Please normalize the strings before passing them to the library if you have such a requirement.
|
||||
- https://blog.golang.org/normalization
|
||||
|
||||
Install
|
||||
-------
|
||||
|
||||
go get github.com/agnivade/levenshtein
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/agnivade/levenshtein"
|
||||
)
|
||||
|
||||
func main() {
|
||||
s1 := "kitten"
|
||||
s2 := "sitting"
|
||||
distance := levenshtein.ComputeDistance(s1, s2)
|
||||
fmt.Printf("The distance between %s and %s is %d.\n", s1, s2, distance)
|
||||
// Output:
|
||||
// The distance between kitten and sitting is 3.
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
Benchmarks
|
||||
----------
|
||||
|
||||
```
|
||||
name time/op
|
||||
Simple/ASCII-4 537ns ± 2%
|
||||
Simple/French-4 956ns ± 0%
|
||||
Simple/Nordic-4 1.95µs ± 1%
|
||||
Simple/Tibetan-4 1.53µs ± 2%
|
||||
|
||||
name alloc/op
|
||||
Simple/ASCII-4 96.0B ± 0%
|
||||
Simple/French-4 128B ± 0%
|
||||
Simple/Nordic-4 192B ± 0%
|
||||
Simple/Tibetan-4 144B ± 0%
|
||||
|
||||
name allocs/op
|
||||
Simple/ASCII-4 1.00 ± 0%
|
||||
Simple/French-4 1.00 ± 0%
|
||||
Simple/Nordic-4 1.00 ± 0%
|
||||
Simple/Tibetan-4 1.00 ± 0%
|
||||
```
|
||||
1
vendor/github.com/agnivade/levenshtein/go.mod
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module github.com/agnivade/levenshtein
|
||||
75
vendor/github.com/agnivade/levenshtein/levenshtein.go
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
// Package levenshtein is a Go implementation to calculate Levenshtein Distance.
|
||||
//
|
||||
// Implementation taken from
|
||||
// https://gist.github.com/andrei-m/982927#gistcomment-1931258
|
||||
package levenshtein
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// ComputeDistance computes the levenshtein distance between the two
|
||||
// strings passed as arguments. The return value is the Levenshtein distance.
|
||||
//
|
||||
// Works on runes (Unicode code points) but does not normalize
|
||||
// the input strings. See https://blog.golang.org/normalization
|
||||
// and the golang.org/x/text/unicode/norm package.
|
||||
func ComputeDistance(a, b string) int {
|
||||
if len(a) == 0 {
|
||||
return utf8.RuneCountInString(b)
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
return utf8.RuneCountInString(a)
|
||||
}
|
||||
|
||||
if a == b {
|
||||
return 0
|
||||
}
|
||||
|
||||
// We need to convert to []rune if the strings are non-ascii.
|
||||
// This could be avoided by using utf8.RuneCountInString
|
||||
// and then doing some juggling with rune indices.
|
||||
// The primary challenge is keeping track of the previous rune.
|
||||
// With a range loop, it's not that easy. And with a for-loop
|
||||
// we need to keep track of the inter-rune width using utf8.DecodeRuneInString
|
||||
s1 := []rune(a)
|
||||
s2 := []rune(b)
|
||||
|
||||
// swap to save some memory O(min(a,b)) instead of O(a)
|
||||
if len(s1) > len(s2) {
|
||||
s1, s2 = s2, s1
|
||||
}
|
||||
lenS1 := len(s1)
|
||||
lenS2 := len(s2)
|
||||
|
||||
// init the row
|
||||
x := make([]int, lenS1+1)
|
||||
for i := 0; i <= lenS1; i++ {
|
||||
x[i] = i
|
||||
}
|
||||
|
||||
// fill in the rest
|
||||
for i := 1; i <= lenS2; i++ {
|
||||
prev := i
|
||||
var current int
|
||||
|
||||
for j := 1; j <= lenS1; j++ {
|
||||
|
||||
if s2[i-1] == s1[j-1] {
|
||||
current = x[j-1] // match
|
||||
} else {
|
||||
current = min(min(x[j-1]+1, prev+1), x[j]+1)
|
||||
}
|
||||
x[j-1] = prev
|
||||
prev = current
|
||||
}
|
||||
x[lenS1] = prev
|
||||
}
|
||||
return x[lenS1]
|
||||
}
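// Illustrative sketch (not part of the vendored source): because the distance
// is computed over runes, multi-byte characters count as single edits.
//
//	levenshtein.ComputeDistance("kitten", "sitting") // 3
//	levenshtein.ComputeDistance("résumé", "resume")  // 2 (two substitutions)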
|
||||
|
||||
func min(a, b int) int {
|
||||
if a < b {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
}
|
||||
14
vendor/github.com/andybalholm/cascadia/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.3
|
||||
- 1.4
|
||||
|
||||
install:
|
||||
- go get github.com/andybalholm/cascadia
|
||||
|
||||
script:
|
||||
- go test -v
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
24
vendor/github.com/andybalholm/cascadia/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
Copyright (c) 2011 Andy Balholm. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
7
vendor/github.com/andybalholm/cascadia/README.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
# cascadia
|
||||
|
||||
[](https://travis-ci.org/andybalholm/cascadia)
|
||||
|
||||
The Cascadia package implements CSS selectors for use with the parse trees produced by the html package.
|
||||
|
||||
To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia) the command line tool, a thin wrapper around this package.
|
||||
3
vendor/github.com/andybalholm/cascadia/go.mod
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module "github.com/andybalholm/cascadia"
|
||||
|
||||
require "golang.org/x/net" v0.0.0-20180218175443-cbe0f9307d01
|
||||
835
vendor/github.com/andybalholm/cascadia/parser.go
generated
vendored
Normal file
@@ -0,0 +1,835 @@
|
||||
// Package cascadia is an implementation of CSS selectors.
|
||||
package cascadia
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// a parser for CSS selectors
|
||||
type parser struct {
|
||||
s string // the source text
|
||||
i int // the current position
|
||||
}
|
||||
|
||||
// parseEscape parses a backslash escape.
|
||||
func (p *parser) parseEscape() (result string, err error) {
|
||||
if len(p.s) < p.i+2 || p.s[p.i] != '\\' {
|
||||
return "", errors.New("invalid escape sequence")
|
||||
}
|
||||
|
||||
start := p.i + 1
|
||||
c := p.s[start]
|
||||
switch {
|
||||
case c == '\r' || c == '\n' || c == '\f':
|
||||
return "", errors.New("escaped line ending outside string")
|
||||
case hexDigit(c):
|
||||
// unicode escape (hex)
|
||||
var i int
|
||||
for i = start; i < p.i+6 && i < len(p.s) && hexDigit(p.s[i]); i++ {
|
||||
// empty
|
||||
}
|
||||
v, _ := strconv.ParseUint(p.s[start:i], 16, 21)
|
||||
if len(p.s) > i {
|
||||
switch p.s[i] {
|
||||
case '\r':
|
||||
i++
|
||||
if len(p.s) > i && p.s[i] == '\n' {
|
||||
i++
|
||||
}
|
||||
case ' ', '\t', '\n', '\f':
|
||||
i++
|
||||
}
|
||||
}
|
||||
p.i = i
|
||||
return string(rune(v)), nil
|
||||
}
|
||||
|
||||
// Return the literal character after the backslash.
|
||||
result = p.s[start : start+1]
|
||||
p.i += 2
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func hexDigit(c byte) bool {
|
||||
return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F'
|
||||
}
|
||||
|
||||
// nameStart returns whether c can be the first character of an identifier
|
||||
// (not counting an initial hyphen, or an escape sequence).
|
||||
func nameStart(c byte) bool {
|
||||
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127
|
||||
}
|
||||
|
||||
// nameChar returns whether c can be a character within an identifier
|
||||
// (not counting an escape sequence).
|
||||
func nameChar(c byte) bool {
|
||||
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 ||
|
||||
c == '-' || '0' <= c && c <= '9'
|
||||
}
|
||||
|
||||
// parseIdentifier parses an identifier.
|
||||
func (p *parser) parseIdentifier() (result string, err error) {
|
||||
startingDash := false
|
||||
if len(p.s) > p.i && p.s[p.i] == '-' {
|
||||
startingDash = true
|
||||
p.i++
|
||||
}
|
||||
|
||||
if len(p.s) <= p.i {
|
||||
return "", errors.New("expected identifier, found EOF instead")
|
||||
}
|
||||
|
||||
if c := p.s[p.i]; !(nameStart(c) || c == '\\') {
|
||||
return "", fmt.Errorf("expected identifier, found %c instead", c)
|
||||
}
|
||||
|
||||
result, err = p.parseName()
|
||||
if startingDash && err == nil {
|
||||
result = "-" + result
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parseName parses a name (which is like an identifier, but doesn't have
|
||||
// extra restrictions on the first character).
|
||||
func (p *parser) parseName() (result string, err error) {
|
||||
i := p.i
|
||||
loop:
|
||||
for i < len(p.s) {
|
||||
c := p.s[i]
|
||||
switch {
|
||||
case nameChar(c):
|
||||
start := i
|
||||
for i < len(p.s) && nameChar(p.s[i]) {
|
||||
i++
|
||||
}
|
||||
result += p.s[start:i]
|
||||
case c == '\\':
|
||||
p.i = i
|
||||
val, err := p.parseEscape()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
i = p.i
|
||||
result += val
|
||||
default:
|
||||
break loop
|
||||
}
|
||||
}
|
||||
|
||||
if result == "" {
|
||||
return "", errors.New("expected name, found EOF instead")
|
||||
}
|
||||
|
||||
p.i = i
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseString parses a single- or double-quoted string.
|
||||
func (p *parser) parseString() (result string, err error) {
|
||||
i := p.i
|
||||
if len(p.s) < i+2 {
|
||||
return "", errors.New("expected string, found EOF instead")
|
||||
}
|
||||
|
||||
quote := p.s[i]
|
||||
i++
|
||||
|
||||
loop:
|
||||
for i < len(p.s) {
|
||||
switch p.s[i] {
|
||||
case '\\':
|
||||
if len(p.s) > i+1 {
|
||||
switch c := p.s[i+1]; c {
|
||||
case '\r':
|
||||
if len(p.s) > i+2 && p.s[i+2] == '\n' {
|
||||
i += 3
|
||||
continue loop
|
||||
}
|
||||
fallthrough
|
||||
case '\n', '\f':
|
||||
i += 2
|
||||
continue loop
|
||||
}
|
||||
}
|
||||
p.i = i
|
||||
val, err := p.parseEscape()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
i = p.i
|
||||
result += val
|
||||
case quote:
|
||||
break loop
|
||||
case '\r', '\n', '\f':
|
||||
return "", errors.New("unexpected end of line in string")
|
||||
default:
|
||||
start := i
|
||||
for i < len(p.s) {
|
||||
if c := p.s[i]; c == quote || c == '\\' || c == '\r' || c == '\n' || c == '\f' {
|
||||
break
|
||||
}
|
||||
i++
|
||||
}
|
||||
result += p.s[start:i]
|
||||
}
|
||||
}
|
||||
|
||||
if i >= len(p.s) {
|
||||
return "", errors.New("EOF in string")
|
||||
}
|
||||
|
||||
// Consume the final quote.
|
||||
i++
|
||||
|
||||
p.i = i
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseRegex parses a regular expression; the end is defined by encountering an
|
||||
// unmatched closing ')' or ']' which is not consumed
|
||||
func (p *parser) parseRegex() (rx *regexp.Regexp, err error) {
|
||||
i := p.i
|
||||
if len(p.s) < i+2 {
|
||||
return nil, errors.New("expected regular expression, found EOF instead")
|
||||
}
|
||||
|
||||
// number of open parens or brackets;
|
||||
// when it becomes negative, finished parsing regex
|
||||
open := 0
|
||||
|
||||
loop:
|
||||
for i < len(p.s) {
|
||||
switch p.s[i] {
|
||||
case '(', '[':
|
||||
open++
|
||||
case ')', ']':
|
||||
open--
|
||||
if open < 0 {
|
||||
break loop
|
||||
}
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
if i >= len(p.s) {
|
||||
return nil, errors.New("EOF in regular expression")
|
||||
}
|
||||
rx, err = regexp.Compile(p.s[p.i:i])
|
||||
p.i = i
|
||||
return rx, err
|
||||
}
|
||||
|
||||
// skipWhitespace consumes whitespace characters and comments.
|
||||
// It returns true if there was actually anything to skip.
|
||||
func (p *parser) skipWhitespace() bool {
|
||||
i := p.i
|
||||
for i < len(p.s) {
|
||||
switch p.s[i] {
|
||||
case ' ', '\t', '\r', '\n', '\f':
|
||||
i++
|
||||
continue
|
||||
case '/':
|
||||
if strings.HasPrefix(p.s[i:], "/*") {
|
||||
end := strings.Index(p.s[i+len("/*"):], "*/")
|
||||
if end != -1 {
|
||||
i += end + len("/**/")
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if i > p.i {
|
||||
p.i = i
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// consumeParenthesis consumes an opening parenthesis and any following
|
||||
// whitespace. It returns true if there was actually a parenthesis to skip.
|
||||
func (p *parser) consumeParenthesis() bool {
|
||||
if p.i < len(p.s) && p.s[p.i] == '(' {
|
||||
p.i++
|
||||
p.skipWhitespace()
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// consumeClosingParenthesis consumes a closing parenthesis and any preceding
|
||||
// whitespace. It returns true if there was actually a parenthesis to skip.
|
||||
func (p *parser) consumeClosingParenthesis() bool {
|
||||
i := p.i
|
||||
p.skipWhitespace()
|
||||
if p.i < len(p.s) && p.s[p.i] == ')' {
|
||||
p.i++
|
||||
return true
|
||||
}
|
||||
p.i = i
|
||||
return false
|
||||
}
|
||||
|
||||
// parseTypeSelector parses a type selector (one that matches by tag name).
|
||||
func (p *parser) parseTypeSelector() (result Selector, err error) {
|
||||
tag, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return typeSelector(tag), nil
|
||||
}
|
||||
|
||||
// parseIDSelector parses a selector that matches by id attribute.
|
||||
func (p *parser) parseIDSelector() (Selector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected id selector (#id), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != '#' {
|
||||
return nil, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
id, err := p.parseName()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return attributeEqualsSelector("id", id), nil
|
||||
}
|
||||
|
||||
// parseClassSelector parses a selector that matches by class attribute.
|
||||
func (p *parser) parseClassSelector() (Selector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected class selector (.class), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != '.' {
|
||||
return nil, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
class, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return attributeIncludesSelector("class", class), nil
|
||||
}
|
||||
|
||||
// parseAttributeSelector parses a selector that matches by attribute value.
|
||||
func (p *parser) parseAttributeSelector() (Selector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != '[' {
|
||||
return nil, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
p.skipWhitespace()
|
||||
key, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
|
||||
if p.s[p.i] == ']' {
|
||||
p.i++
|
||||
return attributeExistsSelector(key), nil
|
||||
}
|
||||
|
||||
if p.i+2 >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
|
||||
op := p.s[p.i : p.i+2]
|
||||
if op[0] == '=' {
|
||||
op = "="
|
||||
} else if op[1] != '=' {
|
||||
return nil, fmt.Errorf(`expected equality operator, found "%s" instead`, op)
|
||||
}
|
||||
p.i += len(op)
|
||||
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
var val string
|
||||
var rx *regexp.Regexp
|
||||
if op == "#=" {
|
||||
rx, err = p.parseRegex()
|
||||
} else {
|
||||
switch p.s[p.i] {
|
||||
case '\'', '"':
|
||||
val, err = p.parseString()
|
||||
default:
|
||||
val, err = p.parseIdentifier()
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
if p.s[p.i] != ']' {
|
||||
return nil, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i])
|
||||
}
|
||||
p.i++
|
||||
|
||||
switch op {
|
||||
case "=":
|
||||
return attributeEqualsSelector(key, val), nil
|
||||
case "!=":
|
||||
return attributeNotEqualSelector(key, val), nil
|
||||
case "~=":
|
||||
return attributeIncludesSelector(key, val), nil
|
||||
case "|=":
|
||||
return attributeDashmatchSelector(key, val), nil
|
||||
case "^=":
|
||||
return attributePrefixSelector(key, val), nil
|
||||
case "$=":
|
||||
return attributeSuffixSelector(key, val), nil
|
||||
case "*=":
|
||||
return attributeSubstringSelector(key, val), nil
|
||||
case "#=":
|
||||
return attributeRegexSelector(key, rx), nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("attribute operator %q is not supported", op)
|
||||
}
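// Illustrative summary (not part of the vendored source) of the attribute
// operators accepted by the switch above; "!=" and "#=" are non-standard
// extensions supported by this parser:
//
//	=   equals            !=  not equal
//	~=  includes word     |=  dash-match (val or val-*)
//	^=  prefix            $=  suffix
//	*=  substring         #=  regular expression match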
|
||||
|
||||
var errExpectedParenthesis = errors.New("expected '(' but didn't find it")
|
||||
var errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it")
|
||||
var errUnmatchedParenthesis = errors.New("unmatched '('")
|
||||
|
||||
// parsePseudoclassSelector parses a pseudoclass selector like :not(p).
|
||||
func (p *parser) parsePseudoclassSelector() (Selector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != ':' {
|
||||
return nil, fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
name, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
name = toLowerASCII(name)
|
||||
|
||||
switch name {
|
||||
case "not", "has", "haschild":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
}
|
||||
sel, parseErr := p.parseSelectorGroup()
|
||||
if parseErr != nil {
|
||||
return nil, parseErr
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "not":
|
||||
return negatedSelector(sel), nil
|
||||
case "has":
|
||||
return hasDescendantSelector(sel), nil
|
||||
case "haschild":
|
||||
return hasChildSelector(sel), nil
|
||||
}
|
||||
|
||||
case "contains", "containsown":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
}
|
||||
if p.i == len(p.s) {
|
||||
return nil, errUnmatchedParenthesis
|
||||
}
|
||||
var val string
|
||||
switch p.s[p.i] {
|
||||
case '\'', '"':
|
||||
val, err = p.parseString()
|
||||
default:
|
||||
val, err = p.parseIdentifier()
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
val = strings.ToLower(val)
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in pseudo selector")
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "contains":
|
||||
return textSubstrSelector(val), nil
|
||||
case "containsown":
|
||||
return ownTextSubstrSelector(val), nil
|
||||
}
|
||||
|
||||
case "matches", "matchesown":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
}
|
||||
rx, err := p.parseRegex()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in pseudo selector")
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "matches":
|
||||
return textRegexSelector(rx), nil
|
||||
case "matchesown":
|
||||
return ownTextRegexSelector(rx), nil
|
||||
}
|
||||
|
||||
case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
}
|
||||
a, b, err := p.parseNth()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
}
|
||||
if a == 0 {
|
||||
switch name {
|
||||
case "nth-child":
|
||||
return simpleNthChildSelector(b, false), nil
|
||||
case "nth-of-type":
|
||||
return simpleNthChildSelector(b, true), nil
|
||||
case "nth-last-child":
|
||||
return simpleNthLastChildSelector(b, false), nil
|
||||
case "nth-last-of-type":
|
||||
return simpleNthLastChildSelector(b, true), nil
|
||||
}
|
||||
}
|
||||
return nthChildSelector(a, b,
|
||||
name == "nth-last-child" || name == "nth-last-of-type",
|
||||
name == "nth-of-type" || name == "nth-last-of-type"),
|
||||
nil
|
||||
|
||||
case "first-child":
|
||||
return simpleNthChildSelector(1, false), nil
|
||||
case "last-child":
|
||||
return simpleNthLastChildSelector(1, false), nil
|
||||
case "first-of-type":
|
||||
return simpleNthChildSelector(1, true), nil
|
||||
case "last-of-type":
|
||||
return simpleNthLastChildSelector(1, true), nil
|
||||
case "only-child":
|
||||
return onlyChildSelector(false), nil
|
||||
case "only-of-type":
|
||||
return onlyChildSelector(true), nil
|
||||
case "input":
|
||||
return inputSelector, nil
|
||||
case "empty":
|
||||
return emptyElementSelector, nil
|
||||
case "root":
|
||||
return rootSelector, nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("unknown pseudoclass :%s", name)
|
||||
}
|
||||
|
||||
// parseInteger parses a decimal integer.
|
||||
func (p *parser) parseInteger() (int, error) {
|
||||
i := p.i
|
||||
start := i
|
||||
for i < len(p.s) && '0' <= p.s[i] && p.s[i] <= '9' {
|
||||
i++
|
||||
}
|
||||
if i == start {
|
||||
return 0, errors.New("expected integer, but didn't find it")
|
||||
}
|
||||
p.i = i
|
||||
|
||||
val, err := strconv.Atoi(p.s[start:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return val, nil
|
||||
}
|
||||
|
||||
// parseNth parses the argument for :nth-child (normally of the form an+b).
|
||||
func (p *parser) parseNth() (a, b int, err error) {
|
||||
// initial state
|
||||
if p.i >= len(p.s) {
|
||||
goto eof
|
||||
}
|
||||
switch p.s[p.i] {
|
||||
case '-':
|
||||
p.i++
|
||||
goto negativeA
|
||||
case '+':
|
||||
p.i++
|
||||
goto positiveA
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
goto positiveA
|
||||
case 'n', 'N':
|
||||
a = 1
|
||||
p.i++
|
||||
goto readN
|
||||
case 'o', 'O', 'e', 'E':
|
||||
id, nameErr := p.parseName()
|
||||
if nameErr != nil {
|
||||
return 0, 0, nameErr
|
||||
}
|
||||
id = toLowerASCII(id)
|
||||
if id == "odd" {
|
||||
return 2, 1, nil
|
||||
}
|
||||
if id == "even" {
|
||||
return 2, 0, nil
|
||||
}
|
||||
return 0, 0, fmt.Errorf("expected 'odd' or 'even', but found '%s' instead", id)
|
||||
default:
|
||||
goto invalid
|
||||
}
|
||||
|
||||
positiveA:
|
||||
if p.i >= len(p.s) {
|
||||
goto eof
|
||||
}
|
||||
switch p.s[p.i] {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
a, err = p.parseInteger()
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
goto readA
|
||||
case 'n', 'N':
|
||||
a = 1
|
||||
p.i++
|
||||
goto readN
|
||||
default:
|
||||
goto invalid
|
||||
}
|
||||
|
||||
negativeA:
|
||||
if p.i >= len(p.s) {
|
||||
goto eof
|
||||
}
|
||||
switch p.s[p.i] {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
a, err = p.parseInteger()
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
a = -a
|
||||
goto readA
|
||||
case 'n', 'N':
|
||||
a = -1
|
||||
p.i++
|
||||
goto readN
|
||||
default:
|
||||
goto invalid
|
||||
}
|
||||
|
||||
readA:
|
||||
if p.i >= len(p.s) {
|
||||
goto eof
|
||||
}
|
||||
switch p.s[p.i] {
|
||||
case 'n', 'N':
|
||||
p.i++
|
||||
goto readN
|
||||
default:
|
||||
// The number we read as a is actually b.
|
||||
return 0, a, nil
|
||||
}
|
||||
|
||||
readN:
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
goto eof
|
||||
}
|
||||
switch p.s[p.i] {
|
||||
case '+':
|
||||
p.i++
|
||||
p.skipWhitespace()
|
||||
b, err = p.parseInteger()
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
return a, b, nil
|
||||
case '-':
|
||||
p.i++
|
||||
p.skipWhitespace()
|
||||
b, err = p.parseInteger()
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
return a, -b, nil
|
||||
default:
|
||||
return a, 0, nil
|
||||
}
|
||||
|
||||
eof:
|
||||
return 0, 0, errors.New("unexpected EOF while attempting to parse expression of form an+b")
|
||||
|
||||
invalid:
|
||||
return 0, 0, errors.New("unexpected character while attempting to parse expression of form an+b")
|
||||
}
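// Illustrative sketch (not part of the vendored source) of how parseNth maps
// common :nth-child arguments to the (a, b) pair returned above:
//
//	"odd"   -> a=2,  b=1
//	"even"  -> a=2,  b=0
//	"3"     -> a=0,  b=3   (a bare number is b, handled at readA)
//	"2n+1"  -> a=2,  b=1
//	"-n+3"  -> a=-1, b=3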
|
||||
|
||||
// parseSimpleSelectorSequence parses a selector sequence that applies to
|
||||
// a single element.
|
||||
func (p *parser) parseSimpleSelectorSequence() (Selector, error) {
|
||||
var result Selector
|
||||
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("expected selector, found EOF instead")
|
||||
}
|
||||
|
||||
switch p.s[p.i] {
|
||||
case '*':
|
||||
// It's the universal selector. Just skip over it, since it doesn't affect the meaning.
|
||||
p.i++
|
||||
case '#', '.', '[', ':':
|
||||
// There's no type selector. Wait to process the other till the main loop.
|
||||
default:
|
||||
r, err := p.parseTypeSelector()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = r
|
||||
}
|
||||
|
||||
loop:
|
||||
for p.i < len(p.s) {
|
||||
var ns Selector
|
||||
var err error
|
||||
switch p.s[p.i] {
|
||||
case '#':
|
||||
ns, err = p.parseIDSelector()
|
||||
case '.':
|
||||
ns, err = p.parseClassSelector()
|
||||
case '[':
|
||||
ns, err = p.parseAttributeSelector()
|
||||
case ':':
|
||||
ns, err = p.parsePseudoclassSelector()
|
||||
default:
|
||||
break loop
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if result == nil {
|
||||
result = ns
|
||||
} else {
|
||||
result = intersectionSelector(result, ns)
|
||||
}
|
||||
}
|
||||
|
||||
if result == nil {
|
||||
result = func(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseSelector parses a selector that may include combinators.
|
||||
func (p *parser) parseSelector() (result Selector, err error) {
|
||||
p.skipWhitespace()
|
||||
result, err = p.parseSimpleSelectorSequence()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
var combinator byte
|
||||
if p.skipWhitespace() {
|
||||
combinator = ' '
|
||||
}
|
||||
if p.i >= len(p.s) {
|
||||
return
|
||||
}
|
||||
|
||||
switch p.s[p.i] {
|
||||
case '+', '>', '~':
|
||||
combinator = p.s[p.i]
|
||||
p.i++
|
||||
p.skipWhitespace()
|
||||
case ',', ')':
|
||||
// These characters can't begin a selector, but they can legally occur after one.
|
||||
return
|
||||
}
|
||||
|
||||
if combinator == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
c, err := p.parseSimpleSelectorSequence()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch combinator {
|
||||
case ' ':
|
||||
result = descendantSelector(result, c)
|
||||
case '>':
|
||||
result = childSelector(result, c)
|
||||
case '+':
|
||||
result = siblingSelector(result, c, true)
|
||||
case '~':
|
||||
result = siblingSelector(result, c, false)
|
||||
}
|
||||
}
|
||||
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// parseSelectorGroup parses a group of selectors, separated by commas.
|
||||
func (p *parser) parseSelectorGroup() (result Selector, err error) {
|
||||
result, err = p.parseSelector()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for p.i < len(p.s) {
|
||||
if p.s[p.i] != ',' {
|
||||
return result, nil
|
||||
}
|
||||
p.i++
|
||||
c, err := p.parseSelector()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = unionSelector(result, c)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
622
vendor/github.com/andybalholm/cascadia/selector.go
generated
vendored
Normal file
@@ -0,0 +1,622 @@
|
||||
package cascadia
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// the Selector type, and functions for creating them
|
||||
|
||||
// A Selector is a function which tells whether a node matches or not.
|
||||
type Selector func(*html.Node) bool
|
||||
|
||||
// hasChildMatch returns whether n has any child that matches a.
|
||||
func hasChildMatch(n *html.Node, a Selector) bool {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if a(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// hasDescendantMatch performs a depth-first search of n's descendants,
|
||||
// testing whether any of them match a. It returns true as soon as a match is
|
||||
// found, or false if no match is found.
|
||||
func hasDescendantMatch(n *html.Node, a Selector) bool {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if a(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Compile parses a selector and returns, if successful, a Selector object
|
||||
// that can be used to match against html.Node objects.
|
||||
func Compile(sel string) (Selector, error) {
|
||||
p := &parser{s: sel}
|
||||
compiled, err := p.parseSelectorGroup()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if p.i < len(sel) {
|
||||
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
|
||||
}
|
||||
|
||||
return compiled, nil
|
||||
}
|
||||
|
||||
// MustCompile is like Compile, but panics instead of returning an error.
|
||||
func MustCompile(sel string) Selector {
|
||||
compiled, err := Compile(sel)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return compiled
|
||||
}
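// Illustrative usage sketch (not part of the vendored source): compile a
// selector once and reuse it against parsed documents. The HTML string and
// selector are placeholders.
//
//	node, err := html.Parse(strings.NewReader(page))
//	if err != nil {
//		return err
//	}
//	sel := cascadia.MustCompile(`div.content > a[href^="https"]`)
//	for _, a := range sel.MatchAll(node) {
//		// a is the *html.Node of each matching anchor element
//	}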
|
||||
|
||||
// MatchAll returns a slice of the nodes that match the selector,
|
||||
// from n and its children.
|
||||
func (s Selector) MatchAll(n *html.Node) []*html.Node {
|
||||
return s.matchAllInto(n, nil)
|
||||
}
|
||||
|
||||
func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node {
|
||||
if s(n) {
|
||||
storage = append(storage, n)
|
||||
}
|
||||
|
||||
for child := n.FirstChild; child != nil; child = child.NextSibling {
|
||||
storage = s.matchAllInto(child, storage)
|
||||
}
|
||||
|
||||
return storage
|
||||
}
|
||||
|
||||
// Match returns true if the node matches the selector.
|
||||
func (s Selector) Match(n *html.Node) bool {
|
||||
return s(n)
|
||||
}
|
||||
|
||||
// MatchFirst returns the first node that matches s, from n and its children.
|
||||
func (s Selector) MatchFirst(n *html.Node) *html.Node {
|
||||
if s.Match(n) {
|
||||
return n
|
||||
}
|
||||
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
m := s.MatchFirst(c)
|
||||
if m != nil {
|
||||
return m
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Filter returns the nodes in nodes that match the selector.
|
||||
func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) {
|
||||
for _, n := range nodes {
|
||||
if s(n) {
|
||||
result = append(result, n)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// typeSelector returns a Selector that matches elements with a given tag name.
|
||||
func typeSelector(tag string) Selector {
|
||||
tag = toLowerASCII(tag)
|
||||
return func(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode && n.Data == tag
|
||||
}
|
||||
}
|
||||
|
||||
// toLowerASCII returns s with all ASCII capital letters lowercased.
|
||||
func toLowerASCII(s string) string {
|
||||
var b []byte
|
||||
for i := 0; i < len(s); i++ {
|
||||
if c := s[i]; 'A' <= c && c <= 'Z' {
|
||||
if b == nil {
|
||||
b = make([]byte, len(s))
|
||||
copy(b, s)
|
||||
}
|
||||
b[i] = s[i] + ('a' - 'A')
|
||||
}
|
||||
}
|
||||
|
||||
if b == nil {
|
||||
return s
|
||||
}
|
||||
|
||||
return string(b)
|
||||
}
|
||||
|
||||
// attributeSelector returns a Selector that matches elements
|
||||
// where the attribute named key satisifes the function f.
|
||||
func attributeSelector(key string, f func(string) bool) Selector {
|
||||
key = toLowerASCII(key)
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
for _, a := range n.Attr {
|
||||
if a.Key == key && f(a.Val) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// attributeExistsSelector returns a Selector that matches elements that have
|
||||
// an attribute named key.
|
||||
func attributeExistsSelector(key string) Selector {
|
||||
return attributeSelector(key, func(string) bool { return true })
|
||||
}
|
||||
|
||||
// attributeEqualsSelector returns a Selector that matches elements where
|
||||
// the attribute named key has the value val.
|
||||
func attributeEqualsSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
return s == val
|
||||
})
|
||||
}
|
||||
|
||||
// attributeNotEqualSelector returns a Selector that matches elements where
|
||||
// the attribute named key does not have the value val.
|
||||
func attributeNotEqualSelector(key, val string) Selector {
|
||||
key = toLowerASCII(key)
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
for _, a := range n.Attr {
|
||||
if a.Key == key && a.Val == val {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// attributeIncludesSelector returns a Selector that matches elements where
|
||||
// the attribute named key is a whitespace-separated list that includes val.
|
||||
func attributeIncludesSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
for s != "" {
|
||||
i := strings.IndexAny(s, " \t\r\n\f")
|
||||
if i == -1 {
|
||||
return s == val
|
||||
}
|
||||
if s[:i] == val {
|
||||
return true
|
||||
}
|
||||
s = s[i+1:]
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// attributeDashmatchSelector returns a Selector that matches elements where
|
||||
// the attribute named key equals val or starts with val plus a hyphen.
|
||||
func attributeDashmatchSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
if s == val {
|
||||
return true
|
||||
}
|
||||
if len(s) <= len(val) {
|
||||
return false
|
||||
}
|
||||
if s[:len(val)] == val && s[len(val)] == '-' {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// attributePrefixSelector returns a Selector that matches elements where
|
||||
// the attribute named key starts with val.
|
||||
func attributePrefixSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
if strings.TrimSpace(s) == "" {
|
||||
return false
|
||||
}
|
||||
return strings.HasPrefix(s, val)
|
||||
})
|
||||
}
|
||||
|
||||
// attributeSuffixSelector returns a Selector that matches elements where
|
||||
// the attribute named key ends with val.
|
||||
func attributeSuffixSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
if strings.TrimSpace(s) == "" {
|
||||
return false
|
||||
}
|
||||
return strings.HasSuffix(s, val)
|
||||
})
|
||||
}
|
||||
|
||||
// attributeSubstringSelector returns a Selector that matches nodes where
|
||||
// the attribute named key contains val.
|
||||
func attributeSubstringSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
if strings.TrimSpace(s) == "" {
|
||||
return false
|
||||
}
|
||||
return strings.Contains(s, val)
|
||||
})
|
||||
}
|
||||
|
||||
// attributeRegexSelector returns a Selector that matches nodes where
|
||||
// the attribute named key matches the regular expression rx
|
||||
func attributeRegexSelector(key string, rx *regexp.Regexp) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
return rx.MatchString(s)
|
||||
})
|
||||
}
|
||||
|
||||
// intersectionSelector returns a selector that matches nodes that match
|
||||
// both a and b.
|
||||
func intersectionSelector(a, b Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return a(n) && b(n)
|
||||
}
|
||||
}
|
||||
|
||||
// unionSelector returns a selector that matches elements that match
|
||||
// either a or b.
|
||||
func unionSelector(a, b Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return a(n) || b(n)
|
||||
}
|
||||
}
|
||||
|
||||
// negatedSelector returns a selector that matches elements that do not match a.
|
||||
func negatedSelector(a Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
return !a(n)
|
||||
}
|
||||
}
|
||||
|
||||
// writeNodeText writes the text contained in n and its descendants to b.
|
||||
func writeNodeText(n *html.Node, b *bytes.Buffer) {
|
||||
switch n.Type {
|
||||
case html.TextNode:
|
||||
b.WriteString(n.Data)
|
||||
case html.ElementNode:
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
writeNodeText(c, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// nodeText returns the text contained in n and its descendants.
|
||||
func nodeText(n *html.Node) string {
|
||||
var b bytes.Buffer
|
||||
writeNodeText(n, &b)
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// nodeOwnText returns the contents of the text nodes that are direct
|
||||
// children of n.
|
||||
func nodeOwnText(n *html.Node) string {
|
||||
var b bytes.Buffer
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if c.Type == html.TextNode {
|
||||
b.WriteString(c.Data)
|
||||
}
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// textSubstrSelector returns a selector that matches nodes that
|
||||
// contain the given text.
|
||||
func textSubstrSelector(val string) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
text := strings.ToLower(nodeText(n))
|
||||
return strings.Contains(text, val)
|
||||
}
|
||||
}
|
||||
|
||||
// ownTextSubstrSelector returns a selector that matches nodes that
|
||||
// directly contain the given text
|
||||
func ownTextSubstrSelector(val string) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
text := strings.ToLower(nodeOwnText(n))
|
||||
return strings.Contains(text, val)
|
||||
}
|
||||
}
|
||||
|
||||
// textRegexSelector returns a selector that matches nodes whose text matches
|
||||
// the specified regular expression
|
||||
func textRegexSelector(rx *regexp.Regexp) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return rx.MatchString(nodeText(n))
|
||||
}
|
||||
}
|
||||
|
||||
// ownTextRegexSelector returns a selector that matches nodes whose text
|
||||
// directly matches the specified regular expression
|
||||
func ownTextRegexSelector(rx *regexp.Regexp) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return rx.MatchString(nodeOwnText(n))
|
||||
}
|
||||
}
|
||||
|
||||
// hasChildSelector returns a selector that matches elements
|
||||
// with a child that matches a.
|
||||
func hasChildSelector(a Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
return hasChildMatch(n, a)
|
||||
}
|
||||
}
|
||||
|
||||
// hasDescendantSelector returns a selector that matches elements
|
||||
// with any descendant that matches a.
|
||||
func hasDescendantSelector(a Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
return hasDescendantMatch(n, a)
|
||||
}
|
||||
}
|
||||
|
||||
// nthChildSelector returns a selector that implements :nth-child(an+b).
|
||||
// If last is true, implements :nth-last-child instead.
|
||||
// If ofType is true, implements :nth-of-type instead.
|
||||
func nthChildSelector(a, b int, last, ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
parent := n.Parent
|
||||
if parent == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if parent.Type == html.DocumentNode {
|
||||
return false
|
||||
}
|
||||
|
||||
i := -1
|
||||
count := 0
|
||||
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
|
||||
continue
|
||||
}
|
||||
count++
|
||||
if c == n {
|
||||
i = count
|
||||
if !last {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if i == -1 {
|
||||
// This shouldn't happen, since n should always be one of its parent's children.
|
||||
return false
|
||||
}
|
||||
|
||||
if last {
|
||||
i = count - i + 1
|
||||
}
|
||||
|
||||
i -= b
|
||||
if a == 0 {
|
||||
return i == 0
|
||||
}
|
||||
|
||||
return i%a == 0 && i/a >= 0
|
||||
}
|
||||
}
|
||||
|
||||
// simpleNthChildSelector returns a selector that implements :nth-child(b).
|
||||
// If ofType is true, implements :nth-of-type instead.
|
||||
func simpleNthChildSelector(b int, ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
parent := n.Parent
|
||||
if parent == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if parent.Type == html.DocumentNode {
|
||||
return false
|
||||
}
|
||||
|
||||
count := 0
|
||||
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
|
||||
continue
|
||||
}
|
||||
count++
|
||||
if c == n {
|
||||
return count == b
|
||||
}
|
||||
if count >= b {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// simpleNthLastChildSelector returns a selector that implements
|
||||
// :nth-last-child(b). If ofType is true, implements :nth-last-of-type
|
||||
// instead.
|
||||
func simpleNthLastChildSelector(b int, ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
parent := n.Parent
|
||||
if parent == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if parent.Type == html.DocumentNode {
|
||||
return false
|
||||
}
|
||||
|
||||
count := 0
|
||||
for c := parent.LastChild; c != nil; c = c.PrevSibling {
|
||||
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
|
||||
continue
|
||||
}
|
||||
count++
|
||||
if c == n {
|
||||
return count == b
|
||||
}
|
||||
if count >= b {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// onlyChildSelector returns a selector that implements :only-child.
|
||||
// If ofType is true, it implements :only-of-type instead.
|
||||
func onlyChildSelector(ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
parent := n.Parent
|
||||
if parent == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if parent.Type == html.DocumentNode {
|
||||
return false
|
||||
}
|
||||
|
||||
count := 0
|
||||
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
|
||||
continue
|
||||
}
|
||||
count++
|
||||
if count > 1 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return count == 1
|
||||
}
|
||||
}
|
||||
|
||||
// inputSelector is a Selector that matches input, select, textarea and button elements.
|
||||
func inputSelector(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button")
|
||||
}
|
||||
|
||||
// emptyElementSelector is a Selector that matches empty elements.
|
||||
func emptyElementSelector(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
switch c.Type {
|
||||
case html.ElementNode, html.TextNode:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// descendantSelector returns a Selector that matches an element if
|
||||
// it matches d and has an ancestor that matches a.
|
||||
func descendantSelector(a, d Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if !d(n) {
|
||||
return false
|
||||
}
|
||||
|
||||
for p := n.Parent; p != nil; p = p.Parent {
|
||||
if a(p) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// childSelector returns a Selector that matches an element if
|
||||
// it matches d and its parent matches a.
|
||||
func childSelector(a, d Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return d(n) && n.Parent != nil && a(n.Parent)
|
||||
}
|
||||
}
|
||||
|
||||
// siblingSelector returns a Selector that matches an element
|
||||
// if it matches s2 and in is preceded by an element that matches s1.
|
||||
// If adjacent is true, the sibling must be immediately before the element.
|
||||
func siblingSelector(s1, s2 Selector, adjacent bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if !s2(n) {
|
||||
return false
|
||||
}
|
||||
|
||||
if adjacent {
|
||||
for n = n.PrevSibling; n != nil; n = n.PrevSibling {
|
||||
if n.Type == html.TextNode || n.Type == html.CommentNode {
|
||||
continue
|
||||
}
|
||||
return s1(n)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Walk backwards looking for element that matches s1
|
||||
for c := n.PrevSibling; c != nil; c = c.PrevSibling {
|
||||
if s1(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// rootSelector implements :root
|
||||
func rootSelector(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
if n.Parent == nil {
|
||||
return false
|
||||
}
|
||||
return n.Parent.Type == html.DocumentNode
|
||||
}
|
||||
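For context, a minimal sketch of how the `Selector` API added above is typically driven, assuming the vendored import paths; the HTML snippet and variable names are illustrative and not part of the diff:

```go
package main

import (
    "fmt"
    "strings"

    "github.com/andybalholm/cascadia"
    "golang.org/x/net/html"
)

func main() {
    // Parse a small document; html.Parse returns the *html.Node tree that
    // Selector functions operate on.
    doc, err := html.Parse(strings.NewReader(`<ul><li class="a">one</li><li>two</li></ul>`))
    if err != nil {
        panic(err)
    }

    // Compile the selector once and reuse it; MustCompile panics instead of
    // returning an error.
    sel, err := cascadia.Compile("li.a")
    if err != nil {
        panic(err)
    }

    // MatchAll walks the tree and returns every matching element node.
    for _, n := range sel.MatchAll(doc) {
        fmt.Println(n.FirstChild.Data) // "one"
    }
}
```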
29
vendor/github.com/bmatcuk/doublestar/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,29 @@
# vi
*~
*.swp
*.swo

# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe
*.test
*.prof
17
vendor/github.com/bmatcuk/doublestar/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,17 @@
language: go

go:
  - 1.3
  - 1.4
  - 1.5
  - 1.6

before_install:
  - go get -t -v ./...

script:
  - go test -race -coverprofile=coverage.txt -covermode=atomic

after_success:
  - bash <(curl -s https://codecov.io/bash)
22
vendor/github.com/bmatcuk/doublestar/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2014 Bob Matcuk

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
109
vendor/github.com/bmatcuk/doublestar/README.md
generated
vendored
Normal file
@@ -0,0 +1,109 @@

[](https://travis-ci.org/bmatcuk/doublestar)
[](https://codecov.io/github/bmatcuk/doublestar?branch=master)

# doublestar

**doublestar** is a [golang](http://golang.org/) implementation of path pattern
matching and globbing with support for "doublestar" (aka globstar: `**`)
patterns.

doublestar patterns match files and directories recursively. For example, if
you had the following directory structure:

```
grandparent
`-- parent
    |-- child1
    `-- child2
```

You could find the children with patterns such as: `**/child*`,
`grandparent/**/child?`, `**/parent/*`, or even just `**` by itself (which will
return all files and directories recursively).

Bash's globstar is doublestar's inspiration and, as such, works similarly.
Note that the doublestar must appear as a path component by itself. A pattern
such as `/path**` is invalid and will be treated the same as `/path*`, but
`/path*/**` should achieve the desired result. Additionally, `/path/**` will
match all directories and files under the path directory, but `/path/**/` will
only match directories.

## Installation

**doublestar** can be installed via `go get`:

```bash
go get github.com/bmatcuk/doublestar
```

To use it in your code, you must import it:

```go
import "github.com/bmatcuk/doublestar"
```

## Functions

### Match
```go
func Match(pattern, name string) (bool, error)
```

Match returns true if `name` matches the file name `pattern`
([see below](#patterns)). `name` and `pattern` are split on forward slash (`/`)
characters and may be relative or absolute.

Note: `Match()` is meant to be a drop-in replacement for `path.Match()`. As
such, it always uses `/` as the path separator. If you are writing code that
will run on systems where `/` is not the path separator (such as Windows), you
want to use `PathMatch()` (below) instead.

### PathMatch
```go
func PathMatch(pattern, name string) (bool, error)
```

PathMatch returns true if `name` matches the file name `pattern`
([see below](#patterns)). The difference between Match and PathMatch is that
PathMatch will automatically use your system's path separator to split `name`
and `pattern`.

`PathMatch()` is meant to be a drop-in replacement for `filepath.Match()`.

### Glob
```go
func Glob(pattern string) ([]string, error)
```

Glob finds all files and directories in the filesystem that match `pattern`
([see below](#patterns)). `pattern` may be relative (to the current working
directory), or absolute.

`Glob()` is meant to be a drop-in replacement for `filepath.Glob()`.

## Patterns

**doublestar** supports the following special terms in the patterns:

Special Terms | Meaning
------------- | -------
`*` | matches any sequence of non-path-separators
`**` | matches any sequence of characters, including path separators
`?` | matches any single non-path-separator character
`[class]` | matches any single non-path-separator character against a class of characters ([see below](#character-classes))
`{alt1,...}` | matches a sequence of characters if one of the comma-separated alternatives matches

Any character with a special meaning can be escaped with a backslash (`\`).

### Character Classes

Character classes support the following:

Class | Meaning
---------- | -------
`[abc]` | matches any single character within the set
`[a-z]` | matches any single character in the range
`[^class]` | matches any single character which does *not* match the class
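As a quick illustration of the special terms in the tables above (a sketch, not part of the vendored README; the paths are made up):

```go
package main

import (
    "fmt"

    "github.com/bmatcuk/doublestar"
)

func main() {
    // `**` crosses directory boundaries; `*` and `?` stay within one component.
    ok, _ := doublestar.Match("grandparent/**/child?", "grandparent/parent/child1")
    fmt.Println(ok) // true

    // Character classes and brace alternatives apply within a single component.
    ok, _ = doublestar.Match("logs/2019-0[1-6]/*.{log,txt}", "logs/2019-03/app.log")
    fmt.Println(ok) // true

    // Glob walks the filesystem, relative to the current directory here.
    matches, err := doublestar.Glob("**/*.go")
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(matches)
}
```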
455
vendor/github.com/bmatcuk/doublestar/doublestar.go
generated
vendored
Normal file
@@ -0,0 +1,455 @@
|
||||
package doublestar
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var ErrBadPattern = path.ErrBadPattern
|
||||
|
||||
// Split a path on the given separator, respecting escaping.
|
||||
func splitPathOnSeparator(path string, separator rune) []string {
|
||||
// if the separator is '\\', then we can just split...
|
||||
if separator == '\\' {
|
||||
return strings.Split(path, string(separator))
|
||||
}
|
||||
|
||||
// otherwise, we need to be careful of situations where the separator was escaped
|
||||
cnt := strings.Count(path, string(separator))
|
||||
if cnt == 0 {
|
||||
return []string{path}
|
||||
}
|
||||
ret := make([]string, cnt+1)
|
||||
pathlen := len(path)
|
||||
separatorLen := utf8.RuneLen(separator)
|
||||
idx := 0
|
||||
for start := 0; start < pathlen; {
|
||||
end := indexRuneWithEscaping(path[start:], separator)
|
||||
if end == -1 {
|
||||
end = pathlen
|
||||
} else {
|
||||
end += start
|
||||
}
|
||||
ret[idx] = path[start:end]
|
||||
start = end + separatorLen
|
||||
idx++
|
||||
}
|
||||
return ret[:idx]
|
||||
}
|
||||
|
||||
// Find the first index of a rune in a string,
|
||||
// ignoring any times the rune is escaped using "\".
|
||||
func indexRuneWithEscaping(s string, r rune) int {
|
||||
end := strings.IndexRune(s, r)
|
||||
if end == -1 {
|
||||
return -1
|
||||
}
|
||||
if end > 0 && s[end-1] == '\\' {
|
||||
start := end + utf8.RuneLen(r)
|
||||
end = indexRuneWithEscaping(s[start:], r)
|
||||
if end != -1 {
|
||||
end += start
|
||||
}
|
||||
}
|
||||
return end
|
||||
}
|
||||
|
||||
// Match returns true if name matches the shell file name pattern.
|
||||
// The pattern syntax is:
|
||||
//
|
||||
// pattern:
|
||||
// { term }
|
||||
// term:
|
||||
// '*' matches any sequence of non-path-separators
|
||||
// '**' matches any sequence of characters, including
|
||||
// path separators.
|
||||
// '?' matches any single non-path-separator character
|
||||
// '[' [ '^' ] { character-range } ']'
|
||||
// character class (must be non-empty)
|
||||
// '{' { term } [ ',' { term } ... ] '}'
|
||||
// c matches character c (c != '*', '?', '\\', '[')
|
||||
// '\\' c matches character c
|
||||
//
|
||||
// character-range:
|
||||
// c matches character c (c != '\\', '-', ']')
|
||||
// '\\' c matches character c
|
||||
// lo '-' hi matches character c for lo <= c <= hi
|
||||
//
|
||||
// Match requires pattern to match all of name, not just a substring.
|
||||
// The path-separator defaults to the '/' character. The only possible
|
||||
// returned error is ErrBadPattern, when pattern is malformed.
|
||||
//
|
||||
// Note: this is meant as a drop-in replacement for path.Match() which
|
||||
// always uses '/' as the path separator. If you want to support systems
|
||||
// which use a different path separator (such as Windows), what you want
|
||||
// is the PathMatch() function below.
|
||||
//
|
||||
func Match(pattern, name string) (bool, error) {
|
||||
return matchWithSeparator(pattern, name, '/')
|
||||
}
|
||||
|
||||
// PathMatch is like Match except that it uses your system's path separator.
|
||||
// For most systems, this will be '/'. However, for Windows, it would be '\\'.
|
||||
// Note that for systems where the path separator is '\\', escaping is
|
||||
// disabled.
|
||||
//
|
||||
// Note: this is meant as a drop-in replacement for filepath.Match().
|
||||
//
|
||||
func PathMatch(pattern, name string) (bool, error) {
|
||||
return matchWithSeparator(pattern, name, os.PathSeparator)
|
||||
}
|
||||
|
||||
// Match returns true if name matches the shell file name pattern.
|
||||
// The pattern syntax is:
|
||||
//
|
||||
// pattern:
|
||||
// { term }
|
||||
// term:
|
||||
// '*' matches any sequence of non-path-separators
|
||||
// '**' matches any sequence of characters, including
|
||||
// path separators.
|
||||
// '?' matches any single non-path-separator character
|
||||
// '[' [ '^' ] { character-range } ']'
|
||||
// character class (must be non-empty)
|
||||
// '{' { term } [ ',' { term } ... ] '}'
|
||||
// c matches character c (c != '*', '?', '\\', '[')
|
||||
// '\\' c matches character c
|
||||
//
|
||||
// character-range:
|
||||
// c matches character c (c != '\\', '-', ']')
|
||||
// '\\' c matches character c, unless separator is '\\'
|
||||
// lo '-' hi matches character c for lo <= c <= hi
|
||||
//
|
||||
// Match requires pattern to match all of name, not just a substring.
|
||||
// The only possible returned error is ErrBadPattern, when pattern
|
||||
// is malformed.
|
||||
//
|
||||
func matchWithSeparator(pattern, name string, separator rune) (bool, error) {
|
||||
patternComponents := splitPathOnSeparator(pattern, separator)
|
||||
nameComponents := splitPathOnSeparator(name, separator)
|
||||
return doMatching(patternComponents, nameComponents)
|
||||
}
|
||||
|
||||
func doMatching(patternComponents, nameComponents []string) (matched bool, err error) {
|
||||
// check for some base-cases
|
||||
patternLen, nameLen := len(patternComponents), len(nameComponents)
|
||||
if patternLen == 0 && nameLen == 0 {
|
||||
return true, nil
|
||||
}
|
||||
if patternLen == 0 || nameLen == 0 {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
patIdx, nameIdx := 0, 0
|
||||
for patIdx < patternLen && nameIdx < nameLen {
|
||||
if patternComponents[patIdx] == "**" {
|
||||
// if our last pattern component is a doublestar, we're done -
|
||||
// doublestar will match any remaining name components, if any.
|
||||
if patIdx++; patIdx >= patternLen {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// otherwise, try matching remaining components
|
||||
for ; nameIdx < nameLen; nameIdx++ {
|
||||
if m, _ := doMatching(patternComponents[patIdx:], nameComponents[nameIdx:]); m {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
} else {
|
||||
// try matching components
|
||||
matched, err = matchComponent(patternComponents[patIdx], nameComponents[nameIdx])
|
||||
if !matched || err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
patIdx++
|
||||
nameIdx++
|
||||
}
|
||||
return patIdx >= patternLen && nameIdx >= nameLen, nil
|
||||
}
|
||||
|
||||
// Glob returns the names of all files matching pattern or nil
|
||||
// if there is no matching file. The syntax of pattern is the same
|
||||
// as in Match. The pattern may describe hierarchical names such as
|
||||
// /usr/*/bin/ed (assuming the Separator is '/').
|
||||
//
|
||||
// Glob ignores file system errors such as I/O errors reading directories.
|
||||
// The only possible returned error is ErrBadPattern, when pattern
|
||||
// is malformed.
|
||||
//
|
||||
// Your system path separator is automatically used. This means on
|
||||
// systems where the separator is '\\' (Windows), escaping will be
|
||||
// disabled.
|
||||
//
|
||||
// Note: this is meant as a drop-in replacement for filepath.Glob().
|
||||
//
|
||||
func Glob(pattern string) (matches []string, err error) {
|
||||
patternComponents := splitPathOnSeparator(filepath.ToSlash(pattern), '/')
|
||||
if len(patternComponents) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// On Windows systems, this will return the drive name ('C:'), on others,
|
||||
// it will return an empty string.
|
||||
volumeName := filepath.VolumeName(pattern)
|
||||
|
||||
// If the first pattern component is equal to the volume name, then the
|
||||
// pattern is an absolute path.
|
||||
if patternComponents[0] == volumeName {
|
||||
return doGlob(fmt.Sprintf("%s%s", volumeName, string(os.PathSeparator)), patternComponents[1:], matches)
|
||||
}
|
||||
|
||||
// otherwise, it's a relative pattern
|
||||
return doGlob(".", patternComponents, matches)
|
||||
}
|
||||
|
||||
// Perform a glob
|
||||
func doGlob(basedir string, components, matches []string) (m []string, e error) {
|
||||
m = matches
|
||||
e = nil
|
||||
|
||||
// figure out how many components we don't need to glob because they're
|
||||
// just names without patterns - we'll use os.Lstat below to check if that
|
||||
// path actually exists
|
||||
patLen := len(components)
|
||||
patIdx := 0
|
||||
for ; patIdx < patLen; patIdx++ {
|
||||
if strings.IndexAny(components[patIdx], "*?[{\\") >= 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if patIdx > 0 {
|
||||
basedir = filepath.Join(basedir, filepath.Join(components[0:patIdx]...))
|
||||
}
|
||||
|
||||
// Lstat will return an error if the file/directory doesn't exist
|
||||
fi, err := os.Lstat(basedir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// if there are no more components, we've found a match
|
||||
if patIdx >= patLen {
|
||||
m = append(m, basedir)
|
||||
return
|
||||
}
|
||||
|
||||
// otherwise, we need to check each item in the directory...
|
||||
// first, if basedir is a symlink, follow it...
|
||||
if (fi.Mode() & os.ModeSymlink) != 0 {
|
||||
fi, err = os.Stat(basedir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// confirm it's a directory...
|
||||
if !fi.IsDir() {
|
||||
return
|
||||
}
|
||||
|
||||
// read directory
|
||||
dir, err := os.Open(basedir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer dir.Close()
|
||||
|
||||
files, _ := dir.Readdir(-1)
|
||||
lastComponent := (patIdx + 1) >= patLen
|
||||
if components[patIdx] == "**" {
|
||||
// if the current component is a doublestar, we'll try depth-first
|
||||
for _, file := range files {
|
||||
// if symlink, we may want to follow
|
||||
if (file.Mode() & os.ModeSymlink) != 0 {
|
||||
file, err = os.Stat(filepath.Join(basedir, file.Name()))
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if file.IsDir() {
|
||||
// recurse into directories
|
||||
if lastComponent {
|
||||
m = append(m, filepath.Join(basedir, file.Name()))
|
||||
}
|
||||
m, e = doGlob(filepath.Join(basedir, file.Name()), components[patIdx:], m)
|
||||
} else if lastComponent {
|
||||
// if the pattern's last component is a doublestar, we match filenames, too
|
||||
m = append(m, filepath.Join(basedir, file.Name()))
|
||||
}
|
||||
}
|
||||
if lastComponent {
|
||||
return // we're done
|
||||
}
|
||||
patIdx++
|
||||
lastComponent = (patIdx + 1) >= patLen
|
||||
}
|
||||
|
||||
// check items in current directory and recurse
|
||||
var match bool
|
||||
for _, file := range files {
|
||||
match, e = matchComponent(components[patIdx], file.Name())
|
||||
if e != nil {
|
||||
return
|
||||
}
|
||||
if match {
|
||||
if lastComponent {
|
||||
m = append(m, filepath.Join(basedir, file.Name()))
|
||||
} else {
|
||||
m, e = doGlob(filepath.Join(basedir, file.Name()), components[patIdx+1:], m)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Attempt to match a single pattern component with a path component
|
||||
func matchComponent(pattern, name string) (bool, error) {
|
||||
// check some base cases
|
||||
patternLen, nameLen := len(pattern), len(name)
|
||||
if patternLen == 0 && nameLen == 0 {
|
||||
return true, nil
|
||||
}
|
||||
if patternLen == 0 {
|
||||
return false, nil
|
||||
}
|
||||
if nameLen == 0 && pattern != "*" {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// check for matches one rune at a time
|
||||
patIdx, nameIdx := 0, 0
|
||||
for patIdx < patternLen && nameIdx < nameLen {
|
||||
patRune, patAdj := utf8.DecodeRuneInString(pattern[patIdx:])
|
||||
nameRune, nameAdj := utf8.DecodeRuneInString(name[nameIdx:])
|
||||
if patRune == '\\' {
|
||||
// handle escaped runes
|
||||
patIdx += patAdj
|
||||
patRune, patAdj = utf8.DecodeRuneInString(pattern[patIdx:])
|
||||
if patRune == utf8.RuneError {
|
||||
return false, ErrBadPattern
|
||||
} else if patRune == nameRune {
|
||||
patIdx += patAdj
|
||||
nameIdx += nameAdj
|
||||
} else {
|
||||
return false, nil
|
||||
}
|
||||
} else if patRune == '*' {
|
||||
// handle stars
|
||||
if patIdx += patAdj; patIdx >= patternLen {
|
||||
// a star at the end of a pattern will always
|
||||
// match the rest of the path
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// check if we can make any matches
|
||||
for ; nameIdx < nameLen; nameIdx += nameAdj {
|
||||
if m, _ := matchComponent(pattern[patIdx:], name[nameIdx:]); m {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
} else if patRune == '[' {
|
||||
// handle character sets
|
||||
patIdx += patAdj
|
||||
endClass := indexRuneWithEscaping(pattern[patIdx:], ']')
|
||||
if endClass == -1 {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
endClass += patIdx
|
||||
classRunes := []rune(pattern[patIdx:endClass])
|
||||
classRunesLen := len(classRunes)
|
||||
if classRunesLen > 0 {
|
||||
classIdx := 0
|
||||
matchClass := false
|
||||
if classRunes[0] == '^' {
|
||||
classIdx++
|
||||
}
|
||||
for classIdx < classRunesLen {
|
||||
low := classRunes[classIdx]
|
||||
if low == '-' {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
classIdx++
|
||||
if low == '\\' {
|
||||
if classIdx < classRunesLen {
|
||||
low = classRunes[classIdx]
|
||||
classIdx++
|
||||
} else {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
}
|
||||
high := low
|
||||
if classIdx < classRunesLen && classRunes[classIdx] == '-' {
|
||||
// we have a range of runes
|
||||
if classIdx++; classIdx >= classRunesLen {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
high = classRunes[classIdx]
|
||||
if high == '-' {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
classIdx++
|
||||
if high == '\\' {
|
||||
if classIdx < classRunesLen {
|
||||
high = classRunes[classIdx]
|
||||
classIdx++
|
||||
} else {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
}
|
||||
}
|
||||
if low <= nameRune && nameRune <= high {
|
||||
matchClass = true
|
||||
}
|
||||
}
|
||||
if matchClass == (classRunes[0] == '^') {
|
||||
return false, nil
|
||||
}
|
||||
} else {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
patIdx = endClass + 1
|
||||
nameIdx += nameAdj
|
||||
} else if patRune == '{' {
|
||||
// handle alternatives such as {alt1,alt2,...}
|
||||
patIdx += patAdj
|
||||
endOptions := indexRuneWithEscaping(pattern[patIdx:], '}')
|
||||
if endOptions == -1 {
|
||||
return false, ErrBadPattern
|
||||
}
|
||||
endOptions += patIdx
|
||||
options := splitPathOnSeparator(pattern[patIdx:endOptions], ',')
|
||||
patIdx = endOptions + 1
|
||||
for _, o := range options {
|
||||
m, e := matchComponent(o+pattern[patIdx:], name[nameIdx:])
|
||||
if e != nil {
|
||||
return false, e
|
||||
}
|
||||
if m {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
} else if patRune == '?' || patRune == nameRune {
|
||||
// handle single-rune wildcard
|
||||
patIdx += patAdj
|
||||
nameIdx += nameAdj
|
||||
} else {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
if patIdx >= patternLen && nameIdx >= nameLen {
|
||||
return true, nil
|
||||
}
|
||||
if nameIdx >= nameLen && pattern[patIdx:] == "*" || pattern[patIdx:] == "**" {
|
||||
return true, nil
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
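A small sketch of how the separator and escaping rules implemented above behave (illustrative only; the paths are made up):

```go
package main

import (
    "fmt"

    "github.com/bmatcuk/doublestar"
)

func main() {
    // A backslash escapes the following metacharacter, so `\*` matches a
    // literal '*' rather than acting as a wildcard.
    ok, _ := doublestar.Match(`data/\*.txt`, "data/*.txt")
    fmt.Println(ok) // true

    // Unescaped, '*' is a wildcard within a single path component.
    ok, _ = doublestar.Match("data/*.txt", "data/report.txt")
    fmt.Println(ok) // true

    // PathMatch splits on os.PathSeparator instead of '/'; on systems where
    // the separator is '\\' (Windows), escaping is disabled entirely.
    ok, _ = doublestar.PathMatch("data/*.txt", "data/report.txt")
    fmt.Println(ok) // true wherever the separator is '/'
}
```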
1
vendor/github.com/bmatcuk/doublestar/go.mod
generated
vendored
Normal file
@@ -0,0 +1 @@
module github.com/bmatcuk/doublestar
14
vendor/github.com/disintegration/imaging/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,14 @@
language: go
go:
  - "1.7.x"
  - "1.8.x"
  - "1.9.x"
  - "1.10.x"
  - "1.11.x"

before_install:
  - go get github.com/mattn/goveralls

script:
  - go test -v -race -cover
  - $GOPATH/bin/goveralls -service=travis-ci
21
vendor/github.com/disintegration/imaging/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2012 Grigory Dryapak

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
197
vendor/github.com/disintegration/imaging/README.md
generated
vendored
Normal file
@@ -0,0 +1,197 @@
# Imaging

[](https://godoc.org/github.com/disintegration/imaging)
[](https://travis-ci.org/disintegration/imaging)
[](https://coveralls.io/github/disintegration/imaging?branch=master)
[](https://goreportcard.com/report/github.com/disintegration/imaging)

Package imaging provides basic image processing functions (resize, rotate, crop, brightness/contrast adjustments, etc.).

All the image processing functions provided by the package accept any image type that implements `image.Image` interface
as an input, and return a new image of `*image.NRGBA` type (32bit RGBA colors, non-premultiplied alpha).

## Installation

    go get -u github.com/disintegration/imaging

## Documentation

http://godoc.org/github.com/disintegration/imaging

## Usage examples

A few usage examples can be found below. See the documentation for the full list of supported functions.

### Image resizing

```go
// Resize srcImage to size = 128x128px using the Lanczos filter.
dstImage128 := imaging.Resize(srcImage, 128, 128, imaging.Lanczos)

// Resize srcImage to width = 800px preserving the aspect ratio.
dstImage800 := imaging.Resize(srcImage, 800, 0, imaging.Lanczos)

// Scale down srcImage to fit the 800x600px bounding box.
dstImageFit := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)

// Resize and crop the srcImage to fill the 100x100px area.
dstImageFill := imaging.Fill(srcImage, 100, 100, imaging.Center, imaging.Lanczos)
```

Imaging supports image resizing using various resampling filters. The most notable ones:
- `Lanczos` - A high-quality resampling filter for photographic images yielding sharp results.
- `CatmullRom` - A sharp cubic filter that is faster than Lanczos filter while providing similar results.
- `MitchellNetravali` - A cubic filter that produces smoother results with less ringing artifacts than CatmullRom.
- `Linear` - Bilinear resampling filter, produces smooth output. Faster than cubic filters.
- `Box` - Simple and fast averaging filter appropriate for downscaling. When upscaling it's similar to NearestNeighbor.
- `NearestNeighbor` - Fastest resampling filter, no antialiasing.

The full list of supported filters: NearestNeighbor, Box, Linear, Hermite, MitchellNetravali, CatmullRom, BSpline, Gaussian, Lanczos, Hann, Hamming, Blackman, Bartlett, Welch, Cosine. Custom filters can be created using ResampleFilter struct.

**Resampling filters comparison**

Original image:



The same image resized from 600x400px to 150x100px using different resampling filters.
From faster (lower quality) to slower (higher quality):

Filter | Resize result
--------------------------|---------------------------------------------
`imaging.NearestNeighbor` | 
`imaging.Linear` | 
`imaging.CatmullRom` | 
`imaging.Lanczos` | 

### Gaussian Blur

```go
dstImage := imaging.Blur(srcImage, 0.5)
```

Sigma parameter allows to control the strength of the blurring effect.

Original image | Sigma = 0.5 | Sigma = 1.5
---------------|-------------|------------
 |  | 

### Sharpening

```go
dstImage := imaging.Sharpen(srcImage, 0.5)
```

`Sharpen` uses gaussian function internally. Sigma parameter allows to control the strength of the sharpening effect.

Original image | Sigma = 0.5 | Sigma = 1.5
---------------|-------------|------------
 |  | 

### Gamma correction

```go
dstImage := imaging.AdjustGamma(srcImage, 0.75)
```

Original image | Gamma = 0.75 | Gamma = 1.25
---------------|--------------|-------------
 |  | 

### Contrast adjustment

```go
dstImage := imaging.AdjustContrast(srcImage, 20)
```

Original image | Contrast = 15 | Contrast = -15
---------------|---------------|---------------
 |  | 

### Brightness adjustment

```go
dstImage := imaging.AdjustBrightness(srcImage, 20)
```

Original image | Brightness = 10 | Brightness = -10
---------------|-----------------|-----------------
 |  | 

### Saturation adjustment

```go
dstImage := imaging.AdjustSaturation(srcImage, 20)
```

Original image | Saturation = 30 | Saturation = -30
---------------|-----------------|-----------------
 |  | 

## Example code

```go
package main

import (
    "image"
    "image/color"
    "log"

    "github.com/disintegration/imaging"
)

func main() {
    // Open a test image.
    src, err := imaging.Open("testdata/flowers.png")
    if err != nil {
        log.Fatalf("failed to open image: %v", err)
    }

    // Crop the original image to 300x300px size using the center anchor.
    src = imaging.CropAnchor(src, 300, 300, imaging.Center)

    // Resize the cropped image to width = 200px preserving the aspect ratio.
    src = imaging.Resize(src, 200, 0, imaging.Lanczos)

    // Create a blurred version of the image.
    img1 := imaging.Blur(src, 5)

    // Create a grayscale version of the image with higher contrast and sharpness.
    img2 := imaging.Grayscale(src)
    img2 = imaging.AdjustContrast(img2, 20)
    img2 = imaging.Sharpen(img2, 2)

    // Create an inverted version of the image.
    img3 := imaging.Invert(src)

    // Create an embossed version of the image using a convolution filter.
    img4 := imaging.Convolve3x3(
        src,
        [9]float64{
            -1, -1, 0,
            -1, 1, 1,
            0, 1, 1,
        },
        nil,
    )

    // Create a new image and paste the four produced images into it.
    dst := imaging.New(400, 400, color.NRGBA{0, 0, 0, 0})
    dst = imaging.Paste(dst, img1, image.Pt(0, 0))
    dst = imaging.Paste(dst, img2, image.Pt(0, 200))
    dst = imaging.Paste(dst, img3, image.Pt(200, 0))
    dst = imaging.Paste(dst, img4, image.Pt(200, 200))

    // Save the resulting image as JPEG.
    err = imaging.Save(dst, "testdata/out_example.jpg")
    if err != nil {
        log.Fatalf("failed to save image: %v", err)
    }
}
```

Output:


252
vendor/github.com/disintegration/imaging/adjust.go
generated
vendored
Normal file
@@ -0,0 +1,252 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
"math"
|
||||
)
|
||||
|
||||
// Grayscale produces a grayscale version of the image.
|
||||
func Grayscale(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
r := d[0]
|
||||
g := d[1]
|
||||
b := d[2]
|
||||
f := 0.299*float64(r) + 0.587*float64(g) + 0.114*float64(b)
|
||||
y := uint8(f + 0.5)
|
||||
d[0] = y
|
||||
d[1] = y
|
||||
d[2] = y
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Invert produces an inverted (negated) version of the image.
|
||||
func Invert(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
d[0] = 255 - d[0]
|
||||
d[1] = 255 - d[1]
|
||||
d[2] = 255 - d[2]
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// AdjustSaturation changes the saturation of the image using the percentage parameter and returns the adjusted image.
|
||||
// The percentage must be in the range (-100, 100).
|
||||
// The percentage = 0 gives the original image.
|
||||
// The percentage = 100 gives the image with the saturation value doubled for each pixel.
|
||||
// The percentage = -100 gives the image with the saturation value zeroed for each pixel (grayscale).
|
||||
//
|
||||
// Examples:
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, 25) // Increase image saturation by 25%.
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, -10) // Decrease image saturation by 10%.
|
||||
//
|
||||
func AdjustSaturation(img image.Image, percentage float64) *image.NRGBA {
|
||||
percentage = math.Min(math.Max(percentage, -100), 100)
|
||||
multiplier := 1 + percentage/100
|
||||
|
||||
return AdjustFunc(img, func(c color.NRGBA) color.NRGBA {
|
||||
h, s, l := rgbToHSL(c.R, c.G, c.B)
|
||||
s *= multiplier
|
||||
if s > 1 {
|
||||
s = 1
|
||||
}
|
||||
r, g, b := hslToRGB(h, s, l)
|
||||
return color.NRGBA{r, g, b, c.A}
|
||||
})
|
||||
}
|
||||
|
||||
// AdjustContrast changes the contrast of the image using the percentage parameter and returns the adjusted image.
|
||||
// The percentage must be in range (-100, 100). The percentage = 0 gives the original image.
|
||||
// The percentage = -100 gives solid gray image.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// dstImage = imaging.AdjustContrast(srcImage, -10) // Decrease image contrast by 10%.
|
||||
// dstImage = imaging.AdjustContrast(srcImage, 20) // Increase image contrast by 20%.
|
||||
//
|
||||
func AdjustContrast(img image.Image, percentage float64) *image.NRGBA {
|
||||
percentage = math.Min(math.Max(percentage, -100.0), 100.0)
|
||||
lut := make([]uint8, 256)
|
||||
|
||||
v := (100.0 + percentage) / 100.0
|
||||
for i := 0; i < 256; i++ {
|
||||
if 0 <= v && v <= 1 {
|
||||
lut[i] = clamp((0.5 + (float64(i)/255.0-0.5)*v) * 255.0)
|
||||
} else if 1 < v && v < 2 {
|
||||
lut[i] = clamp((0.5 + (float64(i)/255.0-0.5)*(1/(2.0-v))) * 255.0)
|
||||
} else {
|
||||
lut[i] = uint8(float64(i)/255.0+0.5) * 255
|
||||
}
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
// AdjustBrightness changes the brightness of the image using the percentage parameter and returns the adjusted image.
|
||||
// The percentage must be in range (-100, 100). The percentage = 0 gives the original image.
|
||||
// The percentage = -100 gives solid black image. The percentage = 100 gives solid white image.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// dstImage = imaging.AdjustBrightness(srcImage, -15) // Decrease image brightness by 15%.
|
||||
// dstImage = imaging.AdjustBrightness(srcImage, 10) // Increase image brightness by 10%.
|
||||
//
|
||||
func AdjustBrightness(img image.Image, percentage float64) *image.NRGBA {
|
||||
percentage = math.Min(math.Max(percentage, -100.0), 100.0)
|
||||
lut := make([]uint8, 256)
|
||||
|
||||
shift := 255.0 * percentage / 100.0
|
||||
for i := 0; i < 256; i++ {
|
||||
lut[i] = clamp(float64(i) + shift)
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
// AdjustGamma performs a gamma correction on the image and returns the adjusted image.
|
||||
// Gamma parameter must be positive. Gamma = 1.0 gives the original image.
|
||||
// Gamma less than 1.0 darkens the image and gamma greater than 1.0 lightens it.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage = imaging.AdjustGamma(srcImage, 0.7)
|
||||
//
|
||||
func AdjustGamma(img image.Image, gamma float64) *image.NRGBA {
|
||||
e := 1.0 / math.Max(gamma, 0.0001)
|
||||
lut := make([]uint8, 256)
|
||||
|
||||
for i := 0; i < 256; i++ {
|
||||
lut[i] = clamp(math.Pow(float64(i)/255.0, e) * 255.0)
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
// AdjustSigmoid changes the contrast of the image using a sigmoidal function and returns the adjusted image.
|
||||
// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail.
|
||||
// The midpoint parameter is the midpoint of contrast that must be between 0 and 1, typically 0.5.
|
||||
// The factor parameter indicates how much to increase or decrease the contrast, typically in range (-10, 10).
|
||||
// If the factor parameter is positive the image contrast is increased otherwise the contrast is decreased.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// dstImage = imaging.AdjustSigmoid(srcImage, 0.5, 3.0) // Increase the contrast.
|
||||
// dstImage = imaging.AdjustSigmoid(srcImage, 0.5, -3.0) // Decrease the contrast.
|
||||
//
|
||||
func AdjustSigmoid(img image.Image, midpoint, factor float64) *image.NRGBA {
|
||||
if factor == 0 {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
lut := make([]uint8, 256)
|
||||
a := math.Min(math.Max(midpoint, 0.0), 1.0)
|
||||
b := math.Abs(factor)
|
||||
sig0 := sigmoid(a, b, 0)
|
||||
sig1 := sigmoid(a, b, 1)
|
||||
e := 1.0e-6
|
||||
|
||||
if factor > 0 {
|
||||
for i := 0; i < 256; i++ {
|
||||
x := float64(i) / 255.0
|
||||
sigX := sigmoid(a, b, x)
|
||||
f := (sigX - sig0) / (sig1 - sig0)
|
||||
lut[i] = clamp(f * 255.0)
|
||||
}
|
||||
} else {
|
||||
for i := 0; i < 256; i++ {
|
||||
x := float64(i) / 255.0
|
||||
arg := math.Min(math.Max((sig1-sig0)*x+sig0, e), 1.0-e)
|
||||
f := a - math.Log(1.0/arg-1.0)/b
|
||||
lut[i] = clamp(f * 255.0)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
func sigmoid(a, b, x float64) float64 {
|
||||
return 1 / (1 + math.Exp(b*(a-x)))
|
||||
}
|
||||
|
||||
// adjustLUT applies the given lookup table to the colors of the image.
|
||||
func adjustLUT(img image.Image, lut []uint8) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
lut = lut[0:256]
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
d[0] = lut[d[0]]
|
||||
d[1] = lut[d[1]]
|
||||
d[2] = lut[d[2]]
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// AdjustFunc applies the fn function to each pixel of the img image and returns the adjusted image.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage = imaging.AdjustFunc(
|
||||
// srcImage,
|
||||
// func(c color.NRGBA) color.NRGBA {
|
||||
// // Shift the red channel by 16.
|
||||
// r := int(c.R) + 16
|
||||
// if r > 255 {
|
||||
// r = 255
|
||||
// }
|
||||
// return color.NRGBA{uint8(r), c.G, c.B, c.A}
|
||||
// }
|
||||
// )
|
||||
//
|
||||
func AdjustFunc(img image.Image, fn func(c color.NRGBA) color.NRGBA) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+4 : i+4]
|
||||
r := d[0]
|
||||
g := d[1]
|
||||
b := d[2]
|
||||
a := d[3]
|
||||
c := fn(color.NRGBA{r, g, b, a})
|
||||
d[0] = c.R
|
||||
d[1] = c.G
|
||||
d[2] = c.B
|
||||
d[3] = c.A
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
148
vendor/github.com/disintegration/imaging/convolution.go
generated
vendored
Normal file
148
vendor/github.com/disintegration/imaging/convolution.go
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
)
|
||||
|
||||
// ConvolveOptions are convolution parameters.
|
||||
type ConvolveOptions struct {
|
||||
// If Normalize is true the kernel is normalized before convolution.
|
||||
Normalize bool
|
||||
|
||||
// If Abs is true the absolute value of each color channel is taken after convolution.
|
||||
Abs bool
|
||||
|
||||
// Bias is added to each color channel value after convolution.
|
||||
Bias int
|
||||
}
|
||||
|
||||
// Convolve3x3 convolves the image with the specified 3x3 convolution kernel.
|
||||
// Default parameters are used if a nil *ConvolveOptions is passed.
|
||||
func Convolve3x3(img image.Image, kernel [9]float64, options *ConvolveOptions) *image.NRGBA {
|
||||
return convolve(img, kernel[:], options)
|
||||
}
|
||||
|
||||
// Convolve5x5 convolves the image with the specified 5x5 convolution kernel.
|
||||
// Default parameters are used if a nil *ConvolveOptions is passed.
|
||||
func Convolve5x5(img image.Image, kernel [25]float64, options *ConvolveOptions) *image.NRGBA {
|
||||
return convolve(img, kernel[:], options)
|
||||
}
|
||||
|
||||
func convolve(img image.Image, kernel []float64, options *ConvolveOptions) *image.NRGBA {
|
||||
src := toNRGBA(img)
|
||||
w := src.Bounds().Max.X
|
||||
h := src.Bounds().Max.Y
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, w, h))
|
||||
|
||||
if w < 1 || h < 1 {
|
||||
return dst
|
||||
}
|
||||
|
||||
if options == nil {
|
||||
options = &ConvolveOptions{}
|
||||
}
|
||||
|
||||
if options.Normalize {
|
||||
normalizeKernel(kernel)
|
||||
}
|
||||
|
||||
type coef struct {
|
||||
x, y int
|
||||
k float64
|
||||
}
|
||||
var coefs []coef
|
||||
var m int
|
||||
|
||||
switch len(kernel) {
|
||||
case 9:
|
||||
m = 1
|
||||
case 25:
|
||||
m = 2
|
||||
}
|
||||
|
||||
i := 0
|
||||
for y := -m; y <= m; y++ {
|
||||
for x := -m; x <= m; x++ {
|
||||
if kernel[i] != 0 {
|
||||
coefs = append(coefs, coef{x: x, y: y, k: kernel[i]})
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
parallel(0, h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
for x := 0; x < w; x++ {
|
||||
var r, g, b float64
|
||||
for _, c := range coefs {
|
||||
ix := x + c.x
|
||||
if ix < 0 {
|
||||
ix = 0
|
||||
} else if ix >= w {
|
||||
ix = w - 1
|
||||
}
|
||||
|
||||
iy := y + c.y
|
||||
if iy < 0 {
|
||||
iy = 0
|
||||
} else if iy >= h {
|
||||
iy = h - 1
|
||||
}
|
||||
|
||||
off := iy*src.Stride + ix*4
|
||||
s := src.Pix[off : off+3 : off+3]
|
||||
r += float64(s[0]) * c.k
|
||||
g += float64(s[1]) * c.k
|
||||
b += float64(s[2]) * c.k
|
||||
}
|
||||
|
||||
if options.Abs {
|
||||
if r < 0 {
|
||||
r = -r
|
||||
}
|
||||
if g < 0 {
|
||||
g = -g
|
||||
}
|
||||
if b < 0 {
|
||||
b = -b
|
||||
}
|
||||
}
|
||||
|
||||
if options.Bias != 0 {
|
||||
r += float64(options.Bias)
|
||||
g += float64(options.Bias)
|
||||
b += float64(options.Bias)
|
||||
}
|
||||
|
||||
srcOff := y*src.Stride + x*4
|
||||
dstOff := y*dst.Stride + x*4
|
||||
d := dst.Pix[dstOff : dstOff+4 : dstOff+4]
|
||||
d[0] = clamp(r)
|
||||
d[1] = clamp(g)
|
||||
d[2] = clamp(b)
|
||||
d[3] = src.Pix[srcOff+3]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
func normalizeKernel(kernel []float64) {
|
||||
var sum, sumpos float64
|
||||
for i := range kernel {
|
||||
sum += kernel[i]
|
||||
if kernel[i] > 0 {
|
||||
sumpos += kernel[i]
|
||||
}
|
||||
}
|
||||
if sum != 0 {
|
||||
for i := range kernel {
|
||||
kernel[i] /= sum
|
||||
}
|
||||
} else if sumpos != 0 {
|
||||
for i := range kernel {
|
||||
kernel[i] /= sumpos
|
||||
}
|
||||
}
|
||||
}
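As a usage sketch for the convolution entry points above (file paths are placeholders), an emboss-style 3x3 kernel can be applied with the default options by passing a nil *ConvolveOptions:

package main

import (
    "log"

    "github.com/disintegration/imaging"
)

func main() {
    src, err := imaging.Open("in.png") // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    // Emboss-style kernel; nil options means no normalization, no Abs and zero Bias.
    dst := imaging.Convolve3x3(
        src,
        [9]float64{
            -1, -1, 0,
            -1, 1, 1,
            0, 1, 1,
        },
        nil,
    )
    if err := imaging.Save(dst, "out.png"); err != nil {
        log.Fatal(err)
    }
}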
|
||||
7
vendor/github.com/disintegration/imaging/doc.go
generated
vendored
Normal file
@@ -0,0 +1,7 @@
/*
Package imaging provides basic image processing functions (resize, rotate, crop, brightness/contrast adjustments, etc.).

All the image processing functions provided by the package accept any image type that implements image.Image interface
as an input, and return a new image of *image.NRGBA type (32bit RGBA colors, non-premultiplied alpha).
*/
package imaging
169
vendor/github.com/disintegration/imaging/effects.go
generated
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"math"
|
||||
)
|
||||
|
||||
func gaussianBlurKernel(x, sigma float64) float64 {
|
||||
return math.Exp(-(x*x)/(2*sigma*sigma)) / (sigma * math.Sqrt(2*math.Pi))
|
||||
}
|
||||
|
||||
// Blur produces a blurred version of the image using a Gaussian function.
|
||||
// Sigma parameter must be positive and indicates how much the image will be blurred.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Blur(srcImage, 3.5)
|
||||
//
|
||||
func Blur(img image.Image, sigma float64) *image.NRGBA {
|
||||
if sigma <= 0 {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
radius := int(math.Ceil(sigma * 3.0))
|
||||
kernel := make([]float64, radius+1)
|
||||
|
||||
for i := 0; i <= radius; i++ {
|
||||
kernel[i] = gaussianBlurKernel(float64(i), sigma)
|
||||
}
|
||||
|
||||
return blurVertical(blurHorizontal(img, kernel), kernel)
|
||||
}
|
||||
|
||||
func blurHorizontal(img image.Image, kernel []float64) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
radius := len(kernel) - 1
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
scanLineF := make([]float64, len(scanLine))
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
for i, v := range scanLine {
|
||||
scanLineF[i] = float64(v)
|
||||
}
|
||||
for x := 0; x < src.w; x++ {
|
||||
min := x - radius
|
||||
if min < 0 {
|
||||
min = 0
|
||||
}
|
||||
max := x + radius
|
||||
if max > src.w-1 {
|
||||
max = src.w - 1
|
||||
}
|
||||
var r, g, b, a, wsum float64
|
||||
for ix := min; ix <= max; ix++ {
|
||||
i := ix * 4
|
||||
weight := kernel[absint(x-ix)]
|
||||
wsum += weight
|
||||
s := scanLineF[i : i+4 : i+4]
|
||||
wa := s[3] * weight
|
||||
r += s[0] * wa
|
||||
g += s[1] * wa
|
||||
b += s[2] * wa
|
||||
a += wa
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := y*dst.Stride + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a / wsum)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
func blurVertical(img image.Image, kernel []float64) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
radius := len(kernel) - 1
|
||||
|
||||
parallel(0, src.w, func(xs <-chan int) {
|
||||
scanLine := make([]uint8, src.h*4)
|
||||
scanLineF := make([]float64, len(scanLine))
|
||||
for x := range xs {
|
||||
src.scan(x, 0, x+1, src.h, scanLine)
|
||||
for i, v := range scanLine {
|
||||
scanLineF[i] = float64(v)
|
||||
}
|
||||
for y := 0; y < src.h; y++ {
|
||||
min := y - radius
|
||||
if min < 0 {
|
||||
min = 0
|
||||
}
|
||||
max := y + radius
|
||||
if max > src.h-1 {
|
||||
max = src.h - 1
|
||||
}
|
||||
var r, g, b, a, wsum float64
|
||||
for iy := min; iy <= max; iy++ {
|
||||
i := iy * 4
|
||||
weight := kernel[absint(y-iy)]
|
||||
wsum += weight
|
||||
s := scanLineF[i : i+4 : i+4]
|
||||
wa := s[3] * weight
|
||||
r += s[0] * wa
|
||||
g += s[1] * wa
|
||||
b += s[2] * wa
|
||||
a += wa
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := y*dst.Stride + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a / wsum)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
// Sharpen produces a sharpened version of the image.
|
||||
// Sigma parameter must be positive and indicates how much the image will be sharpened.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Sharpen(srcImage, 3.5)
|
||||
//
|
||||
func Sharpen(img image.Image, sigma float64) *image.NRGBA {
|
||||
if sigma <= 0 {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
blurred := Blur(img, sigma)
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
j := y * dst.Stride
|
||||
for i := 0; i < src.w*4; i++ {
|
||||
val := int(scanLine[i])<<1 - int(blurred.Pix[j])
|
||||
if val < 0 {
|
||||
val = 0
|
||||
} else if val > 0xff {
|
||||
val = 0xff
|
||||
}
|
||||
dst.Pix[j] = uint8(val)
|
||||
j++
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
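The blur above is separable (a horizontal pass followed by a vertical pass over the same kernel), and Sharpen is an unsharp mask built on top of it. A minimal sketch with placeholder paths:

package main

import (
    "log"

    "github.com/disintegration/imaging"
)

func main() {
    src, err := imaging.Open("in.jpg") // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    blurred := imaging.Blur(src, 3.5)      // Gaussian blur with sigma 3.5
    sharpened := imaging.Sharpen(src, 1.5) // unsharp mask: 2*original - blurred, clamped
    if err := imaging.Save(blurred, "blurred.jpg"); err != nil {
        log.Fatal(err)
    }
    if err := imaging.Save(sharpened, "sharpened.jpg"); err != nil {
        log.Fatal(err)
    }
}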
|
||||
3
vendor/github.com/disintegration/imaging/go.mod
generated
vendored
Normal file
@@ -0,0 +1,3 @@
module github.com/disintegration/imaging

require golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81
2
vendor/github.com/disintegration/imaging/go.sum
generated
vendored
Normal file
@@ -0,0 +1,2 @@
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81 h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
52
vendor/github.com/disintegration/imaging/histogram.go
generated
vendored
Normal file
@@ -0,0 +1,52 @@
package imaging

import (
    "image"
    "sync"
)

// Histogram returns a normalized histogram of an image.
//
// Resulting histogram is represented as an array of 256 floats, where
// histogram[i] is a probability of a pixel being of a particular luminance i.
func Histogram(img image.Image) [256]float64 {
    var mu sync.Mutex
    var histogram [256]float64
    var total float64

    src := newScanner(img)
    if src.w == 0 || src.h == 0 {
        return histogram
    }

    parallel(0, src.h, func(ys <-chan int) {
        var tmpHistogram [256]float64
        var tmpTotal float64
        scanLine := make([]uint8, src.w*4)
        for y := range ys {
            src.scan(0, y, src.w, y+1, scanLine)
            i := 0
            for x := 0; x < src.w; x++ {
                s := scanLine[i : i+3 : i+3]
                r := s[0]
                g := s[1]
                b := s[2]
                y := 0.299*float32(r) + 0.587*float32(g) + 0.114*float32(b)
                tmpHistogram[int(y+0.5)]++
                tmpTotal++
                i += 4
            }
        }
        mu.Lock()
        for i := 0; i < 256; i++ {
            histogram[i] += tmpHistogram[i]
        }
        total += tmpTotal
        mu.Unlock()
    })

    for i := 0; i < 256; i++ {
        histogram[i] = histogram[i] / total
    }
    return histogram
}
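Because the histogram is normalized to probabilities, cumulative sums over it give luminance percentiles directly. A small sketch with a placeholder path; the 0.5 threshold picks the median luminance:

package main

import (
    "fmt"
    "log"

    "github.com/disintegration/imaging"
)

func main() {
    img, err := imaging.Open("in.jpg") // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    hist := imaging.Histogram(img)
    // Walk the cumulative distribution until half of the pixels are covered.
    var cum float64
    median := 0
    for i, p := range hist {
        cum += p
        if cum >= 0.5 {
            median = i
            break
        }
    }
    fmt.Println("median luminance:", median)
}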
444
vendor/github.com/disintegration/imaging/io.go
generated
vendored
Normal file
@@ -0,0 +1,444 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"image"
|
||||
"image/draw"
|
||||
"image/gif"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/image/bmp"
|
||||
"golang.org/x/image/tiff"
|
||||
)
|
||||
|
||||
type fileSystem interface {
|
||||
Create(string) (io.WriteCloser, error)
|
||||
Open(string) (io.ReadCloser, error)
|
||||
}
|
||||
|
||||
type localFS struct{}
|
||||
|
||||
func (localFS) Create(name string) (io.WriteCloser, error) { return os.Create(name) }
|
||||
func (localFS) Open(name string) (io.ReadCloser, error) { return os.Open(name) }
|
||||
|
||||
var fs fileSystem = localFS{}
|
||||
|
||||
type decodeConfig struct {
|
||||
autoOrientation bool
|
||||
}
|
||||
|
||||
var defaultDecodeConfig = decodeConfig{
|
||||
autoOrientation: false,
|
||||
}
|
||||
|
||||
// DecodeOption sets an optional parameter for the Decode and Open functions.
|
||||
type DecodeOption func(*decodeConfig)
|
||||
|
||||
// AutoOrientation returns a DecodeOption that sets the auto-orientation mode.
|
||||
// If auto-orientation is enabled, the image will be transformed after decoding
|
||||
// according to the EXIF orientation tag (if present). By default it's disabled.
|
||||
func AutoOrientation(enabled bool) DecodeOption {
|
||||
return func(c *decodeConfig) {
|
||||
c.autoOrientation = enabled
|
||||
}
|
||||
}
|
||||
|
||||
// Decode reads an image from r.
|
||||
func Decode(r io.Reader, opts ...DecodeOption) (image.Image, error) {
|
||||
cfg := defaultDecodeConfig
|
||||
for _, option := range opts {
|
||||
option(&cfg)
|
||||
}
|
||||
|
||||
if !cfg.autoOrientation {
|
||||
img, _, err := image.Decode(r)
|
||||
return img, err
|
||||
}
|
||||
|
||||
var orient orientation
|
||||
pr, pw := io.Pipe()
|
||||
r = io.TeeReader(r, pw)
|
||||
done := make(chan struct{})
|
||||
go func() {
|
||||
defer close(done)
|
||||
orient = readOrientation(pr)
|
||||
io.Copy(ioutil.Discard, pr)
|
||||
}()
|
||||
|
||||
img, _, err := image.Decode(r)
|
||||
pw.Close()
|
||||
<-done
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return fixOrientation(img, orient), nil
|
||||
}
|
||||
|
||||
// Open loads an image from file.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// // Load an image from file.
|
||||
// img, err := imaging.Open("test.jpg")
|
||||
//
|
||||
// // Load an image and transform it depending on the EXIF orientation tag (if present).
|
||||
// img, err := imaging.Open("test.jpg", imaging.AutoOrientation(true))
|
||||
//
|
||||
func Open(filename string, opts ...DecodeOption) (image.Image, error) {
|
||||
file, err := fs.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
return Decode(file, opts...)
|
||||
}
|
||||
|
||||
// Format is an image file format.
|
||||
type Format int
|
||||
|
||||
// Image file formats.
|
||||
const (
|
||||
JPEG Format = iota
|
||||
PNG
|
||||
GIF
|
||||
TIFF
|
||||
BMP
|
||||
)
|
||||
|
||||
var formatExts = map[string]Format{
|
||||
"jpg": JPEG,
|
||||
"jpeg": JPEG,
|
||||
"png": PNG,
|
||||
"gif": GIF,
|
||||
"tif": TIFF,
|
||||
"tiff": TIFF,
|
||||
"bmp": BMP,
|
||||
}
|
||||
|
||||
var formatNames = map[Format]string{
|
||||
JPEG: "JPEG",
|
||||
PNG: "PNG",
|
||||
GIF: "GIF",
|
||||
TIFF: "TIFF",
|
||||
BMP: "BMP",
|
||||
}
|
||||
|
||||
func (f Format) String() string {
|
||||
return formatNames[f]
|
||||
}
|
||||
|
||||
// ErrUnsupportedFormat means the given image format is not supported.
|
||||
var ErrUnsupportedFormat = errors.New("imaging: unsupported image format")
|
||||
|
||||
// FormatFromExtension parses image format from filename extension:
|
||||
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
|
||||
func FormatFromExtension(ext string) (Format, error) {
|
||||
if f, ok := formatExts[strings.ToLower(strings.TrimPrefix(ext, "."))]; ok {
|
||||
return f, nil
|
||||
}
|
||||
return -1, ErrUnsupportedFormat
|
||||
}
|
||||
|
||||
// FormatFromFilename parses image format from filename:
|
||||
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
|
||||
func FormatFromFilename(filename string) (Format, error) {
|
||||
ext := filepath.Ext(filename)
|
||||
return FormatFromExtension(ext)
|
||||
}
|
||||
|
||||
type encodeConfig struct {
|
||||
jpegQuality int
|
||||
gifNumColors int
|
||||
gifQuantizer draw.Quantizer
|
||||
gifDrawer draw.Drawer
|
||||
pngCompressionLevel png.CompressionLevel
|
||||
}
|
||||
|
||||
var defaultEncodeConfig = encodeConfig{
|
||||
jpegQuality: 95,
|
||||
gifNumColors: 256,
|
||||
gifQuantizer: nil,
|
||||
gifDrawer: nil,
|
||||
pngCompressionLevel: png.DefaultCompression,
|
||||
}
|
||||
|
||||
// EncodeOption sets an optional parameter for the Encode and Save functions.
|
||||
type EncodeOption func(*encodeConfig)
|
||||
|
||||
// JPEGQuality returns an EncodeOption that sets the output JPEG quality.
|
||||
// Quality ranges from 1 to 100 inclusive, higher is better. Default is 95.
|
||||
func JPEGQuality(quality int) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.jpegQuality = quality
|
||||
}
|
||||
}
|
||||
|
||||
// GIFNumColors returns an EncodeOption that sets the maximum number of colors
|
||||
// used in the GIF-encoded image. It ranges from 1 to 256. Default is 256.
|
||||
func GIFNumColors(numColors int) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.gifNumColors = numColors
|
||||
}
|
||||
}
|
||||
|
||||
// GIFQuantizer returns an EncodeOption that sets the quantizer that is used to produce
|
||||
// a palette of the GIF-encoded image.
|
||||
func GIFQuantizer(quantizer draw.Quantizer) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.gifQuantizer = quantizer
|
||||
}
|
||||
}
|
||||
|
||||
// GIFDrawer returns an EncodeOption that sets the drawer that is used to convert
|
||||
// the source image to the desired palette of the GIF-encoded image.
|
||||
func GIFDrawer(drawer draw.Drawer) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.gifDrawer = drawer
|
||||
}
|
||||
}
|
||||
|
||||
// PNGCompressionLevel returns an EncodeOption that sets the compression level
|
||||
// of the PNG-encoded image. Default is png.DefaultCompression.
|
||||
func PNGCompressionLevel(level png.CompressionLevel) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.pngCompressionLevel = level
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes the image img to w in the specified format (JPEG, PNG, GIF, TIFF or BMP).
|
||||
func Encode(w io.Writer, img image.Image, format Format, opts ...EncodeOption) error {
|
||||
cfg := defaultEncodeConfig
|
||||
for _, option := range opts {
|
||||
option(&cfg)
|
||||
}
|
||||
|
||||
switch format {
|
||||
case JPEG:
|
||||
if nrgba, ok := img.(*image.NRGBA); ok && nrgba.Opaque() {
|
||||
rgba := &image.RGBA{
|
||||
Pix: nrgba.Pix,
|
||||
Stride: nrgba.Stride,
|
||||
Rect: nrgba.Rect,
|
||||
}
|
||||
return jpeg.Encode(w, rgba, &jpeg.Options{Quality: cfg.jpegQuality})
|
||||
}
|
||||
return jpeg.Encode(w, img, &jpeg.Options{Quality: cfg.jpegQuality})
|
||||
|
||||
case PNG:
|
||||
encoder := png.Encoder{CompressionLevel: cfg.pngCompressionLevel}
|
||||
return encoder.Encode(w, img)
|
||||
|
||||
case GIF:
|
||||
return gif.Encode(w, img, &gif.Options{
|
||||
NumColors: cfg.gifNumColors,
|
||||
Quantizer: cfg.gifQuantizer,
|
||||
Drawer: cfg.gifDrawer,
|
||||
})
|
||||
|
||||
case TIFF:
|
||||
return tiff.Encode(w, img, &tiff.Options{Compression: tiff.Deflate, Predictor: true})
|
||||
|
||||
case BMP:
|
||||
return bmp.Encode(w, img)
|
||||
}
|
||||
|
||||
return ErrUnsupportedFormat
|
||||
}
|
||||
|
||||
// Save saves the image to file with the specified filename.
|
||||
// The format is determined from the filename extension:
|
||||
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// // Save the image as PNG.
|
||||
// err := imaging.Save(img, "out.png")
|
||||
//
|
||||
// // Save the image as JPEG with optional quality parameter set to 80.
|
||||
// err := imaging.Save(img, "out.jpg", imaging.JPEGQuality(80))
|
||||
//
|
||||
func Save(img image.Image, filename string, opts ...EncodeOption) (err error) {
|
||||
f, err := FormatFromFilename(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
file, err := fs.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = Encode(file, img, f, opts...)
|
||||
errc := file.Close()
|
||||
if err == nil {
|
||||
err = errc
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// orientation is an EXIF flag that specifies the transformation
|
||||
// that should be applied to the image to display it correctly.
|
||||
type orientation int
|
||||
|
||||
const (
|
||||
orientationUnspecified = 0
|
||||
orientationNormal = 1
|
||||
orientationFlipH = 2
|
||||
orientationRotate180 = 3
|
||||
orientationFlipV = 4
|
||||
orientationTranspose = 5
|
||||
orientationRotate270 = 6
|
||||
orientationTransverse = 7
|
||||
orientationRotate90 = 8
|
||||
)
|
||||
|
||||
// readOrientation tries to read the orientation EXIF flag from image data in r.
|
||||
// If the EXIF data block is not found or the orientation flag is not found
|
||||
// or any other error occurs while reading the data, it returns the
|
||||
// orientationUnspecified (0) value.
|
||||
func readOrientation(r io.Reader) orientation {
|
||||
const (
|
||||
markerSOI = 0xffd8
|
||||
markerAPP1 = 0xffe1
|
||||
exifHeader = 0x45786966
|
||||
byteOrderBE = 0x4d4d
|
||||
byteOrderLE = 0x4949
|
||||
orientationTag = 0x0112
|
||||
)
|
||||
|
||||
// Check if JPEG SOI marker is present.
|
||||
var soi uint16
|
||||
if err := binary.Read(r, binary.BigEndian, &soi); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if soi != markerSOI {
|
||||
return orientationUnspecified // Missing JPEG SOI marker.
|
||||
}
|
||||
|
||||
// Find JPEG APP1 marker.
|
||||
for {
|
||||
var marker, size uint16
|
||||
if err := binary.Read(r, binary.BigEndian, &marker); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if err := binary.Read(r, binary.BigEndian, &size); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if marker>>8 != 0xff {
|
||||
return orientationUnspecified // Invalid JPEG marker.
|
||||
}
|
||||
if marker == markerAPP1 {
|
||||
break
|
||||
}
|
||||
if size < 2 {
|
||||
return orientationUnspecified // Invalid block size.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, int64(size-2)); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
}
|
||||
|
||||
// Check if EXIF header is present.
|
||||
var header uint32
|
||||
if err := binary.Read(r, binary.BigEndian, &header); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if header != exifHeader {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Read byte order information.
|
||||
var (
|
||||
byteOrderTag uint16
|
||||
byteOrder binary.ByteOrder
|
||||
)
|
||||
if err := binary.Read(r, binary.BigEndian, &byteOrderTag); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
switch byteOrderTag {
|
||||
case byteOrderBE:
|
||||
byteOrder = binary.BigEndian
|
||||
case byteOrderLE:
|
||||
byteOrder = binary.LittleEndian
|
||||
default:
|
||||
return orientationUnspecified // Invalid byte order flag.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Skip the EXIF offset.
|
||||
var offset uint32
|
||||
if err := binary.Read(r, byteOrder, &offset); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if offset < 8 {
|
||||
return orientationUnspecified // Invalid offset value.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, int64(offset-8)); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Read the number of tags.
|
||||
var numTags uint16
|
||||
if err := binary.Read(r, byteOrder, &numTags); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Find the orientation tag.
|
||||
for i := 0; i < int(numTags); i++ {
|
||||
var tag uint16
|
||||
if err := binary.Read(r, byteOrder, &tag); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if tag != orientationTag {
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 10); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
continue
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 6); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
var val uint16
|
||||
if err := binary.Read(r, byteOrder, &val); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if val < 1 || val > 8 {
|
||||
return orientationUnspecified // Invalid tag value.
|
||||
}
|
||||
return orientation(val)
|
||||
}
|
||||
return orientationUnspecified // Missing orientation tag.
|
||||
}
|
||||
|
||||
// fixOrientation applies a transform to img corresponding to the given orientation flag.
|
||||
func fixOrientation(img image.Image, o orientation) image.Image {
|
||||
switch o {
|
||||
case orientationNormal:
|
||||
case orientationFlipH:
|
||||
img = FlipH(img)
|
||||
case orientationFlipV:
|
||||
img = FlipV(img)
|
||||
case orientationRotate90:
|
||||
img = Rotate90(img)
|
||||
case orientationRotate180:
|
||||
img = Rotate180(img)
|
||||
case orientationRotate270:
|
||||
img = Rotate270(img)
|
||||
case orientationTranspose:
|
||||
img = Transpose(img)
|
||||
case orientationTransverse:
|
||||
img = Transverse(img)
|
||||
}
|
||||
return img
|
||||
}
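Decode options and encode options compose as functional options. A sketch that honors the EXIF orientation on load and sets an explicit JPEG quality on save (paths are placeholders):

package main

import (
    "log"

    "github.com/disintegration/imaging"
)

func main() {
    // Apply the EXIF orientation tag (if present) while decoding.
    img, err := imaging.Open("photo.jpg", imaging.AutoOrientation(true)) // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    // Re-encode as JPEG with quality 80 instead of the default 95.
    if err := imaging.Save(img, "photo_fixed.jpg", imaging.JPEGQuality(80)); err != nil {
        log.Fatal(err)
    }
}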
|
||||
595
vendor/github.com/disintegration/imaging/resize.go
generated
vendored
Normal file
@@ -0,0 +1,595 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"math"
|
||||
)
|
||||
|
||||
type indexWeight struct {
|
||||
index int
|
||||
weight float64
|
||||
}
|
||||
|
||||
func precomputeWeights(dstSize, srcSize int, filter ResampleFilter) [][]indexWeight {
|
||||
du := float64(srcSize) / float64(dstSize)
|
||||
scale := du
|
||||
if scale < 1.0 {
|
||||
scale = 1.0
|
||||
}
|
||||
ru := math.Ceil(scale * filter.Support)
|
||||
|
||||
out := make([][]indexWeight, dstSize)
|
||||
tmp := make([]indexWeight, 0, dstSize*int(ru+2)*2)
|
||||
|
||||
for v := 0; v < dstSize; v++ {
|
||||
fu := (float64(v)+0.5)*du - 0.5
|
||||
|
||||
begin := int(math.Ceil(fu - ru))
|
||||
if begin < 0 {
|
||||
begin = 0
|
||||
}
|
||||
end := int(math.Floor(fu + ru))
|
||||
if end > srcSize-1 {
|
||||
end = srcSize - 1
|
||||
}
|
||||
|
||||
var sum float64
|
||||
for u := begin; u <= end; u++ {
|
||||
w := filter.Kernel((float64(u) - fu) / scale)
|
||||
if w != 0 {
|
||||
sum += w
|
||||
tmp = append(tmp, indexWeight{index: u, weight: w})
|
||||
}
|
||||
}
|
||||
if sum != 0 {
|
||||
for i := range tmp {
|
||||
tmp[i].weight /= sum
|
||||
}
|
||||
}
|
||||
|
||||
out[v] = tmp
|
||||
tmp = tmp[len(tmp):]
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// Resize resizes the image to the specified width and height using the specified resampling
|
||||
// filter and returns the transformed image. If one of width or height is 0, the image aspect
|
||||
// ratio is preserved.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Resize(srcImage, 800, 600, imaging.Lanczos)
|
||||
//
|
||||
func Resize(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
if dstW < 0 || dstH < 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
if dstW == 0 && dstH == 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
srcW := img.Bounds().Dx()
|
||||
srcH := img.Bounds().Dy()
|
||||
if srcW <= 0 || srcH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
// If new width or height is 0 then preserve aspect ratio, minimum 1px.
|
||||
if dstW == 0 {
|
||||
tmpW := float64(dstH) * float64(srcW) / float64(srcH)
|
||||
dstW = int(math.Max(1.0, math.Floor(tmpW+0.5)))
|
||||
}
|
||||
if dstH == 0 {
|
||||
tmpH := float64(dstW) * float64(srcH) / float64(srcW)
|
||||
dstH = int(math.Max(1.0, math.Floor(tmpH+0.5)))
|
||||
}
|
||||
|
||||
if filter.Support <= 0 {
|
||||
// Nearest-neighbor special case.
|
||||
return resizeNearest(img, dstW, dstH)
|
||||
}
|
||||
|
||||
if srcW != dstW && srcH != dstH {
|
||||
return resizeVertical(resizeHorizontal(img, dstW, filter), dstH, filter)
|
||||
}
|
||||
if srcW != dstW {
|
||||
return resizeHorizontal(img, dstW, filter)
|
||||
}
|
||||
if srcH != dstH {
|
||||
return resizeVertical(img, dstH, filter)
|
||||
}
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
func resizeHorizontal(img image.Image, width int, filter ResampleFilter) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, width, src.h))
|
||||
weights := precomputeWeights(width, src.w, filter)
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
j0 := y * dst.Stride
|
||||
for x := range weights {
|
||||
var r, g, b, a float64
|
||||
for _, w := range weights[x] {
|
||||
i := w.index * 4
|
||||
s := scanLine[i : i+4 : i+4]
|
||||
aw := float64(s[3]) * w.weight
|
||||
r += float64(s[0]) * aw
|
||||
g += float64(s[1]) * aw
|
||||
b += float64(s[2]) * aw
|
||||
a += aw
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := j0 + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
func resizeVertical(img image.Image, height int, filter ResampleFilter) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, height))
|
||||
weights := precomputeWeights(height, src.h, filter)
|
||||
parallel(0, src.w, func(xs <-chan int) {
|
||||
scanLine := make([]uint8, src.h*4)
|
||||
for x := range xs {
|
||||
src.scan(x, 0, x+1, src.h, scanLine)
|
||||
for y := range weights {
|
||||
var r, g, b, a float64
|
||||
for _, w := range weights[y] {
|
||||
i := w.index * 4
|
||||
s := scanLine[i : i+4 : i+4]
|
||||
aw := float64(s[3]) * w.weight
|
||||
r += float64(s[0]) * aw
|
||||
g += float64(s[1]) * aw
|
||||
b += float64(s[2]) * aw
|
||||
a += aw
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := y*dst.Stride + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// resizeNearest is a fast nearest-neighbor resize, no filtering.
|
||||
func resizeNearest(img image.Image, width, height int) *image.NRGBA {
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, width, height))
|
||||
dx := float64(img.Bounds().Dx()) / float64(width)
|
||||
dy := float64(img.Bounds().Dy()) / float64(height)
|
||||
|
||||
if dx > 1 && dy > 1 {
|
||||
src := newScanner(img)
|
||||
parallel(0, height, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
srcY := int((float64(y) + 0.5) * dy)
|
||||
dstOff := y * dst.Stride
|
||||
for x := 0; x < width; x++ {
|
||||
srcX := int((float64(x) + 0.5) * dx)
|
||||
src.scan(srcX, srcY, srcX+1, srcY+1, dst.Pix[dstOff:dstOff+4])
|
||||
dstOff += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
} else {
|
||||
src := toNRGBA(img)
|
||||
parallel(0, height, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
srcY := int((float64(y) + 0.5) * dy)
|
||||
srcOff0 := srcY * src.Stride
|
||||
dstOff := y * dst.Stride
|
||||
for x := 0; x < width; x++ {
|
||||
srcX := int((float64(x) + 0.5) * dx)
|
||||
srcOff := srcOff0 + srcX*4
|
||||
copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
|
||||
dstOff += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
// Fit scales down the image using the specified resample filter to fit the specified
|
||||
// maximum width and height and returns the transformed image.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)
|
||||
//
|
||||
func Fit(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
maxW, maxH := width, height
|
||||
|
||||
if maxW <= 0 || maxH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
|
||||
if srcW <= 0 || srcH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
if srcW <= maxW && srcH <= maxH {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
srcAspectRatio := float64(srcW) / float64(srcH)
|
||||
maxAspectRatio := float64(maxW) / float64(maxH)
|
||||
|
||||
var newW, newH int
|
||||
if srcAspectRatio > maxAspectRatio {
|
||||
newW = maxW
|
||||
newH = int(float64(newW) / srcAspectRatio)
|
||||
} else {
|
||||
newH = maxH
|
||||
newW = int(float64(newH) * srcAspectRatio)
|
||||
}
|
||||
|
||||
return Resize(img, newW, newH, filter)
|
||||
}
|
||||
|
||||
// Fill creates an image with the specified dimensions and fills it with the scaled source image.
|
||||
// To achieve the correct aspect ratio without stretching, the source image will be cropped.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Fill(srcImage, 800, 600, imaging.Center, imaging.Lanczos)
|
||||
//
|
||||
func Fill(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
if dstW <= 0 || dstH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
|
||||
if srcW <= 0 || srcH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
if srcW == dstW && srcH == dstH {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
if srcW >= 100 && srcH >= 100 {
|
||||
return cropAndResize(img, dstW, dstH, anchor, filter)
|
||||
}
|
||||
return resizeAndCrop(img, dstW, dstH, anchor, filter)
|
||||
}
|
||||
|
||||
// cropAndResize crops the image to the smallest possible size that has the required aspect ratio using
|
||||
// the given anchor point, then scales it to the specified dimensions and returns the transformed image.
|
||||
//
|
||||
// This is generally faster than resizing first, but may result in inaccuracies when used on small source images.
|
||||
func cropAndResize(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
srcAspectRatio := float64(srcW) / float64(srcH)
|
||||
dstAspectRatio := float64(dstW) / float64(dstH)
|
||||
|
||||
var tmp *image.NRGBA
|
||||
if srcAspectRatio < dstAspectRatio {
|
||||
cropH := float64(srcW) * float64(dstH) / float64(dstW)
|
||||
tmp = CropAnchor(img, srcW, int(math.Max(1, cropH)+0.5), anchor)
|
||||
} else {
|
||||
cropW := float64(srcH) * float64(dstW) / float64(dstH)
|
||||
tmp = CropAnchor(img, int(math.Max(1, cropW)+0.5), srcH, anchor)
|
||||
}
|
||||
|
||||
return Resize(tmp, dstW, dstH, filter)
|
||||
}
|
||||
|
||||
// resizeAndCrop resizes the image to the smallest possible size that will cover the specified dimensions,
|
||||
// crops the resized image to the specified dimensions using the given anchor point and returns
|
||||
// the transformed image.
|
||||
func resizeAndCrop(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
srcAspectRatio := float64(srcW) / float64(srcH)
|
||||
dstAspectRatio := float64(dstW) / float64(dstH)
|
||||
|
||||
var tmp *image.NRGBA
|
||||
if srcAspectRatio < dstAspectRatio {
|
||||
tmp = Resize(img, dstW, 0, filter)
|
||||
} else {
|
||||
tmp = Resize(img, 0, dstH, filter)
|
||||
}
|
||||
|
||||
return CropAnchor(tmp, dstW, dstH, anchor)
|
||||
}
|
||||
|
||||
// Thumbnail scales the image up or down using the specified resample filter, crops it
|
||||
// to the specified width and height and returns the transformed image.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Thumbnail(srcImage, 100, 100, imaging.Lanczos)
|
||||
//
|
||||
func Thumbnail(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
return Fill(img, width, height, Center, filter)
|
||||
}
|
||||
|
||||
// ResampleFilter specifies a resampling filter to be used for image resizing.
|
||||
//
|
||||
// General filter recommendations:
|
||||
//
|
||||
// - Lanczos
|
||||
// A high-quality resampling filter for photographic images yielding sharp results.
|
||||
//
|
||||
// - CatmullRom
|
||||
// A sharp cubic filter that is faster than Lanczos filter while providing similar results.
|
||||
//
|
||||
// - MitchellNetravali
|
||||
// A cubic filter that produces smoother results with fewer ringing artifacts than CatmullRom.
|
||||
//
|
||||
// - Linear
|
||||
// Bilinear resampling filter, produces a smooth output. Faster than cubic filters.
|
||||
//
|
||||
// - Box
|
||||
// Simple and fast averaging filter appropriate for downscaling.
|
||||
// When upscaling it's similar to NearestNeighbor.
|
||||
//
|
||||
// - NearestNeighbor
|
||||
// Fastest resampling filter, no antialiasing.
|
||||
//
|
||||
type ResampleFilter struct {
|
||||
Support float64
|
||||
Kernel func(float64) float64
|
||||
}
|
||||
|
||||
// NearestNeighbor is a nearest-neighbor filter (no anti-aliasing).
|
||||
var NearestNeighbor ResampleFilter
|
||||
|
||||
// Box filter (averaging pixels).
|
||||
var Box ResampleFilter
|
||||
|
||||
// Linear filter.
|
||||
var Linear ResampleFilter
|
||||
|
||||
// Hermite cubic spline filter (BC-spline; B=0; C=0).
|
||||
var Hermite ResampleFilter
|
||||
|
||||
// MitchellNetravali is Mitchell-Netravali cubic filter (BC-spline; B=1/3; C=1/3).
|
||||
var MitchellNetravali ResampleFilter
|
||||
|
||||
// CatmullRom is a sharp cubic Catmull-Rom filter (BC-spline; B=0; C=0.5).
|
||||
var CatmullRom ResampleFilter
|
||||
|
||||
// BSpline is a smooth cubic filter (BC-spline; B=1; C=0).
|
||||
var BSpline ResampleFilter
|
||||
|
||||
// Gaussian is a Gaussian blurring filter.
|
||||
var Gaussian ResampleFilter
|
||||
|
||||
// Bartlett is a Bartlett-windowed sinc filter (3 lobes).
|
||||
var Bartlett ResampleFilter
|
||||
|
||||
// Lanczos filter (3 lobes).
|
||||
var Lanczos ResampleFilter
|
||||
|
||||
// Hann is a Hann-windowed sinc filter (3 lobes).
|
||||
var Hann ResampleFilter
|
||||
|
||||
// Hamming is a Hamming-windowed sinc filter (3 lobes).
|
||||
var Hamming ResampleFilter
|
||||
|
||||
// Blackman is a Blackman-windowed sinc filter (3 lobes).
|
||||
var Blackman ResampleFilter
|
||||
|
||||
// Welch is a Welch-windowed sinc filter (parabolic window, 3 lobes).
|
||||
var Welch ResampleFilter
|
||||
|
||||
// Cosine is a Cosine-windowed sinc filter (3 lobes).
|
||||
var Cosine ResampleFilter
|
||||
|
||||
func bcspline(x, b, c float64) float64 {
|
||||
var y float64
|
||||
x = math.Abs(x)
|
||||
if x < 1.0 {
|
||||
y = ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6
|
||||
} else if x < 2.0 {
|
||||
y = ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6
|
||||
}
|
||||
return y
|
||||
}
|
||||
|
||||
func sinc(x float64) float64 {
|
||||
if x == 0 {
|
||||
return 1
|
||||
}
|
||||
return math.Sin(math.Pi*x) / (math.Pi * x)
|
||||
}
|
||||
|
||||
func init() {
|
||||
NearestNeighbor = ResampleFilter{
|
||||
Support: 0.0, // special case - not applying the filter
|
||||
}
|
||||
|
||||
Box = ResampleFilter{
|
||||
Support: 0.5,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x <= 0.5 {
|
||||
return 1.0
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Linear = ResampleFilter{
|
||||
Support: 1.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 1.0 {
|
||||
return 1.0 - x
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Hermite = ResampleFilter{
|
||||
Support: 1.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 1.0 {
|
||||
return bcspline(x, 0.0, 0.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
MitchellNetravali = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return bcspline(x, 1.0/3.0, 1.0/3.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
CatmullRom = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return bcspline(x, 0.0, 0.5)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
BSpline = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return bcspline(x, 1.0, 0.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Gaussian = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return math.Exp(-2 * x * x)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Bartlett = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (3.0 - x) / 3.0
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Lanczos = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * sinc(x/3.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Hann = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (0.5 + 0.5*math.Cos(math.Pi*x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Hamming = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (0.54 + 0.46*math.Cos(math.Pi*x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Blackman = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (0.42 - 0.5*math.Cos(math.Pi*x/3.0+math.Pi) + 0.08*math.Cos(2.0*math.Pi*x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Welch = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (1.0 - (x * x / 9.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Cosine = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * math.Cos((math.Pi/2.0)*(x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
}
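A usage sketch for the resizing entry points defined above (Fit, Fill and Thumbnail follow the same pattern); passing 0 for one dimension preserves the aspect ratio, and the filter argument selects the resampling kernel (paths are placeholders):

package main

import (
    "log"

    "github.com/disintegration/imaging"
)

func main() {
    src, err := imaging.Open("in.jpg") // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    resized := imaging.Resize(src, 800, 0, imaging.Lanczos)       // height derived from the aspect ratio
    thumb := imaging.Thumbnail(src, 100, 100, imaging.CatmullRom) // center-cropped 100x100 thumbnail
    if err := imaging.Save(resized, "resized.jpg"); err != nil {
        log.Fatal(err)
    }
    if err := imaging.Save(thumb, "thumb.jpg"); err != nil {
        log.Fatal(err)
    }
}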
|
||||
285
vendor/github.com/disintegration/imaging/scanner.go
generated
vendored
Normal file
@@ -0,0 +1,285 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
type scanner struct {
|
||||
image image.Image
|
||||
w, h int
|
||||
palette []color.NRGBA
|
||||
}
|
||||
|
||||
func newScanner(img image.Image) *scanner {
|
||||
s := &scanner{
|
||||
image: img,
|
||||
w: img.Bounds().Dx(),
|
||||
h: img.Bounds().Dy(),
|
||||
}
|
||||
if img, ok := img.(*image.Paletted); ok {
|
||||
s.palette = make([]color.NRGBA, len(img.Palette))
|
||||
for i := 0; i < len(img.Palette); i++ {
|
||||
s.palette[i] = color.NRGBAModel.Convert(img.Palette[i]).(color.NRGBA)
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// scan scans the given rectangular region of the image into dst.
|
||||
func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
|
||||
switch img := s.image.(type) {
|
||||
case *image.NRGBA:
|
||||
size := (x2 - x1) * 4
|
||||
j := 0
|
||||
i := y1*img.Stride + x1*4
|
||||
if size == 4 {
|
||||
for y := y1; y < y2; y++ {
|
||||
d := dst[j : j+4 : j+4]
|
||||
s := img.Pix[i : i+4 : i+4]
|
||||
d[0] = s[0]
|
||||
d[1] = s[1]
|
||||
d[2] = s[2]
|
||||
d[3] = s[3]
|
||||
j += size
|
||||
i += img.Stride
|
||||
}
|
||||
} else {
|
||||
for y := y1; y < y2; y++ {
|
||||
copy(dst[j:j+size], img.Pix[i:i+size])
|
||||
j += size
|
||||
i += img.Stride
|
||||
}
|
||||
}
|
||||
|
||||
case *image.NRGBA64:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*8
|
||||
for x := x1; x < x2; x++ {
|
||||
s := img.Pix[i : i+8 : i+8]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = s[0]
|
||||
d[1] = s[2]
|
||||
d[2] = s[4]
|
||||
d[3] = s[6]
|
||||
j += 4
|
||||
i += 8
|
||||
}
|
||||
}
|
||||
|
||||
case *image.RGBA:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*4
|
||||
for x := x1; x < x2; x++ {
|
||||
d := dst[j : j+4 : j+4]
|
||||
a := img.Pix[i+3]
|
||||
switch a {
|
||||
case 0:
|
||||
d[0] = 0
|
||||
d[1] = 0
|
||||
d[2] = 0
|
||||
d[3] = a
|
||||
case 0xff:
|
||||
s := img.Pix[i : i+4 : i+4]
|
||||
d[0] = s[0]
|
||||
d[1] = s[1]
|
||||
d[2] = s[2]
|
||||
d[3] = a
|
||||
default:
|
||||
s := img.Pix[i : i+4 : i+4]
|
||||
r16 := uint16(s[0])
|
||||
g16 := uint16(s[1])
|
||||
b16 := uint16(s[2])
|
||||
a16 := uint16(a)
|
||||
d[0] = uint8(r16 * 0xff / a16)
|
||||
d[1] = uint8(g16 * 0xff / a16)
|
||||
d[2] = uint8(b16 * 0xff / a16)
|
||||
d[3] = a
|
||||
}
|
||||
j += 4
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
|
||||
case *image.RGBA64:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*8
|
||||
for x := x1; x < x2; x++ {
|
||||
s := img.Pix[i : i+8 : i+8]
|
||||
d := dst[j : j+4 : j+4]
|
||||
a := s[6]
|
||||
switch a {
|
||||
case 0:
|
||||
d[0] = 0
|
||||
d[1] = 0
|
||||
d[2] = 0
|
||||
case 0xff:
|
||||
d[0] = s[0]
|
||||
d[1] = s[2]
|
||||
d[2] = s[4]
|
||||
default:
|
||||
r32 := uint32(s[0])<<8 | uint32(s[1])
|
||||
g32 := uint32(s[2])<<8 | uint32(s[3])
|
||||
b32 := uint32(s[4])<<8 | uint32(s[5])
|
||||
a32 := uint32(s[6])<<8 | uint32(s[7])
|
||||
d[0] = uint8((r32 * 0xffff / a32) >> 8)
|
||||
d[1] = uint8((g32 * 0xffff / a32) >> 8)
|
||||
d[2] = uint8((b32 * 0xffff / a32) >> 8)
|
||||
}
|
||||
d[3] = a
|
||||
j += 4
|
||||
i += 8
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Gray:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1
|
||||
for x := x1; x < x2; x++ {
|
||||
c := img.Pix[i]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = c
|
||||
d[1] = c
|
||||
d[2] = c
|
||||
d[3] = 0xff
|
||||
j += 4
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Gray16:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*2
|
||||
for x := x1; x < x2; x++ {
|
||||
c := img.Pix[i]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = c
|
||||
d[1] = c
|
||||
d[2] = c
|
||||
d[3] = 0xff
|
||||
j += 4
|
||||
i += 2
|
||||
}
|
||||
}
|
||||
|
||||
case *image.YCbCr:
|
||||
j := 0
|
||||
x1 += img.Rect.Min.X
|
||||
x2 += img.Rect.Min.X
|
||||
y1 += img.Rect.Min.Y
|
||||
y2 += img.Rect.Min.Y
|
||||
|
||||
hy := img.Rect.Min.Y / 2
|
||||
hx := img.Rect.Min.X / 2
|
||||
for y := y1; y < y2; y++ {
|
||||
iy := (y-img.Rect.Min.Y)*img.YStride + (x1 - img.Rect.Min.X)
|
||||
|
||||
var yBase int
|
||||
switch img.SubsampleRatio {
|
||||
case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio422:
|
||||
yBase = (y - img.Rect.Min.Y) * img.CStride
|
||||
case image.YCbCrSubsampleRatio420, image.YCbCrSubsampleRatio440:
|
||||
yBase = (y/2 - hy) * img.CStride
|
||||
}
|
||||
|
||||
for x := x1; x < x2; x++ {
|
||||
var ic int
|
||||
switch img.SubsampleRatio {
|
||||
case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio440:
|
||||
ic = yBase + (x - img.Rect.Min.X)
|
||||
case image.YCbCrSubsampleRatio422, image.YCbCrSubsampleRatio420:
|
||||
ic = yBase + (x/2 - hx)
|
||||
default:
|
||||
ic = img.COffset(x, y)
|
||||
}
|
||||
|
||||
yy1 := int32(img.Y[iy]) * 0x10101
|
||||
cb1 := int32(img.Cb[ic]) - 128
|
||||
cr1 := int32(img.Cr[ic]) - 128
|
||||
|
||||
r := yy1 + 91881*cr1
|
||||
if uint32(r)&0xff000000 == 0 {
|
||||
r >>= 16
|
||||
} else {
|
||||
r = ^(r >> 31)
|
||||
}
|
||||
|
||||
g := yy1 - 22554*cb1 - 46802*cr1
|
||||
if uint32(g)&0xff000000 == 0 {
|
||||
g >>= 16
|
||||
} else {
|
||||
g = ^(g >> 31)
|
||||
}
|
||||
|
||||
b := yy1 + 116130*cb1
|
||||
if uint32(b)&0xff000000 == 0 {
|
||||
b >>= 16
|
||||
} else {
|
||||
b = ^(b >> 31)
|
||||
}
|
||||
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = uint8(r)
|
||||
d[1] = uint8(g)
|
||||
d[2] = uint8(b)
|
||||
d[3] = 0xff
|
||||
|
||||
iy++
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Paletted:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1
|
||||
for x := x1; x < x2; x++ {
|
||||
c := s.palette[img.Pix[i]]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = c.R
|
||||
d[1] = c.G
|
||||
d[2] = c.B
|
||||
d[3] = c.A
|
||||
j += 4
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
j := 0
|
||||
b := s.image.Bounds()
|
||||
x1 += b.Min.X
|
||||
x2 += b.Min.X
|
||||
y1 += b.Min.Y
|
||||
y2 += b.Min.Y
|
||||
for y := y1; y < y2; y++ {
|
||||
for x := x1; x < x2; x++ {
|
||||
r16, g16, b16, a16 := s.image.At(x, y).RGBA()
|
||||
d := dst[j : j+4 : j+4]
|
||||
switch a16 {
|
||||
case 0xffff:
|
||||
d[0] = uint8(r16 >> 8)
|
||||
d[1] = uint8(g16 >> 8)
|
||||
d[2] = uint8(b16 >> 8)
|
||||
d[3] = 0xff
|
||||
case 0:
|
||||
d[0] = 0
|
||||
d[1] = 0
|
||||
d[2] = 0
|
||||
d[3] = 0
|
||||
default:
|
||||
d[0] = uint8(((r16 * 0xffff) / a16) >> 8)
|
||||
d[1] = uint8(((g16 * 0xffff) / a16) >> 8)
|
||||
d[2] = uint8(((b16 * 0xffff) / a16) >> 8)
|
||||
d[3] = uint8(a16 >> 8)
|
||||
}
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
}
|
||||
}
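The RGBA and RGBA64 branches above un-premultiply the alpha channel; in the 8-bit case this is simply channel*255/alpha. A tiny self-contained sketch of that conversion (the helper name is illustrative only):

package main

import "fmt"

// unpremultiply converts one 8-bit premultiplied-alpha channel value to
// straight (non-premultiplied) alpha, mirroring the *image.RGBA branch above.
func unpremultiply(c, a uint8) uint8 {
    if a == 0 {
        return 0
    }
    return uint8(uint16(c) * 0xff / uint16(a))
}

func main() {
    // A 50%-transparent pure red pixel stored premultiplied: R=128, A=128.
    fmt.Println(unpremultiply(128, 128)) // prints 255
}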
|
||||
249
vendor/github.com/disintegration/imaging/tools.go
generated
vendored
Normal file
@@ -0,0 +1,249 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image"
|
||||
"image/color"
|
||||
"math"
|
||||
)
|
||||
|
||||
// New creates a new image with the specified width and height, and fills it with the specified color.
|
||||
func New(width, height int, fillColor color.Color) *image.NRGBA {
|
||||
if width <= 0 || height <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
c := color.NRGBAModel.Convert(fillColor).(color.NRGBA)
|
||||
if (c == color.NRGBA{0, 0, 0, 0}) {
|
||||
return image.NewNRGBA(image.Rect(0, 0, width, height))
|
||||
}
|
||||
|
||||
return &image.NRGBA{
|
||||
Pix: bytes.Repeat([]byte{c.R, c.G, c.B, c.A}, width*height),
|
||||
Stride: 4 * width,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}
|
||||
}
|
||||
|
||||
// Clone returns a copy of the given image.
|
||||
func Clone(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
size := src.w * 4
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+size])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Anchor is the anchor point for image alignment.
|
||||
type Anchor int
|
||||
|
||||
// Anchor point positions.
|
||||
const (
|
||||
Center Anchor = iota
|
||||
TopLeft
|
||||
Top
|
||||
TopRight
|
||||
Left
|
||||
Right
|
||||
BottomLeft
|
||||
Bottom
|
||||
BottomRight
|
||||
)
|
||||
|
||||
func anchorPt(b image.Rectangle, w, h int, anchor Anchor) image.Point {
|
||||
var x, y int
|
||||
switch anchor {
|
||||
case TopLeft:
|
||||
x = b.Min.X
|
||||
y = b.Min.Y
|
||||
case Top:
|
||||
x = b.Min.X + (b.Dx()-w)/2
|
||||
y = b.Min.Y
|
||||
case TopRight:
|
||||
x = b.Max.X - w
|
||||
y = b.Min.Y
|
||||
case Left:
|
||||
x = b.Min.X
|
||||
y = b.Min.Y + (b.Dy()-h)/2
|
||||
case Right:
|
||||
x = b.Max.X - w
|
||||
y = b.Min.Y + (b.Dy()-h)/2
|
||||
case BottomLeft:
|
||||
x = b.Min.X
|
||||
y = b.Max.Y - h
|
||||
case Bottom:
|
||||
x = b.Min.X + (b.Dx()-w)/2
|
||||
y = b.Max.Y - h
|
||||
case BottomRight:
|
||||
x = b.Max.X - w
|
||||
y = b.Max.Y - h
|
||||
default:
|
||||
x = b.Min.X + (b.Dx()-w)/2
|
||||
y = b.Min.Y + (b.Dy()-h)/2
|
||||
}
|
||||
return image.Pt(x, y)
|
||||
}
|
||||
|
||||
// Crop cuts out a rectangular region with the specified bounds
|
||||
// from the image and returns the cropped image.
|
||||
func Crop(img image.Image, rect image.Rectangle) *image.NRGBA {
|
||||
r := rect.Intersect(img.Bounds()).Sub(img.Bounds().Min)
|
||||
if r.Empty() {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, r.Dx(), r.Dy()))
|
||||
rowSize := r.Dx() * 4
|
||||
parallel(r.Min.Y, r.Max.Y, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := (y - r.Min.Y) * dst.Stride
|
||||
src.scan(r.Min.X, y, r.Max.X, y+1, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// CropAnchor cuts out a rectangular region with the specified size
|
||||
// from the image using the specified anchor point and returns the cropped image.
|
||||
func CropAnchor(img image.Image, width, height int, anchor Anchor) *image.NRGBA {
|
||||
srcBounds := img.Bounds()
|
||||
pt := anchorPt(srcBounds, width, height, anchor)
|
||||
r := image.Rect(0, 0, width, height).Add(pt)
|
||||
b := srcBounds.Intersect(r)
|
||||
return Crop(img, b)
|
||||
}
|
||||
|
||||
// CropCenter cuts out a rectangular region with the specified size
|
||||
// from the center of the image and returns the cropped image.
|
||||
func CropCenter(img image.Image, width, height int) *image.NRGBA {
|
||||
return CropAnchor(img, width, height, Center)
|
||||
}
|
||||
|
||||
// Paste pastes the img image to the background image at the specified position and returns the combined image.
|
||||
func Paste(background, img image.Image, pos image.Point) *image.NRGBA {
|
||||
dst := Clone(background)
|
||||
pos = pos.Sub(background.Bounds().Min)
|
||||
pasteRect := image.Rectangle{Min: pos, Max: pos.Add(img.Bounds().Size())}
|
||||
interRect := pasteRect.Intersect(dst.Bounds())
|
||||
if interRect.Empty() {
|
||||
return dst
|
||||
}
|
||||
src := newScanner(img)
|
||||
parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
x1 := interRect.Min.X - pasteRect.Min.X
|
||||
x2 := interRect.Max.X - pasteRect.Min.X
|
||||
y1 := y - pasteRect.Min.Y
|
||||
y2 := y1 + 1
|
||||
i1 := y*dst.Stride + interRect.Min.X*4
|
||||
i2 := i1 + interRect.Dx()*4
|
||||
src.scan(x1, y1, x2, y2, dst.Pix[i1:i2])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// PasteCenter pastes the img image to the center of the background image and returns the combined image.
|
||||
func PasteCenter(background, img image.Image) *image.NRGBA {
|
||||
bgBounds := background.Bounds()
|
||||
bgW := bgBounds.Dx()
|
||||
bgH := bgBounds.Dy()
|
||||
bgMinX := bgBounds.Min.X
|
||||
bgMinY := bgBounds.Min.Y
|
||||
|
||||
centerX := bgMinX + bgW/2
|
||||
centerY := bgMinY + bgH/2
|
||||
|
||||
x0 := centerX - img.Bounds().Dx()/2
|
||||
y0 := centerY - img.Bounds().Dy()/2
|
||||
|
||||
return Paste(background, img, image.Pt(x0, y0))
|
||||
}
|
||||
|
||||
// Overlay draws the img image over the background image at given position
|
||||
// and returns the combined image. Opacity parameter is the opacity of the img
|
||||
// image layer, used to compose the images, it must be from 0.0 to 1.0.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// // Draw spriteImage over backgroundImage at the given position (x=50, y=50).
|
||||
// dstImage := imaging.Overlay(backgroundImage, spriteImage, image.Pt(50, 50), 1.0)
|
||||
//
|
||||
// // Blend two opaque images of the same size.
|
||||
// dstImage := imaging.Overlay(imageOne, imageTwo, image.Pt(0, 0), 0.5)
|
||||
//
|
||||
func Overlay(background, img image.Image, pos image.Point, opacity float64) *image.NRGBA {
|
||||
opacity = math.Min(math.Max(opacity, 0.0), 1.0) // Ensure 0.0 <= opacity <= 1.0.
|
||||
dst := Clone(background)
|
||||
pos = pos.Sub(background.Bounds().Min)
|
||||
pasteRect := image.Rectangle{Min: pos, Max: pos.Add(img.Bounds().Size())}
|
||||
interRect := pasteRect.Intersect(dst.Bounds())
|
||||
if interRect.Empty() {
|
||||
return dst
|
||||
}
|
||||
src := newScanner(img)
|
||||
parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, interRect.Dx()*4)
|
||||
for y := range ys {
|
||||
x1 := interRect.Min.X - pasteRect.Min.X
|
||||
x2 := interRect.Max.X - pasteRect.Min.X
|
||||
y1 := y - pasteRect.Min.Y
|
||||
y2 := y1 + 1
|
||||
src.scan(x1, y1, x2, y2, scanLine)
|
||||
i := y*dst.Stride + interRect.Min.X*4
|
||||
j := 0
|
||||
for x := interRect.Min.X; x < interRect.Max.X; x++ {
|
||||
d := dst.Pix[i : i+4 : i+4]
|
||||
r1 := float64(d[0])
|
||||
g1 := float64(d[1])
|
||||
b1 := float64(d[2])
|
||||
a1 := float64(d[3])
|
||||
|
||||
s := scanLine[j : j+4 : j+4]
|
||||
r2 := float64(s[0])
|
||||
g2 := float64(s[1])
|
||||
b2 := float64(s[2])
|
||||
a2 := float64(s[3])
|
||||
|
||||
coef2 := opacity * a2 / 255
|
||||
coef1 := (1 - coef2) * a1 / 255
|
||||
coefSum := coef1 + coef2
|
||||
coef1 /= coefSum
|
||||
coef2 /= coefSum
|
||||
|
||||
d[0] = uint8(r1*coef1 + r2*coef2)
|
||||
d[1] = uint8(g1*coef1 + g2*coef2)
|
||||
d[2] = uint8(b1*coef1 + b2*coef2)
|
||||
d[3] = uint8(math.Min(a1+a2*opacity*(255-a1)/255, 255))
|
||||
|
||||
i += 4
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// OverlayCenter overlays the img image to the center of the background image and
|
||||
// returns the combined image. Opacity parameter is the opacity of the img
|
||||
// image layer, used to compose the images, it must be from 0.0 to 1.0.
|
||||
func OverlayCenter(background, img image.Image, opacity float64) *image.NRGBA {
|
||||
bgBounds := background.Bounds()
|
||||
bgW := bgBounds.Dx()
|
||||
bgH := bgBounds.Dy()
|
||||
bgMinX := bgBounds.Min.X
|
||||
bgMinY := bgBounds.Min.Y
|
||||
|
||||
centerX := bgMinX + bgW/2
|
||||
centerY := bgMinY + bgH/2
|
||||
|
||||
x0 := centerX - img.Bounds().Dx()/2
|
||||
y0 := centerY - img.Bounds().Dy()/2
|
||||
|
||||
return Overlay(background, img, image.Point{x0, y0}, opacity)
|
||||
}
|
||||
268
vendor/github.com/disintegration/imaging/transform.go
generated
vendored
Normal file
@@ -0,0 +1,268 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
"math"
|
||||
)
|
||||
|
||||
// FlipH flips the image horizontally (from left to right) and returns the transformed image.
|
||||
func FlipH(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.w
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstY
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// FlipV flips the image vertically (from top to bottom) and returns the transformed image.
|
||||
func FlipV(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.w
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstH - dstY - 1
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Transpose flips the image horizontally and rotates 90 degrees counter-clockwise.
|
||||
func Transpose(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstY
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Transverse flips the image vertically and rotates 90 degrees counter-clockwise.
|
||||
func Transverse(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstH - dstY - 1
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate90 rotates the image 90 degrees counter-clockwise and returns the transformed image.
|
||||
func Rotate90(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstH - dstY - 1
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate180 rotates the image 180 degrees counter-clockwise and returns the transformed image.
|
||||
func Rotate180(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.w
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstH - dstY - 1
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate270 rotates the image 270 degrees counter-clockwise and returns the transformed image.
|
||||
func Rotate270(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstY
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate rotates an image by the given angle counter-clockwise.
|
||||
// The angle parameter is the rotation angle in degrees.
|
||||
// The bgColor parameter specifies the color of the uncovered zone after the rotation.
|
||||
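//
// A minimal usage sketch, assuming srcImage is an image.Image value:
//
//	dstImage := imaging.Rotate(srcImage, 45, color.Black)
//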
func Rotate(img image.Image, angle float64, bgColor color.Color) *image.NRGBA {
|
||||
angle = angle - math.Floor(angle/360)*360
|
||||
|
||||
switch angle {
|
||||
case 0:
|
||||
return Clone(img)
|
||||
case 90:
|
||||
return Rotate90(img)
|
||||
case 180:
|
||||
return Rotate180(img)
|
||||
case 270:
|
||||
return Rotate270(img)
|
||||
}
|
||||
|
||||
src := toNRGBA(img)
|
||||
srcW := src.Bounds().Max.X
|
||||
srcH := src.Bounds().Max.Y
|
||||
dstW, dstH := rotatedSize(srcW, srcH, angle)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
|
||||
if dstW <= 0 || dstH <= 0 {
|
||||
return dst
|
||||
}
|
||||
|
||||
srcXOff := float64(srcW)/2 - 0.5
|
||||
srcYOff := float64(srcH)/2 - 0.5
|
||||
dstXOff := float64(dstW)/2 - 0.5
|
||||
dstYOff := float64(dstH)/2 - 0.5
|
||||
|
||||
bgColorNRGBA := color.NRGBAModel.Convert(bgColor).(color.NRGBA)
|
||||
sin, cos := math.Sincos(math.Pi * angle / 180)
|
||||
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
for dstX := 0; dstX < dstW; dstX++ {
|
||||
xf, yf := rotatePoint(float64(dstX)-dstXOff, float64(dstY)-dstYOff, sin, cos)
|
||||
xf, yf = xf+srcXOff, yf+srcYOff
|
||||
interpolatePoint(dst, dstX, dstY, src, xf, yf, bgColorNRGBA)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
func rotatePoint(x, y, sin, cos float64) (float64, float64) {
|
||||
return x*cos - y*sin, x*sin + y*cos
|
||||
}
|
||||
|
||||
func rotatedSize(w, h int, angle float64) (int, int) {
|
||||
if w <= 0 || h <= 0 {
|
||||
return 0, 0
|
||||
}
|
||||
|
||||
sin, cos := math.Sincos(math.Pi * angle / 180)
|
||||
x1, y1 := rotatePoint(float64(w-1), 0, sin, cos)
|
||||
x2, y2 := rotatePoint(float64(w-1), float64(h-1), sin, cos)
|
||||
x3, y3 := rotatePoint(0, float64(h-1), sin, cos)
|
||||
|
||||
minx := math.Min(x1, math.Min(x2, math.Min(x3, 0)))
|
||||
maxx := math.Max(x1, math.Max(x2, math.Max(x3, 0)))
|
||||
miny := math.Min(y1, math.Min(y2, math.Min(y3, 0)))
|
||||
maxy := math.Max(y1, math.Max(y2, math.Max(y3, 0)))
|
||||
|
||||
neww := maxx - minx + 1
|
||||
if neww-math.Floor(neww) > 0.1 {
|
||||
neww++
|
||||
}
|
||||
newh := maxy - miny + 1
|
||||
if newh-math.Floor(newh) > 0.1 {
|
||||
newh++
|
||||
}
|
||||
|
||||
return int(neww), int(newh)
|
||||
}
|
||||
|
||||
func interpolatePoint(dst *image.NRGBA, dstX, dstY int, src *image.NRGBA, xf, yf float64, bgColor color.NRGBA) {
|
||||
j := dstY*dst.Stride + dstX*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
|
||||
x0 := int(math.Floor(xf))
|
||||
y0 := int(math.Floor(yf))
|
||||
bounds := src.Bounds()
|
||||
if !image.Pt(x0, y0).In(image.Rect(bounds.Min.X-1, bounds.Min.Y-1, bounds.Max.X, bounds.Max.Y)) {
|
||||
d[0] = bgColor.R
|
||||
d[1] = bgColor.G
|
||||
d[2] = bgColor.B
|
||||
d[3] = bgColor.A
|
||||
return
|
||||
}
|
||||
|
||||
xq := xf - float64(x0)
|
||||
yq := yf - float64(y0)
|
||||
points := [4]image.Point{
|
||||
{x0, y0},
|
||||
{x0 + 1, y0},
|
||||
{x0, y0 + 1},
|
||||
{x0 + 1, y0 + 1},
|
||||
}
|
||||
weights := [4]float64{
|
||||
(1 - xq) * (1 - yq),
|
||||
xq * (1 - yq),
|
||||
(1 - xq) * yq,
|
||||
xq * yq,
|
||||
}
|
||||
|
||||
var r, g, b, a float64
|
||||
for i := 0; i < 4; i++ {
|
||||
p := points[i]
|
||||
w := weights[i]
|
||||
if p.In(bounds) {
|
||||
i := p.Y*src.Stride + p.X*4
|
||||
s := src.Pix[i : i+4 : i+4]
|
||||
wa := float64(s[3]) * w
|
||||
r += float64(s[0]) * wa
|
||||
g += float64(s[1]) * wa
|
||||
b += float64(s[2]) * wa
|
||||
a += wa
|
||||
} else {
|
||||
wa := float64(bgColor.A) * w
|
||||
r += float64(bgColor.R) * wa
|
||||
g += float64(bgColor.G) * wa
|
||||
b += float64(bgColor.B) * wa
|
||||
a += wa
|
||||
}
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a)
|
||||
}
|
||||
}
|
||||
167
vendor/github.com/disintegration/imaging/utils.go
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"math"
|
||||
"runtime"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// parallel processes the data in separate goroutines.
|
||||
func parallel(start, stop int, fn func(<-chan int)) {
|
||||
count := stop - start
|
||||
if count < 1 {
|
||||
return
|
||||
}
|
||||
|
||||
procs := runtime.GOMAXPROCS(0)
|
||||
if procs > count {
|
||||
procs = count
|
||||
}
|
||||
|
||||
c := make(chan int, count)
|
||||
for i := start; i < stop; i++ {
|
||||
c <- i
|
||||
}
|
||||
close(c)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for i := 0; i < procs; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
fn(c)
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
// absint returns the absolute value of i.
|
||||
func absint(i int) int {
|
||||
if i < 0 {
|
||||
return -i
|
||||
}
|
||||
return i
|
||||
}
|
||||
|
||||
// clamp rounds and clamps float64 value to fit into uint8.
|
||||
func clamp(x float64) uint8 {
|
||||
v := int64(x + 0.5)
|
||||
if v > 255 {
|
||||
return 255
|
||||
}
|
||||
if v > 0 {
|
||||
return uint8(v)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func reverse(pix []uint8) {
|
||||
if len(pix) <= 4 {
|
||||
return
|
||||
}
|
||||
i := 0
|
||||
j := len(pix) - 4
|
||||
for i < j {
|
||||
pi := pix[i : i+4 : i+4]
|
||||
pj := pix[j : j+4 : j+4]
|
||||
pi[0], pj[0] = pj[0], pi[0]
|
||||
pi[1], pj[1] = pj[1], pi[1]
|
||||
pi[2], pj[2] = pj[2], pi[2]
|
||||
pi[3], pj[3] = pj[3], pi[3]
|
||||
i += 4
|
||||
j -= 4
|
||||
}
|
||||
}
|
||||
|
||||
func toNRGBA(img image.Image) *image.NRGBA {
|
||||
if img, ok := img.(*image.NRGBA); ok {
|
||||
return &image.NRGBA{
|
||||
Pix: img.Pix,
|
||||
Stride: img.Stride,
|
||||
Rect: img.Rect.Sub(img.Rect.Min),
|
||||
}
|
||||
}
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
// rgbToHSL converts a color from RGB to HSL.
|
||||
func rgbToHSL(r, g, b uint8) (float64, float64, float64) {
|
||||
rr := float64(r) / 255
|
||||
gg := float64(g) / 255
|
||||
bb := float64(b) / 255
|
||||
|
||||
max := math.Max(rr, math.Max(gg, bb))
|
||||
min := math.Min(rr, math.Min(gg, bb))
|
||||
|
||||
l := (max + min) / 2
|
||||
|
||||
if max == min {
|
||||
return 0, 0, l
|
||||
}
|
||||
|
||||
var h, s float64
|
||||
d := max - min
|
||||
if l > 0.5 {
|
||||
s = d / (2 - max - min)
|
||||
} else {
|
||||
s = d / (max + min)
|
||||
}
|
||||
|
||||
switch max {
|
||||
case rr:
|
||||
h = (gg - bb) / d
|
||||
if g < b {
|
||||
h += 6
|
||||
}
|
||||
case gg:
|
||||
h = (bb-rr)/d + 2
|
||||
case bb:
|
||||
h = (rr-gg)/d + 4
|
||||
}
|
||||
h /= 6
|
||||
|
||||
return h, s, l
|
||||
}
|
||||
|
||||
// hslToRGB converts a color from HSL to RGB.
|
||||
func hslToRGB(h, s, l float64) (uint8, uint8, uint8) {
|
||||
var r, g, b float64
|
||||
if s == 0 {
|
||||
v := clamp(l * 255)
|
||||
return v, v, v
|
||||
}
|
||||
|
||||
var q float64
|
||||
if l < 0.5 {
|
||||
q = l * (1 + s)
|
||||
} else {
|
||||
q = l + s - l*s
|
||||
}
|
||||
p := 2*l - q
|
||||
|
||||
r = hueToRGB(p, q, h+1/3.0)
|
||||
g = hueToRGB(p, q, h)
|
||||
b = hueToRGB(p, q, h-1/3.0)
|
||||
|
||||
return clamp(r * 255), clamp(g * 255), clamp(b * 255)
|
||||
}
|
||||
|
||||
func hueToRGB(p, q, t float64) float64 {
|
||||
if t < 0 {
|
||||
t++
|
||||
}
|
||||
if t > 1 {
|
||||
t--
|
||||
}
|
||||
if t < 1/6.0 {
|
||||
return p + (q-p)*6*t
|
||||
}
|
||||
if t < 1/2.0 {
|
||||
return q
|
||||
}
|
||||
if t < 2/3.0 {
|
||||
return p + (q-p)*(2/3.0-t)*6
|
||||
}
|
||||
return p
|
||||
}
|
||||
3
vendor/github.com/go-chi/chi/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
.idea
|
||||
*.sw?
|
||||
.vscode
|
||||
17
vendor/github.com/go-chi/chi/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.10.x
|
||||
- 1.11.x
|
||||
|
||||
script:
|
||||
- go get -d -t ./...
|
||||
- go vet ./...
|
||||
- go test ./...
|
||||
- >
|
||||
go_version=$(go version);
|
||||
if [ ${go_version:13:4} = "1.11" ]; then
|
||||
go get -u golang.org/x/tools/cmd/goimports;
|
||||
goimports -d -e ./ | grep '.*' && { echo; echo "Aborting due to non-empty goimports output."; exit 1; } || :;
|
||||
fi
|
||||
|
||||
139
vendor/github.com/go-chi/chi/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
# Changelog
|
||||
|
||||
## v4.0.0 (2019-01-10)
|
||||
|
||||
- chi v4 requires Go 1.10.3+ (or Go 1.9.7+) - we have deprecated support for Go 1.7 and 1.8
|
||||
- router: respond with 404 on router with no routes (#362)
|
||||
- router: additional check to ensure wildcard is at the end of a url pattern (#333)
|
||||
- middleware: deprecate use of http.CloseNotifier (#347)
|
||||
- middleware: fix RedirectSlashes to include query params on redirect (#334)
|
||||
- History of changes: see https://github.com/go-chi/chi/compare/v3.3.4...v4.0.0
|
||||
|
||||
|
||||
## v3.3.4 (2019-01-07)
|
||||
|
||||
- Minor middleware improvements. No changes to core library/router. Moving v3 into its
|
||||
  own branch as a version of chi for Go 1.7, 1.8, 1.9, 1.10, 1.11
|
||||
- History of changes: see https://github.com/go-chi/chi/compare/v3.3.3...v3.3.4
|
||||
|
||||
|
||||
## v3.3.3 (2018-08-27)
|
||||
|
||||
- Minor release
|
||||
- See https://github.com/go-chi/chi/compare/v3.3.2...v3.3.3
|
||||
|
||||
|
||||
## v3.3.2 (2017-12-22)
|
||||
|
||||
- Support to route trailing slashes on mounted sub-routers (#281)
|
||||
- middleware: new `ContentCharset` to check matching charsets. Thank you
|
||||
@csucu for your community contribution!
|
||||
|
||||
|
||||
## v3.3.1 (2017-11-20)
|
||||
|
||||
- middleware: new `AllowContentType` handler for explicit whitelist of accepted request Content-Types
|
||||
- middleware: new `SetHeader` handler for short-hand middleware to set a response header key/value
|
||||
- Minor bug fixes
|
||||
|
||||
|
||||
## v3.3.0 (2017-10-10)
|
||||
|
||||
- New chi.RegisterMethod(method) to add support for custom HTTP methods, see _examples/custom-method for usage
|
||||
- Deprecated LINK and UNLINK methods from the default list, please use `chi.RegisterMethod("LINK")` and `chi.RegisterMethod("UNLINK")` in an `init()` function
|
||||
|
||||
|
||||
## v3.2.1 (2017-08-31)
|
||||
|
||||
- Add new `Match(rctx *Context, method, path string) bool` method to `Routes` interface
|
||||
and `Mux`. Match searches the mux's routing tree for a handler that matches the method/path
|
||||
- Add new `RouteMethod` to `*Context`
|
||||
- Add new `Routes` pointer to `*Context`
|
||||
- Add new `middleware.GetHead` to route missing HEAD requests to GET handler
|
||||
- Updated benchmarks (see README)
|
||||
|
||||
|
||||
## v3.1.5 (2017-08-02)
|
||||
|
||||
- Setup golint and go vet for the project
|
||||
- As per golint, we've redefined `func ServerBaseContext(h http.Handler, baseCtx context.Context) http.Handler`
|
||||
to `func ServerBaseContext(baseCtx context.Context, h http.Handler) http.Handler`
|
||||
|
||||
|
||||
## v3.1.0 (2017-07-10)
|
||||
|
||||
- Fix a few minor issues after v3 release
|
||||
- Move `docgen` sub-pkg to https://github.com/go-chi/docgen
|
||||
- Move `render` sub-pkg to https://github.com/go-chi/render
|
||||
- Add new `URLFormat` handler to chi/middleware sub-pkg to make working with url mime
|
||||
suffixes easier, ie. parsing `/articles/1.json` and `/articles/1.xml`. See comments in
|
||||
https://github.com/go-chi/chi/blob/master/middleware/url_format.go for example usage.
|
||||
|
||||
|
||||
## v3.0.0 (2017-06-21)
|
||||
|
||||
- Major update to chi library with many exciting updates, but also some *breaking changes*
|
||||
- URL parameter syntax changed from `/:id` to `/{id}` for even more flexible routing, such as
|
||||
`/articles/{month}-{day}-{year}-{slug}`, `/articles/{id}`, and `/articles/{id}.{ext}` on the
|
||||
same router
|
||||
- Support for regexp for routing patterns, in the form of `/{paramKey:regExp}` for example:
|
||||
`r.Get("/articles/{name:[a-z]+}", h)` and `chi.URLParam(r, "name")`
|
||||
- Add `Method` and `MethodFunc` to `chi.Router` to allow routing definitions such as
|
||||
`r.Method("GET", "/", h)` which provides a cleaner interface for custom handlers like
|
||||
in `_examples/custom-handler`
|
||||
- Deprecating `mux#FileServer` helper function. Instead, we encourage users to create their
|
||||
own using file handler with the stdlib, see `_examples/fileserver` for an example
|
||||
- Add support for LINK/UNLINK http methods via `r.Method()` and `r.MethodFunc()`
|
||||
- Moved the chi project to its own organization, to allow chi-related community packages to
|
||||
be easily discovered and supported, at: https://github.com/go-chi
|
||||
- *NOTE:* please update your import paths to `"github.com/go-chi/chi"`
|
||||
- *NOTE:* chi v2 is still available at https://github.com/go-chi/chi/tree/v2
|
||||
|
||||
|
||||
## v2.1.0 (2017-03-30)
|
||||
|
||||
- Minor improvements and update to the chi core library
|
||||
- Introduced a brand new `chi/render` sub-package to complete the story of building
|
||||
APIs to offer a pattern for managing well-defined request / response payloads. Please
|
||||
check out the updated `_examples/rest` example for how it works.
|
||||
- Added `MethodNotAllowed(h http.HandlerFunc)` to chi.Router interface
|
||||
|
||||
|
||||
## v2.0.0 (2017-01-06)
|
||||
|
||||
- After many months of v2 being in an RC state with many companies and users running it in
|
||||
production, the inclusion of some improvements to the middlewares, we are very pleased to
|
||||
announce v2.0.0 of chi.
|
||||
|
||||
|
||||
## v2.0.0-rc1 (2016-07-26)
|
||||
|
||||
- Huge update! chi v2 is a large refactor targeting Go 1.7+. As of Go 1.7, the popular
|
||||
community `"net/context"` package has been included in the standard library as `"context"` and
|
||||
utilized by `"net/http"` and `http.Request` to manage deadlines, cancelation signals and other
|
||||
request-scoped values. We're very excited about the new context addition and are proud to
|
||||
introduce chi v2, a minimal and powerful routing package for building large HTTP services,
|
||||
with zero external dependencies. Chi focuses on idiomatic design and encourages the use of
|
||||
stdlib HTTP handlers and middlewares.
|
||||
- chi v2 deprecates its `chi.Handler` interface and requires `http.Handler` or `http.HandlerFunc`
|
||||
- chi v2 stores URL routing parameters and patterns in the standard request context: `r.Context()`
|
||||
- chi v2 lower-level routing context is accessible by `chi.RouteContext(r.Context()) *chi.Context`,
|
||||
which provides direct access to URL routing parameters, the routing path and the matching
|
||||
routing patterns.
|
||||
- Users upgrading from chi v1 to v2 need to:
|
||||
1. Update the old chi.Handler signature, `func(ctx context.Context, w http.ResponseWriter, r *http.Request)` to
|
||||
the standard http.Handler: `func(w http.ResponseWriter, r *http.Request)`
|
||||
2. Use `chi.URLParam(r *http.Request, paramKey string) string`
|
||||
or `URLParamFromCtx(ctx context.Context, paramKey string) string` to access a url parameter value
|
||||
|
||||
|
||||
## v1.0.0 (2016-07-01)
|
||||
|
||||
- Released chi v1 stable https://github.com/go-chi/chi/tree/v1.0.0 for Go 1.6 and older.
|
||||
|
||||
|
||||
## v0.9.0 (2016-03-31)
|
||||
|
||||
- Reuse context objects via sync.Pool for zero-allocation routing [#33](https://github.com/go-chi/chi/pull/33)
|
||||
- BREAKING NOTE: due to subtle API changes, previously `chi.URLParams(ctx)["id"]` used to access url parameters
|
||||
has changed to: `chi.URLParam(ctx, "id")`
|
||||
31
vendor/github.com/go-chi/chi/CONTRIBUTING.md
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
# Contributing
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. [Install Go][go-install].
|
||||
2. Download the sources and switch the working directory:
|
||||
|
||||
```bash
|
||||
go get -u -d github.com/go-chi/chi
|
||||
cd $GOPATH/src/github.com/go-chi/chi
|
||||
```
|
||||
|
||||
## Submitting a Pull Request
|
||||
|
||||
A typical workflow is:
|
||||
|
||||
1. [Fork the repository.][fork] [This tip maybe also helpful.][go-fork-tip]
|
||||
2. [Create a topic branch.][branch]
|
||||
3. Add tests for your change.
|
||||
4. Run `go test`. If your tests pass, return to the step 3.
|
||||
5. Implement the change and ensure the steps from the previous step pass.
|
||||
6. Run `goimports -w .`, to ensure the new code conforms to Go formatting guideline.
|
||||
7. [Add, commit and push your changes.][git-help]
|
||||
8. [Submit a pull request.][pull-req]
|
||||
|
||||
[go-install]: https://golang.org/doc/install
|
||||
[go-fork-tip]: http://blog.campoy.cat/2014/03/github-and-go-forking-pull-requests-and.html
|
||||
[fork]: https://help.github.com/articles/fork-a-repo
|
||||
[branch]: http://learn.github.com/p/branching.html
|
||||
[git-help]: https://guides.github.com
|
||||
[pull-req]: https://help.github.com/articles/using-pull-requests
|
||||
20
vendor/github.com/go-chi/chi/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
Copyright (c) 2015-present Peter Kieltyka (https://github.com/pkieltyka), Google Inc.
|
||||
|
||||
MIT License
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
438
vendor/github.com/go-chi/chi/README.md
generated
vendored
Normal file
@@ -0,0 +1,438 @@
|
||||
# <img alt="chi" src="https://cdn.rawgit.com/go-chi/chi/master/_examples/chi.svg" width="220" />
|
||||
|
||||
|
||||
[![GoDoc Widget]][GoDoc] [![Travis Widget]][Travis]
|
||||
|
||||
`chi` is a lightweight, idiomatic and composable router for building Go HTTP services. It's
|
||||
especially good at helping you write large REST API services that are kept maintainable as your
|
||||
project grows and changes. `chi` is built on the new `context` package introduced in Go 1.7 to
|
||||
handle signaling, cancelation and request-scoped values across a handler chain.
|
||||
|
||||
The focus of the project has been to seek out an elegant and comfortable design for writing
|
||||
REST API servers, written during the development of the Pressly API service that powers our
|
||||
public API service, which in turn powers all of our client-side applications.
|
||||
|
||||
The key considerations of chi's design are: project structure, maintainability, standard http
|
||||
handlers (stdlib-only), developer productivity, and deconstructing a large system into many small
|
||||
parts. The core router `github.com/go-chi/chi` is quite small (less than 1000 LOC), but we've also
|
||||
included some useful/optional subpackages: [middleware](/middleware), [render](https://github.com/go-chi/render) and [docgen](https://github.com/go-chi/docgen). We hope you enjoy it too!
|
||||
|
||||
## Install
|
||||
|
||||
`go get -u github.com/go-chi/chi`
|
||||
|
||||
|
||||
## Features
|
||||
|
||||
* **Lightweight** - cloc'd in ~1000 LOC for the chi router
|
||||
* **Fast** - yes, see [benchmarks](#benchmarks)
|
||||
* **100% compatible with net/http** - use any http or middleware pkg in the ecosystem that is also compatible with `net/http`
|
||||
* **Designed for modular/composable APIs** - middlewares, inline middlewares, route groups and subrouter mounting
|
||||
* **Context control** - built on new `context` package, providing value chaining, cancelations and timeouts
|
||||
* **Robust** - in production at Pressly, CloudFlare, Heroku, 99Designs, and many others (see [discussion](https://github.com/go-chi/chi/issues/91))
|
||||
* **Doc generation** - `docgen` auto-generates routing documentation from your source to JSON or Markdown
|
||||
* **No external dependencies** - plain ol' Go stdlib + net/http
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
See [_examples/](https://github.com/go-chi/chi/blob/master/_examples/) for a variety of examples.
|
||||
|
||||
|
||||
**As easy as:**
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"github.com/go-chi/chi"
|
||||
)
|
||||
|
||||
func main() {
|
||||
r := chi.NewRouter()
|
||||
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte("welcome"))
|
||||
})
|
||||
http.ListenAndServe(":3000", r)
|
||||
}
|
||||
```
|
||||
|
||||
**REST Preview:**
|
||||
|
||||
Here is a little preview of how routing looks with chi. Also take a look at the generated routing docs
|
||||
in JSON ([routes.json](https://github.com/go-chi/chi/blob/master/_examples/rest/routes.json)) and in
|
||||
Markdown ([routes.md](https://github.com/go-chi/chi/blob/master/_examples/rest/routes.md)).
|
||||
|
||||
I highly recommend reading the source of the [examples](https://github.com/go-chi/chi/blob/master/_examples/) listed
|
||||
above; they will show you all the features of chi and serve as a good form of documentation.
|
||||
|
||||
```go
|
||||
import (
|
||||
//...
|
||||
"context"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/go-chi/chi/middleware"
|
||||
)
|
||||
|
||||
func main() {
|
||||
r := chi.NewRouter()
|
||||
|
||||
// A good base middleware stack
|
||||
r.Use(middleware.RequestID)
|
||||
r.Use(middleware.RealIP)
|
||||
r.Use(middleware.Logger)
|
||||
r.Use(middleware.Recoverer)
|
||||
|
||||
// Set a timeout value on the request context (ctx), that will signal
|
||||
// through ctx.Done() that the request has timed out and further
|
||||
// processing should be stopped.
|
||||
r.Use(middleware.Timeout(60 * time.Second))
|
||||
|
||||
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte("hi"))
|
||||
})
|
||||
|
||||
// RESTy routes for "articles" resource
|
||||
r.Route("/articles", func(r chi.Router) {
|
||||
r.With(paginate).Get("/", listArticles) // GET /articles
|
||||
r.With(paginate).Get("/{month}-{day}-{year}", listArticlesByDate) // GET /articles/01-16-2017
|
||||
|
||||
r.Post("/", createArticle) // POST /articles
|
||||
r.Get("/search", searchArticles) // GET /articles/search
|
||||
|
||||
// Regexp url parameters:
|
||||
r.Get("/{articleSlug:[a-z-]+}", getArticleBySlug) // GET /articles/home-is-toronto
|
||||
|
||||
// Subrouters:
|
||||
r.Route("/{articleID}", func(r chi.Router) {
|
||||
r.Use(ArticleCtx)
|
||||
r.Get("/", getArticle) // GET /articles/123
|
||||
r.Put("/", updateArticle) // PUT /articles/123
|
||||
r.Delete("/", deleteArticle) // DELETE /articles/123
|
||||
})
|
||||
})
|
||||
|
||||
// Mount the admin sub-router
|
||||
r.Mount("/admin", adminRouter())
|
||||
|
||||
http.ListenAndServe(":3333", r)
|
||||
}
|
||||
|
||||
func ArticleCtx(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
articleID := chi.URLParam(r, "articleID")
|
||||
article, err := dbGetArticle(articleID)
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
ctx := context.WithValue(r.Context(), "article", article)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
|
||||
func getArticle(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
article, ok := ctx.Value("article").(*Article)
|
||||
if !ok {
|
||||
http.Error(w, http.StatusText(422), 422)
|
||||
return
|
||||
}
|
||||
w.Write([]byte(fmt.Sprintf("title:%s", article.Title)))
|
||||
}
|
||||
|
||||
// A completely separate router for administrator routes
|
||||
func adminRouter() http.Handler {
|
||||
r := chi.NewRouter()
|
||||
r.Use(AdminOnly)
|
||||
r.Get("/", adminIndex)
|
||||
r.Get("/accounts", adminListAccounts)
|
||||
return r
|
||||
}
|
||||
|
||||
func AdminOnly(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
perm, ok := ctx.Value("acl.permission").(YourPermissionType)
|
||||
if !ok || !perm.IsAdmin() {
|
||||
http.Error(w, http.StatusText(403), 403)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Router design
|
||||
|
||||
chi's router is based on a kind of [Patricia Radix trie](https://en.wikipedia.org/wiki/Radix_tree).
|
||||
The router is fully compatible with `net/http`.
|
||||
|
||||
Built on top of the tree is the `Router` interface:
|
||||
|
||||
```go
|
||||
// Router consisting of the core routing methods used by chi's Mux,
|
||||
// using only the standard net/http.
|
||||
type Router interface {
|
||||
http.Handler
|
||||
Routes
|
||||
|
||||
	// Use appends one or more middlewares onto the Router stack.
|
||||
Use(middlewares ...func(http.Handler) http.Handler)
|
||||
|
||||
// With adds inline middlewares for an endpoint handler.
|
||||
With(middlewares ...func(http.Handler) http.Handler) Router
|
||||
|
||||
// Group adds a new inline-Router along the current routing
|
||||
// path, with a fresh middleware stack for the inline-Router.
|
||||
Group(fn func(r Router)) Router
|
||||
|
||||
	// Route mounts a sub-Router along a `pattern` string.
|
||||
Route(pattern string, fn func(r Router)) Router
|
||||
|
||||
// Mount attaches another http.Handler along ./pattern/*
|
||||
Mount(pattern string, h http.Handler)
|
||||
|
||||
	// Handle and HandleFunc add routes for `pattern` that matches
|
||||
// all HTTP methods.
|
||||
Handle(pattern string, h http.Handler)
|
||||
HandleFunc(pattern string, h http.HandlerFunc)
|
||||
|
||||
	// Method and MethodFunc add routes for `pattern` that matches
|
||||
// the `method` HTTP method.
|
||||
Method(method, pattern string, h http.Handler)
|
||||
MethodFunc(method, pattern string, h http.HandlerFunc)
|
||||
|
||||
// HTTP-method routing along `pattern`
|
||||
Connect(pattern string, h http.HandlerFunc)
|
||||
Delete(pattern string, h http.HandlerFunc)
|
||||
Get(pattern string, h http.HandlerFunc)
|
||||
Head(pattern string, h http.HandlerFunc)
|
||||
Options(pattern string, h http.HandlerFunc)
|
||||
Patch(pattern string, h http.HandlerFunc)
|
||||
Post(pattern string, h http.HandlerFunc)
|
||||
Put(pattern string, h http.HandlerFunc)
|
||||
Trace(pattern string, h http.HandlerFunc)
|
||||
|
||||
// NotFound defines a handler to respond whenever a route could
|
||||
// not be found.
|
||||
NotFound(h http.HandlerFunc)
|
||||
|
||||
// MethodNotAllowed defines a handler to respond whenever a method is
|
||||
// not allowed.
|
||||
MethodNotAllowed(h http.HandlerFunc)
|
||||
}
|
||||
|
||||
// Routes interface adds two methods for router traversal, which is also
|
||||
// used by the github.com/go-chi/docgen package to generate documentation for Routers.
|
||||
type Routes interface {
|
||||
// Routes returns the routing tree in an easily traversable structure.
|
||||
Routes() []Route
|
||||
|
||||
// Middlewares returns the list of middlewares in use by the router.
|
||||
Middlewares() Middlewares
|
||||
|
||||
// Match searches the routing tree for a handler that matches
|
||||
// the method/path - similar to routing a http request, but without
|
||||
// executing the handler thereafter.
|
||||
Match(rctx *Context, method, path string) bool
|
||||
}
|
||||
```
|
||||
|
||||
Each routing method accepts a URL `pattern` and chain of `handlers`. The URL pattern
|
||||
supports named params (ie. `/users/{userID}`) and wildcards (ie. `/admin/*`). URL parameters
|
||||
can be fetched at runtime by calling `chi.URLParam(r, "userID")` for named parameters
|
||||
and `chi.URLParam(r, "*")` for a wildcard parameter.
|
||||
|
||||
|
||||
### Middleware handlers
|
||||
|
||||
chi's middlewares are just stdlib net/http middleware handlers. There is nothing special
|
||||
about them, which means the router and all the tooling is designed to be compatible and
|
||||
friendly with any middleware in the community. This offers much better extensibility and reuse
|
||||
of packages and is at the heart of chi's purpose.
|
||||
|
||||
Here is an example of a standard net/http middleware handler using the new request context
|
||||
available in Go. This middleware sets a hypothetical user identifier on the request
|
||||
context and calls the next handler in the chain.
|
||||
|
||||
```go
|
||||
// HTTP middleware setting a value on the request context
|
||||
func MyMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := context.WithValue(r.Context(), "user", "123")
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Request handlers
|
||||
|
||||
chi uses standard net/http request handlers. This little snippet is an example of a http.Handler
|
||||
func that reads a user identifier from the request context - hypothetically, identifying
|
||||
the user sending an authenticated request, validated+set by a previous middleware handler.
|
||||
|
||||
```go
|
||||
// HTTP handler accessing data from the request context.
|
||||
func MyRequestHandler(w http.ResponseWriter, r *http.Request) {
|
||||
user := r.Context().Value("user").(string)
|
||||
w.Write([]byte(fmt.Sprintf("hi %s", user)))
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### URL parameters
|
||||
|
||||
chi's router parses and stores URL parameters right onto the request context. Here is
|
||||
an example of how to access URL params in your net/http handlers. And of course, middlewares
|
||||
are able to access the same information.
|
||||
|
||||
```go
|
||||
// HTTP handler accessing the url routing parameters.
|
||||
func MyRequestHandler(w http.ResponseWriter, r *http.Request) {
|
||||
userID := chi.URLParam(r, "userID") // from a route like /users/{userID}
|
||||
|
||||
ctx := r.Context()
|
||||
key := ctx.Value("key").(string)
|
||||
|
||||
w.Write([]byte(fmt.Sprintf("hi %v, %v", userID, key)))
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Middlewares
|
||||
|
||||
chi comes equipped with an optional `middleware` package, providing a suite of standard
|
||||
`net/http` middlewares. Please note, any middleware in the ecosystem that is also compatible
|
||||
with `net/http` can be used with chi's mux.
|
||||
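For example, a minimal sketch wiring up several of the core middlewares listed below (the route, limits and port are illustrative):

```go
package main

import (
	"net/http"
	"time"

	"github.com/go-chi/chi"
	"github.com/go-chi/chi/middleware"
)

func main() {
	r := chi.NewRouter()

	// Tracing and recovery first, then a concurrency ceiling and a per-request timeout.
	r.Use(middleware.RequestID)
	r.Use(middleware.Logger)
	r.Use(middleware.Recoverer)
	r.Use(middleware.Throttle(100))             // at most 100 in-flight requests
	r.Use(middleware.Timeout(30 * time.Second)) // signal ctx.Done() after 30s

	r.Get("/ping", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("pong"))
	})

	http.ListenAndServe(":3000", r)
}
```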
|
||||
### Core middlewares
|
||||
|
||||
-----------------------------------------------------------------------------------------------------------
|
||||
| chi/middleware Handler | description |
|
||||
|:----------------------|:---------------------------------------------------------------------------------
|
||||
| AllowContentType | Explicit whitelist of accepted request Content-Types |
|
||||
| Compress | Gzip compression for clients that accept compressed responses |
|
||||
| GetHead | Automatically route undefined HEAD requests to GET handlers |
|
||||
| Heartbeat | Monitoring endpoint to check the server's pulse |
|
||||
| Logger | Logs the start and end of each request with the elapsed processing time |
|
||||
| NoCache | Sets response headers to prevent clients from caching |
|
||||
| Profiler | Easily attach net/http/pprof to your routers |
|
||||
| RealIP | Sets a http.Request's RemoteAddr to either X-Forwarded-For or X-Real-IP |
|
||||
| Recoverer | Gracefully absorbs panics and prints the stack trace |
|
||||
| RequestID | Injects a request ID into the context of each request |
|
||||
| RedirectSlashes | Redirect slashes on routing paths |
|
||||
| SetHeader | Short-hand middleware to set a response header key/value |
|
||||
| StripSlashes | Strip slashes on routing paths |
|
||||
| Throttle | Puts a ceiling on the number of concurrent requests |
|
||||
| Timeout | Signals to the request context when the timeout deadline is reached |
|
||||
| URLFormat | Parse extension from url and put it on request context |
|
||||
| WithValue | Short-hand middleware to set a key/value on the request context |
|
||||
-----------------------------------------------------------------------------------------------------------
|
||||
|
||||
### Auxiliary middlewares & packages
|
||||
|
||||
Please see https://github.com/go-chi for additional packages.
|
||||
|
||||
--------------------------------------------------------------------------------------------------------------------
|
||||
| package | description |
|
||||
|:---------------------------------------------------|:-------------------------------------------------------------
|
||||
| [cors](https://github.com/go-chi/cors) | Cross-origin resource sharing (CORS) |
|
||||
| [docgen](https://github.com/go-chi/docgen) | Print chi.Router routes at runtime |
|
||||
| [jwtauth](https://github.com/go-chi/jwtauth) | JWT authentication |
|
||||
| [hostrouter](https://github.com/go-chi/hostrouter) | Domain/host based request routing |
|
||||
| [httpcoala](https://github.com/go-chi/httpcoala) | HTTP request coalescer |
|
||||
| [chi-authz](https://github.com/casbin/chi-authz) | Request ACL via https://github.com/hsluoyz/casbin |
|
||||
| [phi](https://github.com/fate-lovely/phi) | Port chi to [fasthttp](https://github.com/valyala/fasthttp) |
|
||||
--------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
Please [submit a PR](./CONTRIBUTING.md) if you'd like to include a link to a chi-compatible middleware.
|
||||
|
||||
|
||||
## context?
|
||||
|
||||
`context` is a tiny pkg that provides a simple interface to signal context across call stacks
|
||||
and goroutines. It was originally written by [Sameer Ajmani](https://github.com/Sajmani)
|
||||
and is available in stdlib since go1.7.
|
||||
|
||||
Learn more at https://blog.golang.org/context
|
||||
|
||||
and..
|
||||
* Docs: https://golang.org/pkg/context
|
||||
* Source: https://github.com/golang/go/tree/master/src/context
|
||||
|
||||
|
||||
## Benchmarks
|
||||
|
||||
The benchmark suite: https://github.com/pkieltyka/go-http-routing-benchmark
|
||||
|
||||
Results as of Jan 9, 2019 with Go 1.11.4 on Linux X1 Carbon laptop
|
||||
|
||||
```shell
|
||||
BenchmarkChi_Param 3000000 475 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_Param5 2000000 696 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_Param20 1000000 1275 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_ParamWrite 3000000 505 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_GithubStatic 3000000 508 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_GithubParam 2000000 669 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_GithubAll 10000 134627 ns/op 87699 B/op 609 allocs/op
|
||||
BenchmarkChi_GPlusStatic 3000000 402 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_GPlusParam 3000000 500 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_GPlus2Params 3000000 586 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_GPlusAll 200000 7237 ns/op 5616 B/op 39 allocs/op
|
||||
BenchmarkChi_ParseStatic 3000000 408 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_ParseParam 3000000 488 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_Parse2Params 3000000 551 ns/op 432 B/op 3 allocs/op
|
||||
BenchmarkChi_ParseAll 100000 13508 ns/op 11232 B/op 78 allocs/op
|
||||
BenchmarkChi_StaticAll 20000 81933 ns/op 67826 B/op 471 allocs/op
|
||||
```
|
||||
|
||||
Comparison with other routers: https://gist.github.com/pkieltyka/123032f12052520aaccab752bd3e78cc
|
||||
|
||||
NOTE: the allocs in the benchmark above are from the calls to http.Request's
|
||||
`WithContext(context.Context)` method that clones the http.Request, sets the `Context()`
|
||||
on the duplicated (alloc'd) request and returns the new request object. This is just
|
||||
how setting context on a request in Go works.
|
||||
|
||||
|
||||
## Credits
|
||||
|
||||
* Carl Jackson for https://github.com/zenazn/goji
|
||||
* Parts of chi's thinking comes from goji, and chi's middleware package
|
||||
sources from goji.
|
||||
* Armon Dadgar for https://github.com/armon/go-radix
|
||||
* Contributions: [@VojtechVitek](https://github.com/VojtechVitek)
|
||||
|
||||
We'll be more than happy to see [your contributions](./CONTRIBUTING.md)!
|
||||
|
||||
|
||||
## Beyond REST
|
||||
|
||||
chi is just a http router that lets you decompose request handling into many smaller layers.
|
||||
Many companies including Pressly.com (of course) use chi to write REST services for their public
|
||||
APIs. But, REST is just a convention for managing state via HTTP, and there's a lot of other pieces
|
||||
required to write a complete client-server system or network of microservices.
|
||||
|
||||
Looking ahead beyond REST, I also recommend some newer works in the field coming from
|
||||
[gRPC](https://github.com/grpc/grpc-go), [NATS](https://nats.io), [go-kit](https://github.com/go-kit/kit)
|
||||
and even [graphql](https://github.com/graphql-go/graphql). They're all pretty cool with their
|
||||
own unique approaches and benefits. Specifically, I'd look at gRPC since it makes client-server
|
||||
communication feel like a single program on a single computer, no need to hand-write a client library
|
||||
and the request/response payloads are typed contracts. NATS is pretty amazing too as a super
|
||||
fast and lightweight pub-sub transport that can speak protobufs, with nice service discovery -
|
||||
an excellent combination with gRPC.
|
||||
|
||||
|
||||
## License
|
||||
|
||||
Copyright (c) 2015-present [Peter Kieltyka](https://github.com/pkieltyka)
|
||||
|
||||
Licensed under [MIT License](./LICENSE)
|
||||
|
||||
[GoDoc]: https://godoc.org/github.com/go-chi/chi
|
||||
[GoDoc Widget]: https://godoc.org/github.com/go-chi/chi?status.svg
|
||||
[Travis]: https://travis-ci.org/go-chi/chi
|
||||
[Travis Widget]: https://travis-ci.org/go-chi/chi.svg?branch=master
|
||||
49
vendor/github.com/go-chi/chi/chain.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
package chi
|
||||
|
||||
import "net/http"
|
||||
|
||||
// Chain returns a Middlewares type from a slice of middleware handlers.
|
||||
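//
// A usage sketch, assuming finalHandler is an http.Handler and the chi/middleware
// package is imported:
//
//	h := chi.Chain(middleware.Logger, middleware.Recoverer).Handler(finalHandler)
//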
func Chain(middlewares ...func(http.Handler) http.Handler) Middlewares {
|
||||
return Middlewares(middlewares)
|
||||
}
|
||||
|
||||
// Handler builds and returns a http.Handler from the chain of middlewares,
|
||||
// with `h http.Handler` as the final handler.
|
||||
func (mws Middlewares) Handler(h http.Handler) http.Handler {
|
||||
return &ChainHandler{mws, h, chain(mws, h)}
|
||||
}
|
||||
|
||||
// HandlerFunc builds and returns a http.Handler from the chain of middlewares,
|
||||
// with `h http.Handler` as the final handler.
|
||||
func (mws Middlewares) HandlerFunc(h http.HandlerFunc) http.Handler {
|
||||
return &ChainHandler{mws, h, chain(mws, h)}
|
||||
}
|
||||
|
||||
// ChainHandler is a http.Handler with support for handler composition and
|
||||
// execution.
|
||||
type ChainHandler struct {
|
||||
Middlewares Middlewares
|
||||
Endpoint http.Handler
|
||||
chain http.Handler
|
||||
}
|
||||
|
||||
func (c *ChainHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
c.chain.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
// chain builds a http.Handler composed of an inline middleware stack and endpoint
|
||||
// handler in the order they are passed.
|
||||
func chain(middlewares []func(http.Handler) http.Handler, endpoint http.Handler) http.Handler {
|
||||
// Return ahead of time if there aren't any middlewares for the chain
|
||||
if len(middlewares) == 0 {
|
||||
return endpoint
|
||||
}
|
||||
|
||||
// Wrap the end handler with the middleware chain
|
||||
h := middlewares[len(middlewares)-1](endpoint)
|
||||
for i := len(middlewares) - 2; i >= 0; i-- {
|
||||
h = middlewares[i](h)
|
||||
}
|
||||
|
||||
return h
|
||||
}
|
||||
134
vendor/github.com/go-chi/chi/chi.go
generated
vendored
Normal file
@@ -0,0 +1,134 @@
|
||||
//
|
||||
// Package chi is a small, idiomatic and composable router for building HTTP services.
|
||||
//
|
||||
// chi requires Go 1.7 or newer.
|
||||
//
|
||||
// Example:
|
||||
// package main
|
||||
//
|
||||
// import (
|
||||
// "net/http"
|
||||
//
|
||||
// "github.com/go-chi/chi"
|
||||
// "github.com/go-chi/chi/middleware"
|
||||
// )
|
||||
//
|
||||
// func main() {
|
||||
// r := chi.NewRouter()
|
||||
// r.Use(middleware.Logger)
|
||||
// r.Use(middleware.Recoverer)
|
||||
//
|
||||
// r.Get("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
// w.Write([]byte("root."))
|
||||
// })
|
||||
//
|
||||
// http.ListenAndServe(":3333", r)
|
||||
// }
|
||||
//
|
||||
// See github.com/go-chi/chi/_examples/ for more in-depth examples.
|
||||
//
|
||||
// URL patterns allow for easy matching of path components in HTTP
|
||||
// requests. The matching components can then be accessed using
|
||||
// chi.URLParam(). All patterns must begin with a slash.
|
||||
//
|
||||
// A simple named placeholder {name} matches any sequence of characters
|
||||
// up to the next / or the end of the URL. Trailing slashes on paths must
|
||||
// be handled explicitly.
|
||||
//
|
||||
// A placeholder with a name followed by a colon allows a regular
|
||||
// expression match, for example {number:\\d+}. The regular expression
|
||||
// syntax is Go's normal regexp RE2 syntax, except that regular expressions
|
||||
// including { or } are not supported, and / will never be
|
||||
// matched. An anonymous regexp pattern is allowed, using an empty string
|
||||
// before the colon in the placeholder, such as {:\\d+}
|
||||
//
|
||||
// The special placeholder of asterisk matches the rest of the requested
|
||||
// URL. Any trailing characters in the pattern are ignored. This is the only
|
||||
// placeholder which will match / characters.
|
||||
//
|
||||
// Examples:
|
||||
// "/user/{name}" matches "/user/jsmith" but not "/user/jsmith/info" or "/user/jsmith/"
|
||||
// "/user/{name}/info" matches "/user/jsmith/info"
|
||||
// "/page/*" matches "/page/intro/latest"
|
||||
// "/page/*/index" also matches "/page/intro/latest"
|
||||
// "/date/{yyyy:\\d\\d\\d\\d}/{mm:\\d\\d}/{dd:\\d\\d}" matches "/date/2017/04/01"
|
||||
//
|
||||
package chi
|
||||
|
||||
import "net/http"
|
||||
|
||||
// NewRouter returns a new Mux object that implements the Router interface.
|
||||
func NewRouter() *Mux {
|
||||
return NewMux()
|
||||
}
|
||||
|
||||
// Router consisting of the core routing methods used by chi's Mux,
|
||||
// using only the standard net/http.
|
||||
type Router interface {
|
||||
http.Handler
|
||||
Routes
|
||||
|
||||
	// Use appends one or more middlewares onto the Router stack.
|
||||
Use(middlewares ...func(http.Handler) http.Handler)
|
||||
|
||||
// With adds inline middlewares for an endpoint handler.
|
||||
With(middlewares ...func(http.Handler) http.Handler) Router
|
||||
|
||||
// Group adds a new inline-Router along the current routing
|
||||
// path, with a fresh middleware stack for the inline-Router.
|
||||
Group(fn func(r Router)) Router
|
||||
|
||||
	// Route mounts a sub-Router along a `pattern` string.
|
||||
Route(pattern string, fn func(r Router)) Router
|
||||
|
||||
// Mount attaches another http.Handler along ./pattern/*
|
||||
Mount(pattern string, h http.Handler)
|
||||
|
||||
	// Handle and HandleFunc add routes for `pattern` that matches
|
||||
// all HTTP methods.
|
||||
Handle(pattern string, h http.Handler)
|
||||
HandleFunc(pattern string, h http.HandlerFunc)
|
||||
|
||||
	// Method and MethodFunc add routes for `pattern` that matches
|
||||
// the `method` HTTP method.
|
||||
Method(method, pattern string, h http.Handler)
|
||||
MethodFunc(method, pattern string, h http.HandlerFunc)
|
||||
|
||||
// HTTP-method routing along `pattern`
|
||||
Connect(pattern string, h http.HandlerFunc)
|
||||
Delete(pattern string, h http.HandlerFunc)
|
||||
Get(pattern string, h http.HandlerFunc)
|
||||
Head(pattern string, h http.HandlerFunc)
|
||||
Options(pattern string, h http.HandlerFunc)
|
||||
Patch(pattern string, h http.HandlerFunc)
|
||||
Post(pattern string, h http.HandlerFunc)
|
||||
Put(pattern string, h http.HandlerFunc)
|
||||
Trace(pattern string, h http.HandlerFunc)
|
||||
|
||||
// NotFound defines a handler to respond whenever a route could
|
||||
// not be found.
|
||||
NotFound(h http.HandlerFunc)
|
||||
|
||||
// MethodNotAllowed defines a handler to respond whenever a method is
|
||||
// not allowed.
|
||||
MethodNotAllowed(h http.HandlerFunc)
|
||||
}
|
||||
|
||||
// Routes interface adds two methods for router traversal, which is also
|
||||
// used by the `docgen` subpackage to generate documentation for Routers.
|
||||
type Routes interface {
|
||||
// Routes returns the routing tree in an easily traversable structure.
|
||||
Routes() []Route
|
||||
|
||||
// Middlewares returns the list of middlewares in use by the router.
|
||||
Middlewares() Middlewares
|
||||
|
||||
// Match searches the routing tree for a handler that matches
|
||||
// the method/path - similar to routing a http request, but without
|
||||
// executing the handler thereafter.
|
||||
Match(rctx *Context, method, path string) bool
|
||||
}
|
||||
|
||||
// Middlewares type is a slice of standard middleware handlers with methods
|
||||
// to compose middleware chains and http.Handler's.
|
||||
type Middlewares []func(http.Handler) http.Handler
|
||||
161
vendor/github.com/go-chi/chi/context.go
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
package chi
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
// RouteCtxKey is the context.Context key to store the request context.
|
||||
RouteCtxKey = &contextKey{"RouteContext"}
|
||||
)
|
||||
|
||||
// Context is the default routing context set on the root node of a
|
||||
// request context to track route patterns, URL parameters and
|
||||
// an optional routing path.
|
||||
type Context struct {
|
||||
Routes Routes
|
||||
|
||||
// Routing path/method override used during the route search.
|
||||
// See Mux#routeHTTP method.
|
||||
RoutePath string
|
||||
RouteMethod string
|
||||
|
||||
// Routing pattern stack throughout the lifecycle of the request,
|
||||
// across all connected routers. It is a record of all matching
|
||||
// patterns across a stack of sub-routers.
|
||||
RoutePatterns []string
|
||||
|
||||
// URLParams are the stack of routeParams captured during the
|
||||
// routing lifecycle across a stack of sub-routers.
|
||||
URLParams RouteParams
|
||||
|
||||
// The endpoint routing pattern that matched the request URI path
|
||||
// or `RoutePath` of the current sub-router. This value will update
|
||||
// during the lifecycle of a request passing through a stack of
|
||||
// sub-routers.
|
||||
routePattern string
|
||||
|
||||
// Route parameters matched for the current sub-router. It is
|
||||
	// intentionally unexported so it can't be tampered with.
|
||||
routeParams RouteParams
|
||||
|
||||
// methodNotAllowed hint
|
||||
methodNotAllowed bool
|
||||
}
|
||||
|
||||
// NewRouteContext returns a new routing Context object.
|
||||
func NewRouteContext() *Context {
|
||||
return &Context{}
|
||||
}
|
||||
|
||||
// Reset a routing context to its initial state.
|
||||
func (x *Context) Reset() {
|
||||
x.Routes = nil
|
||||
x.RoutePath = ""
|
||||
x.RouteMethod = ""
|
||||
x.RoutePatterns = x.RoutePatterns[:0]
|
||||
x.URLParams.Keys = x.URLParams.Keys[:0]
|
||||
x.URLParams.Values = x.URLParams.Values[:0]
|
||||
|
||||
x.routePattern = ""
|
||||
x.routeParams.Keys = x.routeParams.Keys[:0]
|
||||
x.routeParams.Values = x.routeParams.Values[:0]
|
||||
x.methodNotAllowed = false
|
||||
}
|
||||
|
||||
// URLParam returns the corresponding URL parameter value from the request
|
||||
// routing context.
|
||||
func (x *Context) URLParam(key string) string {
|
||||
for k := len(x.URLParams.Keys) - 1; k >= 0; k-- {
|
||||
if x.URLParams.Keys[k] == key {
|
||||
return x.URLParams.Values[k]
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// RoutePattern builds the routing pattern string for the particular
|
||||
// request, at the particular point during routing. This means, the value
|
||||
// will change throughout the execution of a request in a router. That is
|
||||
// why it's advised to only use this value after calling the next handler.
|
||||
//
|
||||
// For example,
|
||||
//
|
||||
// func Instrument(next http.Handler) http.Handler {
|
||||
// return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// next.ServeHTTP(w, r)
|
||||
// routePattern := chi.RouteContext(r.Context()).RoutePattern()
|
||||
// measure(w, r, routePattern)
|
||||
// })
|
||||
// }
|
||||
func (x *Context) RoutePattern() string {
|
||||
routePattern := strings.Join(x.RoutePatterns, "")
|
||||
return strings.Replace(routePattern, "/*/", "/", -1)
|
||||
}
|
||||
|
||||
// RouteContext returns chi's routing Context object from a
|
||||
// http.Request Context.
|
||||
func RouteContext(ctx context.Context) *Context {
|
||||
return ctx.Value(RouteCtxKey).(*Context)
|
||||
}
|
||||
|
||||
// URLParam returns the url parameter from a http.Request object.
|
||||
func URLParam(r *http.Request, key string) string {
|
||||
if rctx := RouteContext(r.Context()); rctx != nil {
|
||||
return rctx.URLParam(key)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// URLParamFromCtx returns the url parameter from a http.Request Context.
|
||||
func URLParamFromCtx(ctx context.Context, key string) string {
|
||||
if rctx := RouteContext(ctx); rctx != nil {
|
||||
return rctx.URLParam(key)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// RouteParams is a structure to track URL routing parameters efficiently.
|
||||
type RouteParams struct {
|
||||
Keys, Values []string
|
||||
}
|
||||
|
||||
// Add will append a URL parameter to the end of the route param
|
||||
func (s *RouteParams) Add(key, value string) {
|
||||
(*s).Keys = append((*s).Keys, key)
|
||||
(*s).Values = append((*s).Values, value)
|
||||
}
|
||||
|
||||
// ServerBaseContext wraps an http.Handler to set the request context to the
|
||||
// `baseCtx`.
|
||||
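//
// A usage sketch, assuming r is a chi.Router and serverCtx carries values shared by
// all requests:
//
//	http.ListenAndServe(":3333", chi.ServerBaseContext(serverCtx, r))
//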
func ServerBaseContext(baseCtx context.Context, h http.Handler) http.Handler {
|
||||
fn := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
baseCtx := baseCtx
|
||||
|
||||
// Copy over default net/http server context keys
|
||||
if v, ok := ctx.Value(http.ServerContextKey).(*http.Server); ok {
|
||||
baseCtx = context.WithValue(baseCtx, http.ServerContextKey, v)
|
||||
}
|
||||
if v, ok := ctx.Value(http.LocalAddrContextKey).(net.Addr); ok {
|
||||
baseCtx = context.WithValue(baseCtx, http.LocalAddrContextKey, v)
|
||||
}
|
||||
|
||||
h.ServeHTTP(w, r.WithContext(baseCtx))
|
||||
})
|
||||
return fn
|
||||
}
|
||||
|
||||
// contextKey is a value for use with context.WithValue. It's used as
|
||||
// a pointer so it fits in an interface{} without allocation. This technique
|
||||
// for defining context keys was copied from Go 1.7's new use of context in net/http.
|
||||
type contextKey struct {
|
||||
name string
|
||||
}
|
||||
|
||||
func (k *contextKey) String() string {
|
||||
return "chi context value " + k.name
|
||||
}
|
||||
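
// Usage sketch (illustrative; the route, handler, and port below are
// assumptions, not part of the vendored source): reading a URL parameter
// captured by the router with chi.URLParam.
//
//   r := chi.NewRouter()
//   r.Get("/users/{userID}", func(w http.ResponseWriter, r *http.Request) {
//   	// URLParam looks up the value captured for the {userID} placeholder.
//   	fmt.Fprintf(w, "user %s", chi.URLParam(r, "userID"))
//   })
//   http.ListenAndServe(":3000", r)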
275
vendor/github.com/go-chi/chi/middleware/compress.go
generated
vendored
Normal file
@@ -0,0 +1,275 @@
package middleware

import (
	"bufio"
	"compress/flate"
	"compress/gzip"
	"errors"
	"io"
	"net"
	"net/http"
	"strings"
)

var encoders = map[string]EncoderFunc{}

var encodingPrecedence = []string{"br", "gzip", "deflate"}

func init() {
	// TODO:
	// lzma: Opera.
	// sdch: Chrome, Android. Gzip output + dictionary header.
	// br: Brotli, see https://github.com/go-chi/chi/pull/326

	// TODO: Exception for old MSIE browsers that can't handle non-HTML?
	// https://zoompf.com/blog/2012/02/lose-the-wait-http-compression
	SetEncoder("gzip", encoderGzip)

	// HTTP 1.1 "deflate" (RFC 2616) stands for DEFLATE data (RFC 1951)
	// wrapped with zlib (RFC 1950). The zlib wrapper uses an Adler-32
	// checksum compared to the CRC-32 used in "gzip" and thus is faster.
	//
	// But.. some old browsers (MSIE, Safari 5.1) incorrectly expect
	// raw DEFLATE data only, without the mentioned zlib wrapper.
	// Because of this major confusion, most modern browsers try it
	// both ways, first looking for zlib headers.
	// Quote by Mark Adler: http://stackoverflow.com/a/9186091/385548
	//
	// The list of browsers having problems is quite big, see:
	// http://zoompf.com/blog/2012/02/lose-the-wait-http-compression
	// https://web.archive.org/web/20120321182910/http://www.vervestudios.co/projects/compression-tests/results
	//
	// That's why we prefer gzip over deflate. It's just more reliable
	// and not significantly slower than deflate.
	SetEncoder("deflate", encoderDeflate)

	// NOTE: Not implemented, intentionally:
	// case "compress": // LZW. Deprecated.
	// case "bzip2":    // Too slow on-the-fly.
	// case "zopfli":   // Too slow on-the-fly.
	// case "xz":       // Too slow on-the-fly.
}

// An EncoderFunc is a function that wraps the provided ResponseWriter with a
// streaming compression algorithm and returns it.
//
// In case of failure, the function should return nil.
type EncoderFunc func(w http.ResponseWriter, level int) io.Writer

// SetEncoder can be used to set the implementation of a compression algorithm.
//
// The encoding should be a standardised identifier. See:
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding
//
// For example, add the Brotli algorithm:
//
//   import brotli_enc "gopkg.in/kothar/brotli-go.v0/enc"
//
//   middleware.SetEncoder("br", func(w http.ResponseWriter, level int) io.Writer {
//     params := brotli_enc.NewBrotliParams()
//     params.SetQuality(level)
//     return brotli_enc.NewBrotliWriter(params, w)
//   })
func SetEncoder(encoding string, fn EncoderFunc) {
	encoding = strings.ToLower(encoding)
	if encoding == "" {
		panic("the encoding can not be empty")
	}
	if fn == nil {
		panic("attempted to set a nil encoder function")
	}
	encoders[encoding] = fn

	var e string
	for _, v := range encodingPrecedence {
		if v == encoding {
			e = v
		}
	}

	// If the encoding is not already in the precedence list, prepend it.
	if e == "" {
		encodingPrecedence = append([]string{encoding}, encodingPrecedence...)
	}
}

var defaultContentTypes = map[string]struct{}{
	"text/html":                {},
	"text/css":                 {},
	"text/plain":               {},
	"text/javascript":          {},
	"application/javascript":   {},
	"application/x-javascript": {},
	"application/json":         {},
	"application/atom+xml":     {},
	"application/rss+xml":      {},
	"image/svg+xml":            {},
}

// DefaultCompress is a middleware that compresses the response
// body of predefined content types to a data format based
// on the Accept-Encoding request header. It uses a default
// compression level.
func DefaultCompress(next http.Handler) http.Handler {
	return Compress(flate.DefaultCompression)(next)
}

// Compress is a middleware that compresses the response
// body of the given content types to a data format based
// on the Accept-Encoding request header. It uses the given
// compression level.
//
// NOTE: make sure to set the Content-Type header on your response,
// otherwise this middleware will not compress the response body. For example,
// in your handler you should set w.Header().Set("Content-Type", http.DetectContentType(yourBody))
// or set it manually.
func Compress(level int, types ...string) func(next http.Handler) http.Handler {
	contentTypes := defaultContentTypes
	if len(types) > 0 {
		contentTypes = make(map[string]struct{}, len(types))
		for _, t := range types {
			contentTypes[t] = struct{}{}
		}
	}

	return func(next http.Handler) http.Handler {
		fn := func(w http.ResponseWriter, r *http.Request) {
			encoder, encoding := selectEncoder(r.Header)

			cw := &compressResponseWriter{
				ResponseWriter: w,
				w:              w,
				contentTypes:   contentTypes,
				encoder:        encoder,
				encoding:       encoding,
				level:          level,
			}
			defer cw.Close()

			next.ServeHTTP(cw, r)
		}

		return http.HandlerFunc(fn)
	}
}

func selectEncoder(h http.Header) (EncoderFunc, string) {
	header := h.Get("Accept-Encoding")

	// Parse the names of all accepted algorithms from the header.
	accepted := strings.Split(strings.ToLower(header), ",")

	// Find a supported encoder from the accepted list, in order of precedence.
	for _, name := range encodingPrecedence {
		if fn, ok := encoders[name]; ok && matchAcceptEncoding(accepted, name) {
			return fn, name
		}
	}

	// No encoder found to match the accepted encoding
	return nil, ""
}

func matchAcceptEncoding(accepted []string, encoding string) bool {
	for _, v := range accepted {
		if strings.Index(v, encoding) >= 0 {
			return true
		}
	}
	return false
}

type compressResponseWriter struct {
	http.ResponseWriter
	w            io.Writer
	encoder      EncoderFunc
	encoding     string
	contentTypes map[string]struct{}
	level        int
	wroteHeader  bool
}

func (w *compressResponseWriter) WriteHeader(code int) {
	if w.wroteHeader {
		return
	}
	w.wroteHeader = true
	defer w.ResponseWriter.WriteHeader(code)

	// Already compressed data?
	if w.Header().Get("Content-Encoding") != "" {
		return
	}

	// Parse the first part of the Content-Type response header.
	contentType := ""
	parts := strings.Split(w.Header().Get("Content-Type"), ";")
	if len(parts) > 0 {
		contentType = parts[0]
	}

	// Is the content type compressible?
	if _, ok := w.contentTypes[contentType]; !ok {
		return
	}

	if w.encoder != nil && w.encoding != "" {
		if wr := w.encoder(w.ResponseWriter, w.level); wr != nil {
			w.w = wr
			w.Header().Set("Content-Encoding", w.encoding)

			// The content-length after compression is unknown
			w.Header().Del("Content-Length")
		}
	}
}

func (w *compressResponseWriter) Write(p []byte) (int, error) {
	if !w.wroteHeader {
		// Go through our own WriteHeader so the encoder gets wired up.
		w.WriteHeader(http.StatusOK)
	}

	return w.w.Write(p)
}

func (w *compressResponseWriter) Flush() {
	if f, ok := w.w.(http.Flusher); ok {
		f.Flush()
	}
}

func (w *compressResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
	if hj, ok := w.w.(http.Hijacker); ok {
		return hj.Hijack()
	}
	return nil, nil, errors.New("chi/middleware: http.Hijacker is unavailable on the writer")
}

func (w *compressResponseWriter) Push(target string, opts *http.PushOptions) error {
	if ps, ok := w.w.(http.Pusher); ok {
		return ps.Push(target, opts)
	}
	return errors.New("chi/middleware: http.Pusher is unavailable on the writer")
}

func (w *compressResponseWriter) Close() error {
	if c, ok := w.w.(io.WriteCloser); ok {
		return c.Close()
	}
	return errors.New("chi/middleware: io.WriteCloser is unavailable on the writer")
}

func encoderGzip(w http.ResponseWriter, level int) io.Writer {
	gw, err := gzip.NewWriterLevel(w, level)
	if err != nil {
		return nil
	}
	return gw
}

func encoderDeflate(w http.ResponseWriter, level int) io.Writer {
	dw, err := flate.NewWriter(w, level)
	if err != nil {
		return nil
	}
	return dw
}
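
// Usage sketch (illustrative; the router, route, and payload are assumptions):
// enabling compression for JSON responses only. Note the handler must set
// Content-Type, otherwise Compress leaves the body untouched.
//
//   r := chi.NewRouter()
//   r.Use(middleware.Compress(flate.DefaultCompression, "application/json"))
//   r.Get("/data", func(w http.ResponseWriter, r *http.Request) {
//   	w.Header().Set("Content-Type", "application/json")
//   	w.Write([]byte(`{"ok": true}`))
//   })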
51
vendor/github.com/go-chi/chi/middleware/content_charset.go
generated
vendored
Normal file
@@ -0,0 +1,51 @@
package middleware

import (
	"net/http"
	"strings"
)

// ContentCharset generates a handler that writes a 415 Unsupported Media Type response if none of the charsets match.
// An empty charset will allow requests with no Content-Type header or no specified charset.
func ContentCharset(charsets ...string) func(next http.Handler) http.Handler {
	for i, c := range charsets {
		charsets[i] = strings.ToLower(c)
	}

	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			if !contentEncoding(r.Header.Get("Content-Type"), charsets...) {
				w.WriteHeader(http.StatusUnsupportedMediaType)
				return
			}

			next.ServeHTTP(w, r)
		})
	}
}

// Check the content encoding against a list of acceptable values.
func contentEncoding(ce string, charsets ...string) bool {
	_, ce = split(strings.ToLower(ce), ";")
	_, ce = split(ce, "charset=")
	ce, _ = split(ce, ";")
	for _, c := range charsets {
		if ce == c {
			return true
		}
	}

	return false
}

// Split a string in two parts, cleaning any whitespace.
func split(str, sep string) (string, string) {
	var a, b string
	var parts = strings.SplitN(str, sep, 2)
	a = strings.TrimSpace(parts[0])
	if len(parts) == 2 {
		b = strings.TrimSpace(parts[1])
	}

	return a, b
}
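
// Usage sketch (illustrative; the route and handler are assumptions): accept
// only UTF-8 or unspecified charsets; anything else gets a 415 response.
//
//   r := chi.NewRouter()
//   r.Use(middleware.ContentCharset("utf-8", ""))
//   r.Post("/messages", createMessage) // createMessage is a hypothetical handler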
45
vendor/github.com/go-chi/chi/middleware/content_type.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
package middleware

import (
	"net/http"
	"strings"
)

// SetHeader is a convenience handler to set a response header key/value.
func SetHeader(key, value string) func(next http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		fn := func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set(key, value)
			next.ServeHTTP(w, r)
		}
		return http.HandlerFunc(fn)
	}
}

// AllowContentType enforces a whitelist of request Content-Types, otherwise
// it responds with a 415 Unsupported Media Type status.
func AllowContentType(contentTypes ...string) func(next http.Handler) http.Handler {
	cT := []string{}
	for _, t := range contentTypes {
		cT = append(cT, strings.ToLower(t))
	}

	return func(next http.Handler) http.Handler {
		fn := func(w http.ResponseWriter, r *http.Request) {
			s := strings.ToLower(strings.TrimSpace(r.Header.Get("Content-Type")))
			if i := strings.Index(s, ";"); i > -1 {
				s = s[0:i]
			}

			for _, t := range cT {
				if t == s {
					next.ServeHTTP(w, r)
					return
				}
			}

			w.WriteHeader(http.StatusUnsupportedMediaType)
		}
		return http.HandlerFunc(fn)
	}
}
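
// Usage sketch (illustrative; the subtree and handler are assumptions):
// restrict an API subtree to JSON request bodies.
//
//   r := chi.NewRouter()
//   r.Route("/api", func(r chi.Router) {
//   	r.Use(middleware.AllowContentType("application/json"))
//   	r.Post("/orders", createOrder) // createOrder is a hypothetical handler
//   })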
39
vendor/github.com/go-chi/chi/middleware/get_head.go
generated
vendored
Normal file
@@ -0,0 +1,39 @@
package middleware

import (
	"net/http"

	"github.com/go-chi/chi"
)

// GetHead automatically routes undefined HEAD requests to GET handlers.
func GetHead(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method == "HEAD" {
			rctx := chi.RouteContext(r.Context())
			routePath := rctx.RoutePath
			if routePath == "" {
				if r.URL.RawPath != "" {
					routePath = r.URL.RawPath
				} else {
					routePath = r.URL.Path
				}
			}

			// Temporary routing context to look ahead before routing the request
			tctx := chi.NewRouteContext()

			// Attempt to find a HEAD handler for the routing path; if none is found,
			// traverse the router as though it's a GET route, but proceed with the
			// request using the HEAD method.
			if !rctx.Routes.Match(tctx, "HEAD", routePath) {
				rctx.RouteMethod = "GET"
				rctx.RoutePath = routePath
				next.ServeHTTP(w, r)
				return
			}
		}

		next.ServeHTTP(w, r)
	})
}
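
// Usage sketch (illustrative; the route is an assumption): with GetHead
// installed, a HEAD request to a path that only defines a GET handler is
// routed to that GET handler instead of returning 405.
//
//   r := chi.NewRouter()
//   r.Use(middleware.GetHead)
//   r.Get("/status", func(w http.ResponseWriter, r *http.Request) {
//   	w.Write([]byte("ok"))
//   })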
26
vendor/github.com/go-chi/chi/middleware/heartbeat.go
generated
vendored
Normal file
@@ -0,0 +1,26 @@
package middleware

import (
	"net/http"
	"strings"
)

// Heartbeat is an endpoint middleware useful for setting up a path like
// `/ping` that load balancers or external uptime-testing services can
// request before hitting any routes. It's also convenient to place this
// above ACL middlewares.
func Heartbeat(endpoint string) func(http.Handler) http.Handler {
	f := func(h http.Handler) http.Handler {
		fn := func(w http.ResponseWriter, r *http.Request) {
			if r.Method == "GET" && strings.EqualFold(r.URL.Path, endpoint) {
				w.Header().Set("Content-Type", "text/plain")
				w.WriteHeader(http.StatusOK)
				w.Write([]byte("."))
				return
			}
			h.ServeHTTP(w, r)
		}
		return http.HandlerFunc(fn)
	}
	return f
}
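
// Usage sketch (illustrative; the endpoint path is an assumption): expose a
// /ping endpoint for load balancers before any heavier middleware runs.
//
//   r := chi.NewRouter()
//   r.Use(middleware.Heartbeat("/ping"))
//   // GET /ping now answers 200 "." without touching the routes below.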
158
vendor/github.com/go-chi/chi/middleware/logger.go
generated
vendored
Normal file
@@ -0,0 +1,158 @@
package middleware

import (
	"bytes"
	"context"
	"log"
	"net/http"
	"os"
	"time"
)

var (
	// LogEntryCtxKey is the context.Context key to store the request log entry.
	LogEntryCtxKey = &contextKey{"LogEntry"}

	// DefaultLogger is called by the Logger middleware handler to log each request.
	// It's made a package-level variable so that it can be reconfigured for custom
	// logging configurations.
	DefaultLogger = RequestLogger(&DefaultLogFormatter{Logger: log.New(os.Stdout, "", log.LstdFlags), NoColor: false})
)

// Logger is a middleware that logs the start and end of each request, along
// with some useful data about what was requested, what the response status was,
// and how long it took to return. When standard output is a TTY, Logger will
// print in color, otherwise it will print in black and white. Logger prints a
// request ID if one is provided.
//
// Alternatively, look at https://github.com/pressly/lg and the `lg.RequestLogger`
// middleware pkg.
func Logger(next http.Handler) http.Handler {
	return DefaultLogger(next)
}

// RequestLogger returns a logger handler using a custom LogFormatter.
func RequestLogger(f LogFormatter) func(next http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		fn := func(w http.ResponseWriter, r *http.Request) {
			entry := f.NewLogEntry(r)
			ww := NewWrapResponseWriter(w, r.ProtoMajor)

			t1 := time.Now()
			defer func() {
				entry.Write(ww.Status(), ww.BytesWritten(), time.Since(t1))
			}()

			next.ServeHTTP(ww, WithLogEntry(r, entry))
		}
		return http.HandlerFunc(fn)
	}
}

// LogFormatter initiates the beginning of a new LogEntry per request.
// See DefaultLogFormatter for an example implementation.
type LogFormatter interface {
	NewLogEntry(r *http.Request) LogEntry
}

// LogEntry records the final log when a request completes.
// See defaultLogEntry for an example implementation.
type LogEntry interface {
	Write(status, bytes int, elapsed time.Duration)
	Panic(v interface{}, stack []byte)
}

// GetLogEntry returns the in-context LogEntry for a request.
func GetLogEntry(r *http.Request) LogEntry {
	entry, _ := r.Context().Value(LogEntryCtxKey).(LogEntry)
	return entry
}

// WithLogEntry sets the in-context LogEntry for a request.
func WithLogEntry(r *http.Request, entry LogEntry) *http.Request {
	r = r.WithContext(context.WithValue(r.Context(), LogEntryCtxKey, entry))
	return r
}

// LoggerInterface accepts printing to the stdlib logger or a compatible logger.
type LoggerInterface interface {
	Print(v ...interface{})
}

// DefaultLogFormatter is a simple logger that implements a LogFormatter.
type DefaultLogFormatter struct {
	Logger  LoggerInterface
	NoColor bool
}

// NewLogEntry creates a new LogEntry for the request.
func (l *DefaultLogFormatter) NewLogEntry(r *http.Request) LogEntry {
	useColor := !l.NoColor
	entry := &defaultLogEntry{
		DefaultLogFormatter: l,
		request:             r,
		buf:                 &bytes.Buffer{},
		useColor:            useColor,
	}

	reqID := GetReqID(r.Context())
	if reqID != "" {
		cW(entry.buf, useColor, nYellow, "[%s] ", reqID)
	}
	cW(entry.buf, useColor, nCyan, "\"")
	cW(entry.buf, useColor, bMagenta, "%s ", r.Method)

	scheme := "http"
	if r.TLS != nil {
		scheme = "https"
	}
	cW(entry.buf, useColor, nCyan, "%s://%s%s %s\" ", scheme, r.Host, r.RequestURI, r.Proto)

	entry.buf.WriteString("from ")
	entry.buf.WriteString(r.RemoteAddr)
	entry.buf.WriteString(" - ")

	return entry
}

type defaultLogEntry struct {
	*DefaultLogFormatter
	request  *http.Request
	buf      *bytes.Buffer
	useColor bool
}

func (l *defaultLogEntry) Write(status, bytes int, elapsed time.Duration) {
	switch {
	case status < 200:
		cW(l.buf, l.useColor, bBlue, "%03d", status)
	case status < 300:
		cW(l.buf, l.useColor, bGreen, "%03d", status)
	case status < 400:
		cW(l.buf, l.useColor, bCyan, "%03d", status)
	case status < 500:
		cW(l.buf, l.useColor, bYellow, "%03d", status)
	default:
		cW(l.buf, l.useColor, bRed, "%03d", status)
	}

	cW(l.buf, l.useColor, bBlue, " %dB", bytes)

	l.buf.WriteString(" in ")
	if elapsed < 500*time.Millisecond {
		cW(l.buf, l.useColor, nGreen, "%s", elapsed)
	} else if elapsed < 5*time.Second {
		cW(l.buf, l.useColor, nYellow, "%s", elapsed)
	} else {
		cW(l.buf, l.useColor, nRed, "%s", elapsed)
	}

	l.Logger.Print(l.buf.String())
}

func (l *defaultLogEntry) Panic(v interface{}, stack []byte) {
	panicEntry := l.NewLogEntry(l.request).(*defaultLogEntry)
	cW(panicEntry.buf, l.useColor, bRed, "panic: %+v", v)
	l.Logger.Print(panicEntry.buf.String())
	l.Logger.Print(string(stack))
}
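
// Usage sketch (illustrative; the log prefix and flags are arbitrary choices
// for the example): plugging a custom stdlib logger into the middleware via
// RequestLogger and DefaultLogFormatter.
//
//   logger := log.New(os.Stdout, "api: ", log.LstdFlags)
//   r := chi.NewRouter()
//   r.Use(middleware.RequestLogger(&middleware.DefaultLogFormatter{
//   	Logger:  logger,
//   	NoColor: true,
//   }))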
12
vendor/github.com/go-chi/chi/middleware/middleware.go
generated
vendored
Normal file
@@ -0,0 +1,12 @@
package middleware

// contextKey is a value for use with context.WithValue. It's used as
// a pointer so it fits in an interface{} without allocation. This technique
// for defining context keys was copied from Go 1.7's new use of context in net/http.
type contextKey struct {
	name string
}

func (k *contextKey) String() string {
	return "chi/middleware context value " + k.name
}
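
// Sketch of the pattern (illustrative; the key name and value are assumptions):
// within this package, a value is stored and fetched with the same *contextKey
// pointer, so keys defined by other packages cannot collide with it.
//
//   var exampleCtxKey = &contextKey{"Example"} // hypothetical key
//   ctx := context.WithValue(context.Background(), exampleCtxKey, "value")
//   v, _ := ctx.Value(exampleCtxKey).(string)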
58
vendor/github.com/go-chi/chi/middleware/nocache.go
generated
vendored
Normal file
@@ -0,0 +1,58 @@
package middleware

// Ported from Goji's middleware, source:
// https://github.com/zenazn/goji/tree/master/web/middleware

import (
	"net/http"
	"time"
)

// Unix epoch time
var epoch = time.Unix(0, 0).Format(time.RFC1123)

// Taken from https://github.com/mytrile/nocache
var noCacheHeaders = map[string]string{
	"Expires":         epoch,
	"Cache-Control":   "no-cache, no-store, no-transform, must-revalidate, private, max-age=0",
	"Pragma":          "no-cache",
	"X-Accel-Expires": "0",
}

var etagHeaders = []string{
	"ETag",
	"If-Modified-Since",
	"If-Match",
	"If-None-Match",
	"If-Range",
	"If-Unmodified-Since",
}

// NoCache is a simple piece of middleware that sets a number of HTTP headers to prevent
// a router (or subrouter) from being cached by an upstream proxy and/or client.
//
// As per http://wiki.nginx.org/HttpProxyModule - NoCache sets:
//      Expires: Thu, 01 Jan 1970 00:00:00 UTC
//      Cache-Control: no-cache, private, max-age=0
//      X-Accel-Expires: 0
//      Pragma: no-cache (for HTTP/1.0 proxies/clients)
func NoCache(h http.Handler) http.Handler {
	fn := func(w http.ResponseWriter, r *http.Request) {

		// Delete any ETag headers that may have been set
		for _, v := range etagHeaders {
			if r.Header.Get(v) != "" {
				r.Header.Del(v)
			}
		}

		// Set our NoCache headers
		for k, v := range noCacheHeaders {
			w.Header().Set(k, v)
		}

		h.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn)
}
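
// Usage sketch (illustrative; the subtree and handler are assumptions):
// disable caching for an admin subtree while leaving the rest of the router
// untouched.
//
//   r := chi.NewRouter()
//   r.Route("/admin", func(r chi.Router) {
//   	r.Use(middleware.NoCache)
//   	r.Get("/dashboard", dashboardHandler) // dashboardHandler is hypothetical
//   })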
55
vendor/github.com/go-chi/chi/middleware/profiler.go
generated
vendored
Normal file
@@ -0,0 +1,55 @@
package middleware

import (
	"expvar"
	"fmt"
	"net/http"
	"net/http/pprof"

	"github.com/go-chi/chi"
)

// Profiler is a convenient subrouter used for mounting net/http/pprof, e.g.
//
//   func MyService() http.Handler {
//     r := chi.NewRouter()
//     // ..middlewares
//     r.Mount("/debug", middleware.Profiler())
//     // ..routes
//     return r
//   }
func Profiler() http.Handler {
	r := chi.NewRouter()
	r.Use(NoCache)

	r.Get("/", func(w http.ResponseWriter, r *http.Request) {
		http.Redirect(w, r, r.RequestURI+"/pprof/", 301)
	})
	r.HandleFunc("/pprof", func(w http.ResponseWriter, r *http.Request) {
		http.Redirect(w, r, r.RequestURI+"/", 301)
	})

	r.HandleFunc("/pprof/*", pprof.Index)
	r.HandleFunc("/pprof/cmdline", pprof.Cmdline)
	r.HandleFunc("/pprof/profile", pprof.Profile)
	r.HandleFunc("/pprof/symbol", pprof.Symbol)
	r.HandleFunc("/pprof/trace", pprof.Trace)
	r.HandleFunc("/vars", expVars)

	return r
}

// Replicated from expvar.go as it is not public.
func expVars(w http.ResponseWriter, r *http.Request) {
	first := true
	w.Header().Set("Content-Type", "application/json")
	fmt.Fprintf(w, "{\n")
	expvar.Do(func(kv expvar.KeyValue) {
		if !first {
			fmt.Fprintf(w, ",\n")
		}
		first = false
		fmt.Fprintf(w, "%q: %s", kv.Key, kv.Value)
	})
	fmt.Fprintf(w, "\n}\n")
}
54
vendor/github.com/go-chi/chi/middleware/realip.go
generated
vendored
Normal file
@@ -0,0 +1,54 @@
package middleware

// Ported from Goji's middleware, source:
// https://github.com/zenazn/goji/tree/master/web/middleware

import (
	"net/http"
	"strings"
)

var xForwardedFor = http.CanonicalHeaderKey("X-Forwarded-For")
var xRealIP = http.CanonicalHeaderKey("X-Real-IP")

// RealIP is a middleware that sets an http.Request's RemoteAddr to the results
// of parsing either the X-Forwarded-For header or the X-Real-IP header (in that
// order).
//
// This middleware should be inserted fairly early in the middleware stack to
// ensure that subsequent layers (e.g., request loggers) which examine the
// RemoteAddr will see the intended value.
//
// You should only use this middleware if you can trust the headers passed to
// you (in particular, the two headers this middleware uses), for example
// because you have placed a reverse proxy like HAProxy or nginx in front of
// chi. If your reverse proxies are configured to pass along arbitrary header
// values from the client, or if you use this middleware without a reverse
// proxy, malicious clients will be able to make you very sad (or, depending on
// how you're using RemoteAddr, vulnerable to an attack of some sort).
func RealIP(h http.Handler) http.Handler {
	fn := func(w http.ResponseWriter, r *http.Request) {
		if rip := realIP(r); rip != "" {
			r.RemoteAddr = rip
		}
		h.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn)
}

func realIP(r *http.Request) string {
	var ip string

	if xff := r.Header.Get(xForwardedFor); xff != "" {
		i := strings.Index(xff, ", ")
		if i == -1 {
			i = len(xff)
		}
		ip = xff[:i]
	} else if xrip := r.Header.Get(xRealIP); xrip != "" {
		ip = xrip
	}

	return ip
}
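
// Usage sketch (illustrative; the middleware ordering is the point of the
// example): RealIP should run before any middleware that reads RemoteAddr,
// such as the request logger, and only behind a trusted reverse proxy that
// sets X-Forwarded-For or X-Real-IP.
//
//   r := chi.NewRouter()
//   r.Use(middleware.RealIP)
//   r.Use(middleware.Logger)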
Some files were not shown because too many files have changed in this diff.