Mirror of https://github.com/stashapp/stash.git, synced 2025-12-17 04:14:39 +03:00
Replace basic auth with cookie authentication (#440)

* Add logout functionality and button
* Make session age configurable
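The change below swaps HTTP basic auth for a signed session cookie backed by gorilla/sessions. As a reading aid, here is a minimal, self-contained sketch of that pattern; it is not part of the diff, and the store key, cookie name, and routes are illustrative placeholders. A login step writes a userID into the cookie, and a middleware reads it back and redirects unauthenticated browsers to /login.

package main

import (
	"net/http"

	"github.com/gorilla/sessions"
)

// Placeholder key; the actual change generates and persists a random key in the config.
var sessionStore = sessions.NewCookieStore([]byte("placeholder-session-store-key"))

// authenticate mirrors the shape of the new middleware: resolve the user from
// the session cookie and redirect to the login page when none is present.
func authenticate(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		session, _ := sessionStore.Get(r, "session")
		userID, _ := session.Values["userID"].(string)

		if userID == "" && r.URL.Path != "/login" {
			http.Redirect(w, r, "/login", http.StatusFound)
			return
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/login", func(w http.ResponseWriter, r *http.Request) {
		// A real login handler would validate submitted credentials first.
		session, _ := sessionStore.Get(r, "session")
		session.Values["userID"] = "admin"
		_ = session.Save(r, w) // writes the Set-Cookie header
		http.Redirect(w, r, "/", http.StatusFound)
	})
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("authenticated content"))
	})
	_ = http.ListenAndServe("127.0.0.1:9999", authenticate(mux))
}
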
go.mod (1 line changed)
@@ -11,6 +11,7 @@ require (
 	github.com/go-chi/chi v4.0.2+incompatible
 	github.com/gobuffalo/packr/v2 v2.0.2
 	github.com/golang-migrate/migrate/v4 v4.3.1
+	github.com/gorilla/sessions v1.2.0
 	github.com/gorilla/websocket v1.4.0
 	github.com/h2non/filetype v1.0.8
 	// this is required for generate

go.sum (3 lines changed)
@@ -353,9 +353,12 @@ github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
 github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
 github.com/gorilla/mux v1.7.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
 github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1/go.mod h1:YeAe0gNeiNT5hoiZRI4yiOky6jVdNvfO2N6Kav/HmxY=
+github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
 github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
 github.com/gorilla/sessions v1.1.2/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
 github.com/gorilla/sessions v1.1.3/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
+github.com/gorilla/sessions v1.2.0 h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=
+github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
 github.com/gorilla/websocket v1.2.0 h1:VJtLvh6VQym50czpZzx07z/kw9EgAxI3x1ZB8taTMQQ=
 github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
 github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=

@@ -6,6 +6,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
   maxStreamingTranscodeSize
   username
   password
+  maxSessionAge
   logFile
   logOut
   logLevel

@@ -22,6 +22,8 @@ input ConfigGeneralInput {
   username: String
   """Password"""
   password: String
+  """Maximum session cookie age"""
+  maxSessionAge: Int
   """Name of the log file"""
   logFile: String
   """Whether to also output to stderr"""
@@ -51,6 +53,8 @@ type ConfigGeneralResult {
   username: String!
   """Password"""
   password: String!
+  """Maximum session cookie age"""
+  maxSessionAge: Int!
   """Name of the log file"""
   logFile: String
   """Whether to also output to stderr"""

@@ -10,4 +10,5 @@ const (
 	sceneKey    key = 2
 	studioKey   key = 3
 	movieKey    key = 4
+	ContextUser key = 5
 )

@@ -60,6 +60,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
 		}
 	}
 
+	if input.MaxSessionAge != nil {
+		config.Set(config.MaxSessionAge, *input.MaxSessionAge)
+	}
+
 	if input.LogFile != nil {
 		config.Set(config.LogFile, input.LogFile)
 	}

@@ -43,6 +43,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
 		MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
 		Username:                  config.GetUsername(),
 		Password:                  config.GetPasswordHash(),
+		MaxSessionAge:             config.GetMaxSessionAge(),
 		LogFile:                   &logFile,
 		LogOut:                    config.GetLogOut(),
 		LogLevel:                  config.GetLogLevel(),

@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"io/ioutil"
 	"net/http"
+	"net/url"
 	"os"
 	"path"
 	"path/filepath"
@@ -37,38 +38,69 @@ var uiBox *packr.Box
 
 //var legacyUiBox *packr.Box
 var setupUIBox *packr.Box
+var loginUIBox *packr.Box
+
+func allowUnauthenticated(r *http.Request) bool {
+	return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css"
+}
+
 func authenticateHandler() func(http.Handler) http.Handler {
 	return func(next http.Handler) http.Handler {
 		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-			// only do this if credentials have been configured
-			if !config.HasCredentials() {
-				next.ServeHTTP(w, r)
+			ctx := r.Context()
+
+			// translate api key into current user, if present
+			userID := ""
+			var err error
+
+			// handle session
+			userID, err = getSessionUserID(w, r)
+
+			if err != nil {
+				w.WriteHeader(http.StatusInternalServerError)
+				w.Write([]byte(err.Error()))
 				return
 			}
 
-			authUser, authPW, ok := r.BasicAuth()
-
-			if !ok || !config.ValidateCredentials(authUser, authPW) {
-				unauthorized(w)
+			// handle redirect if no user and user is required
+			if userID == "" && config.HasCredentials() && !allowUnauthenticated(r) {
+				// always allow
+
+				// if we don't have a userID, then redirect
+				// if graphql was requested, we just return a forbidden error
+				if r.URL.Path == "/graphql" {
+					w.Header().Add("WWW-Authenticate", `FormBased`)
+					w.WriteHeader(http.StatusUnauthorized)
+					return
+				}
+
+				// otherwise redirect to the login page
+				u := url.URL{
+					Path: "/login",
+				}
+				q := u.Query()
+				q.Set(returnURLParam, r.URL.Path)
+				u.RawQuery = q.Encode()
+				http.Redirect(w, r, u.String(), http.StatusFound)
 				return
 			}
 
+			ctx = context.WithValue(ctx, ContextUser, userID)
+
+			r = r.WithContext(ctx)
+
 			next.ServeHTTP(w, r)
 		})
 	}
 }
 
-func unauthorized(w http.ResponseWriter) {
-	w.Header().Add("WWW-Authenticate", `Basic realm=\"Stash\"`)
-	w.WriteHeader(http.StatusUnauthorized)
-}
-
 func Start() {
 	uiBox = packr.New("UI Box", "../../ui/v2.5/build")
 	//legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
 	setupUIBox = packr.New("Setup UI Box", "../../ui/setup")
+	loginUIBox = packr.New("Login UI Box", "../../ui/login")
+
+	initSessionStore()
 	initialiseImages()
 
 	r := chi.NewRouter()
@@ -107,6 +139,12 @@ func Start() {
 	r.Handle("/graphql", gqlHandler)
 	r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))
 
+	// session handlers
+	r.Post("/login", handleLogin)
+	r.Get("/logout", handleLogout)
+
+	r.Get("/login", getLoginHandler)
+
 	r.Mount("/gallery", galleryRoutes{}.Routes())
 	r.Mount("/performer", performerRoutes{}.Routes())
 	r.Mount("/scene", sceneRoutes{}.Routes())
@@ -144,6 +182,16 @@ func Start() {
 			http.FileServer(setupUIBox).ServeHTTP(w, r)
 		}
 	})
+	r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) {
+		ext := path.Ext(r.URL.Path)
+		if ext == ".html" || ext == "" {
+			data, _ := loginUIBox.Find("login.html")
+			_, _ = w.Write(data)
+		} else {
+			r.URL.Path = strings.Replace(r.URL.Path, "/login", "", 1)
+			http.FileServer(loginUIBox).ServeHTTP(w, r)
+		}
+	})
 	r.Post("/init", func(w http.ResponseWriter, r *http.Request) {
 		err := r.ParseForm()
 		if err != nil {

pkg/api/session.go — new file (127 lines)
@@ -0,0 +1,127 @@
package api

import (
	"fmt"
	"html/template"
	"net/http"

	"github.com/stashapp/stash/pkg/manager/config"

	"github.com/gorilla/sessions"
)

const cookieName = "session"
const usernameFormKey = "username"
const passwordFormKey = "password"
const userIDKey = "userID"

const returnURLParam = "returnURL"

var sessionStore = sessions.NewCookieStore(config.GetSessionStoreKey())

type loginTemplateData struct {
	URL   string
	Error string
}

func initSessionStore() {
	sessionStore.MaxAge(config.GetMaxSessionAge())
}

func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) {
	data, _ := loginUIBox.Find("login.html")
	templ, err := template.New("Login").Parse(string(data))
	if err != nil {
		http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
		return
	}

	err = templ.Execute(w, loginTemplateData{URL: returnURL, Error: loginError})
	if err != nil {
		http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
	}
}

func getLoginHandler(w http.ResponseWriter, r *http.Request) {
	if !config.HasCredentials() {
		http.Redirect(w, r, "/", http.StatusFound)
		return
	}

	redirectToLogin(w, r.URL.Query().Get(returnURLParam), "")
}

func handleLogin(w http.ResponseWriter, r *http.Request) {
	url := r.FormValue(returnURLParam)
	if url == "" {
		url = "/"
	}

	// ignore error - we want a new session regardless
	newSession, _ := sessionStore.Get(r, cookieName)

	username := r.FormValue("username")
	password := r.FormValue("password")

	// authenticate the user
	if !config.ValidateCredentials(username, password) {
		// redirect back to the login page with an error
		redirectToLogin(w, url, "Username or password is invalid")
		return
	}

	newSession.Values[userIDKey] = username

	err := newSession.Save(r, w)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	http.Redirect(w, r, url, http.StatusFound)
}

func handleLogout(w http.ResponseWriter, r *http.Request) {
	session, err := sessionStore.Get(r, cookieName)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	delete(session.Values, userIDKey)
	session.Options.MaxAge = -1

	err = session.Save(r, w)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// redirect to the login page if credentials are required
	getLoginHandler(w, r)
}

func getSessionUserID(w http.ResponseWriter, r *http.Request) (string, error) {
	session, err := sessionStore.Get(r, cookieName)
	// ignore errors and treat as an empty user id, so that we handle expired
	// cookie
	if err != nil {
		return "", nil
	}

	if !session.IsNew {
		val := session.Values[userIDKey]

		// refresh the cookie
		err = session.Save(r, w)
		if err != nil {
			return "", err
		}

		ret, _ := val.(string)

		return ret, nil
	}

	return "", nil
}

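A standalone sketch of the round trip the handlers above rely on; it is not part of the diff, and the hard-coded 32-byte key and the "session" cookie name are illustrative assumptions. After a login saves userID into the session, replaying the issued cookie on a later request yields a non-new session whose values are still readable, which is what getSessionUserID checks via session.IsNew.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/gorilla/sessions"
)

var store = sessions.NewCookieStore([]byte("0123456789abcdef0123456789abcdef"))

func login(w http.ResponseWriter, r *http.Request) {
	session, _ := store.Get(r, "session") // a new session when no valid cookie is sent
	session.Values["userID"] = "admin"
	_ = session.Save(r, w) // emits the Set-Cookie header
}

func main() {
	// 1. "Log in" and capture the cookie that was issued.
	rec := httptest.NewRecorder()
	login(rec, httptest.NewRequest("POST", "/login", nil))

	// 2. Replay that cookie on a later request, as a browser would.
	req := httptest.NewRequest("GET", "/graphql", nil)
	for _, c := range rec.Result().Cookies() {
		req.AddCookie(c)
	}

	session, _ := store.Get(req, "session")
	userID, _ := session.Values["userID"].(string)
	fmt.Println(session.IsNew, userID) // false admin
}
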
@@ -19,6 +19,9 @@ const Metadata = "metadata"
 const Downloads = "downloads"
 const Username = "username"
 const Password = "password"
+const MaxSessionAge = "max_session_age"
+
+const DefaultMaxSessionAge = 60 * 60 * 1 // 1 hours
 
 const Database = "database"
 
@@ -31,6 +34,12 @@ const Host = "host"
 const Port = "port"
 const ExternalHost = "external_host"
 
+// key used to sign JWT tokens
+const JWTSignKey = "jwt_secret_key"
+
+// key used for session store
+const SessionStoreKey = "session_store_key"
+
 // scraping options
 const ScrapersPath = "scrapers_path"
 const ScraperUserAgent = "scraper_user_agent"
@@ -89,6 +98,14 @@ func GetDatabasePath() string {
 	return viper.GetString(Database)
 }
 
+func GetJWTSignKey() []byte {
+	return []byte(viper.GetString(JWTSignKey))
+}
+
+func GetSessionStoreKey() []byte {
+	return []byte(viper.GetString(SessionStoreKey))
+}
+
 func GetDefaultScrapersPath() string {
 	// default to the same directory as the config file
 	configFileUsed := viper.ConfigFileUsed()
@@ -202,6 +219,13 @@ func ValidateCredentials(username string, password string) bool {
 	return username == authUser && err == nil
 }
 
+// GetMaxSessionAge gets the maximum age for session cookies, in seconds.
+// Session cookie expiry times are refreshed every request.
+func GetMaxSessionAge() int {
+	viper.SetDefault(MaxSessionAge, DefaultMaxSessionAge)
+	return viper.GetInt(MaxSessionAge)
+}
+
 // Interface options
 func GetSoundOnPreview() bool {
 	viper.SetDefault(SoundOnPreview, true)
@@ -315,3 +339,21 @@ func IsValid() bool {
 	// TODO: check valid paths
 	return setPaths
 }
+
+// SetInitialConfig fills in missing required config fields
+func SetInitialConfig() error {
+	// generate some api keys
+	const apiKeyLength = 32
+
+	if string(GetJWTSignKey()) == "" {
+		signKey := utils.GenerateRandomKey(apiKeyLength)
+		Set(JWTSignKey, signKey)
+	}
+
+	if string(GetSessionStoreKey()) == "" {
+		sessionStoreKey := utils.GenerateRandomKey(apiKeyLength)
+		Set(SessionStoreKey, sessionStoreKey)
+	}
+
+	return Write()
+}

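GetMaxSessionAge above leans on viper's default mechanism. A small sketch of that behavior, not part of the diff and with an illustrative override value: SetDefault only supplies a fallback, so an explicit max_session_age read from the config file (or written via the new ConfigureGeneral input) takes precedence.

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	const maxSessionAge = "max_session_age"
	const defaultMaxSessionAge = 60 * 60 * 1 // one hour, mirroring DefaultMaxSessionAge

	viper.SetDefault(maxSessionAge, defaultMaxSessionAge)
	fmt.Println(viper.GetInt(maxSessionAge)) // 3600: the default applies

	viper.Set(maxSessionAge, 600) // e.g. configured by the user
	fmt.Println(viper.GetInt(maxSessionAge)) // 600: the explicit value wins
}
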
@@ -2,6 +2,7 @@ package utils
 
 import (
 	"crypto/md5"
+	"crypto/rand"
 	"fmt"
 	"io"
 	"os"

@@ -31,3 +32,9 @@ func MD5FromFilePath(filePath string) (string, error) {
 	checksum := h.Sum(nil)
 	return fmt.Sprintf("%x", checksum), nil
 }
+
+func GenerateRandomKey(l int) string {
+	b := make([]byte, l)
+	rand.Read(b)
+	return fmt.Sprintf("%x", b)
+}

ui/login/login.css — new file (117 lines)
@@ -0,0 +1,117 @@
/* try to reflect the default css as much as possible */
* {
  box-sizing: border-box;
}
html {
  font-size: 14px;
}

body {
  background-color: #202b33;
  color: #f5f8fa;
  font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  margin: 0;
  padding: 0;
  overflow-y: hidden;
}

h6 {
  font-size: 1rem;
  margin-top: 0;
  margin-bottom: .5rem;
  font-weight: 500;
  line-height: 1.2;
}

button, input {
  margin: 0;
  font-family: inherit;
  font-size: inherit;
  line-height: inherit;
}

.card {
  background-color: #30404d;
  border-radius: 3px;
  box-shadow: 0 0 0 1px rgba(16,22,26,.4), 0 0 0 rgba(16,22,26,0), 0 0 0 rgba(16,22,26,0);
  padding: 20px;
}

.dialog {
  display: flex;
  align-items: center;
  justify-content: center;

  width: 100%;
  height: 100vh;
  padding-right: 15px;
  padding-left: 15px;
  margin-right: auto;
  margin-left: auto;
}

.form-group {
  margin-bottom: 1rem;
}

.form-control {
  display: block;
  width: 100%;
  height: calc(1.5em + .75rem + 2px);
  padding: .375rem .75rem;
  font-size: 1rem;
  font-weight: 400;
  line-height: 1.5;
  color: #495057;
  background-clip: padding-box;
  border: 1px solid #ced4da;
  border-radius: .25rem;
  -webkit-transition: border-color .15s ease-in-out,box-shadow .15s ease-in-out;
  transition: border-color .15s ease-in-out,box-shadow .15s ease-in-out;
}

.text-input {
  border: 0;
  box-shadow: 0 0 0 0 rgba(19,124,189,0), 0 0 0 0 rgba(19,124,189,0), 0 0 0 0 rgba(19,124,189,0), inset 0 0 0 1px rgba(16,22,26,.3), inset 0 1px 1px rgba(16,22,26,.4);
  color: #f5f8fa;
}

.text-input, .text-input:focus, .text-input[readonly] {
  background-color: rgba(16,22,26,.3);
}

.btn {
  display: inline-block;
  font-weight: 400;
  color: #212529;
  text-align: center;
  vertical-align: middle;
  cursor: pointer;
  -webkit-user-select: none;
  -moz-user-select: none;
  -ms-user-select: none;
  user-select: none;
  background-color: initial;
  border: 1px solid transparent;
  padding: .375rem .75rem;
  font-size: 1rem;
  line-height: 1.5;
  border-radius: .25rem;
  -webkit-transition: color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;
  transition: color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;
}

.btn-primary {
  color: #fff;
  background-color: #137cbd;
  border-color: #137cbd;
}

.login-error {
  color: #db3737;
  font-size: 80%;
  font-weight: 500;
  padding-bottom: 1rem;
}

ui/login/login.html — new file (40 lines)
@@ -0,0 +1,40 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
  <title>Login</title>

  <link rel="stylesheet" href="//fonts.googleapis.com/css?family=Roboto:300,300italic,700,700italic">
  <link rel="stylesheet" href="/login/login.css">
  <link rel="stylesheet" href="/css">
</head>
<body class="login">

<div class="dialog">
  <div class="card">
    <form action="/login" method="POST">
      <div class="form-group">
        <label for="username"><h6>Username</h6></label>
        <input class="text-input form-control" name="username" type="text" placeholder="Username" />
      </div>
      <div>
      <div class="form-group">
        <label for="password"><h6>Password</h6></label>
        <input class="text-input form-control" name="password" type="password" placeholder="Password" />
      </div>
      <div class="login-error">
        {{.Error}}
      </div>

      <input type="hidden" name="returnURL" value="{{.URL}}" />

      <div>
        <input class="btn btn-primary" type="submit" value="Login">
      </div>
    </form>
  </div>
</div>

</body>
</html>

@@ -55,6 +55,7 @@
     "react-router-dom": "^5.1.2",
     "react-select": "^3.0.8",
     "subscriptions-transport-ws": "^0.9.16",
+    "universal-cookie": "^4.0.3",
     "video.js": "^7.6.0"
   },
   "devDependencies": {

@@ -4,6 +4,7 @@ import { Nav, Navbar, Button } from "react-bootstrap";
 import { IconName } from "@fortawesome/fontawesome-svg-core";
 import { LinkContainer } from "react-router-bootstrap";
 import { Link, useLocation } from "react-router-dom";
+import { SessionUtils } from "src/utils";
 
 import { Icon } from "src/components/Shared";
 
@@ -98,6 +99,16 @@ export const MainNavbar: React.FC = () => {
     </LinkContainer>
   );
 
+  function maybeRenderLogout() {
+    if (SessionUtils.isLoggedIn()) {
+      return (
+        <Button className="minimal logout-button" href="/logout">
+          <Icon icon="sign-out-alt" />
+        </Button>
+      );
+    }
+  }
+
   return (
     <Navbar
       collapseOnSelect
@@ -153,6 +164,7 @@ export const MainNavbar: React.FC = () => {
           <Icon icon="cog" />
         </Button>
       </LinkContainer>
+      {maybeRenderLogout()}
     </Nav>
   </Navbar>
 );

@@ -24,6 +24,7 @@ export const SettingsConfigurationPanel: React.FC = () => {
   >(undefined);
   const [username, setUsername] = useState<string | undefined>(undefined);
   const [password, setPassword] = useState<string | undefined>(undefined);
+  const [maxSessionAge, setMaxSessionAge] = useState<number>(0);
   const [logFile, setLogFile] = useState<string | undefined>();
   const [logOut, setLogOut] = useState<boolean>(true);
   const [logLevel, setLogLevel] = useState<string>("Info");
@@ -43,6 +44,7 @@ export const SettingsConfigurationPanel: React.FC = () => {
     maxStreamingTranscodeSize,
     username,
     password,
+    maxSessionAge,
     logFile,
     logOut,
     logLevel,
@@ -65,6 +67,7 @@ export const SettingsConfigurationPanel: React.FC = () => {
     );
     setUsername(conf.general.username);
     setPassword(conf.general.password);
+    setMaxSessionAge(conf.general.maxSessionAge);
     setLogFile(conf.general.logFile ?? undefined);
     setLogOut(conf.general.logOut);
     setLogLevel(conf.general.logLevel);
@@ -339,6 +342,21 @@ export const SettingsConfigurationPanel: React.FC = () => {
             Password to access Stash. Leave blank to disable user authentication
           </Form.Text>
         </Form.Group>
+
+        <Form.Group id="maxSessionAge">
+          <h6>Maximum Session Age</h6>
+          <Form.Control
+            className="col col-sm-6 text-input"
+            type="number"
+            value={maxSessionAge.toString()}
+            onInput={(e: React.FormEvent<HTMLInputElement>) =>
+              setMaxSessionAge(Number.parseInt(e.currentTarget.value, 10))
+            }
+          />
+          <Form.Text className="text-muted">
+            Maximum idle time before a login session is expired, in seconds.
+          </Form.Text>
+        </Form.Group>
       </Form.Group>
 
       <hr />

@@ -2,7 +2,9 @@ import ApolloClient from "apollo-client";
 import { WebSocketLink } from "apollo-link-ws";
 import { InMemoryCache, NormalizedCacheObject } from "apollo-cache-inmemory";
 import { HttpLink } from "apollo-link-http";
-import { split } from "apollo-link";
+import { onError } from "apollo-link-error";
+import { ServerError } from "apollo-link-http-common";
+import { split, from } from "apollo-link";
 import { getMainDefinition } from "apollo-utilities";
 import { ListFilterModel } from "../models/list-filter/filter";
 import * as GQL from "./generated-graphql";
@@ -51,7 +53,15 @@ export class StashService {
       },
     });
 
-    const link = split(
+    const errorLink = onError(({ networkError }) => {
+      // handle unauthorized error by redirecting to the login page
+      if (networkError && (networkError as ServerError).statusCode === 401) {
+        // redirect to login page
+        window.location.href = "/login";
+      }
+    });
+
+    const splitLink = split(
       ({ query }) => {
         const definition = getMainDefinition(query);
         return (
@@ -63,6 +73,8 @@ export class StashService {
       httpLink
     );
 
+    const link = from([errorLink, splitLink]);
+
     StashService.cache = new InMemoryCache();
     StashService.client = new ApolloClient({
       link,

@@ -4,4 +4,5 @@ export { default as TableUtils } from "./table";
 export { default as TextUtils } from "./text";
 export { default as DurationUtils } from "./duration";
 export { default as JWUtils } from "./jwplayer";
+export { default as SessionUtils } from "./session";
 export { default as flattenMessages } from "./flattenMessages";

ui/v2.5/src/utils/session.ts — new file (9 lines)
@@ -0,0 +1,9 @@
import Cookies from "universal-cookie";

const isLoggedIn = () => {
  return new Cookies().get("session") !== undefined;
};

export default {
  isLoggedIn,
};

ui/v2.5/yarn.lock
@@ -1996,6 +1996,11 @@
   resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
   integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
 
+"@types/cookie@^0.3.3":
+  version "0.3.3"
+  resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.3.3.tgz#85bc74ba782fb7aa3a514d11767832b0e3bc6803"
+  integrity sha512-LKVP3cgXBT9RYj+t+9FDKwS5tdI+rPBXaNSkma7hvqy35lc7mAokC2zsqWJH0LaqIt3B962nuYI77hsJoT1gow==
+
 "@types/debounce@1.2.0":
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/@types/debounce/-/debounce-1.2.0.tgz#9ee99259f41018c640b3929e1bb32c3dcecdb192"
@@ -2126,6 +2131,11 @@
   resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e"
   integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==
 
+"@types/object-assign@^4.0.30":
+  version "4.0.30"
+  resolved "https://registry.yarnpkg.com/@types/object-assign/-/object-assign-4.0.30.tgz#8949371d5a99f4381ee0f1df0a9b7a187e07e652"
+  integrity sha1-iUk3HVqZ9Dge4PHfCpt6GH4H5lI=
+
 "@types/parse-json@^4.0.0":
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
@@ -4371,7 +4381,7 @@ cookie-signature@1.0.6:
   resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
   integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
 
-cookie@0.4.0:
+cookie@0.4.0, cookie@^0.4.0:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba"
   integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==
@@ -13791,6 +13801,16 @@ unist-util-visit@^1.1.0:
   dependencies:
     unist-util-visit-parents "^2.0.0"
 
+universal-cookie@^4.0.3:
+  version "4.0.3"
+  resolved "https://registry.yarnpkg.com/universal-cookie/-/universal-cookie-4.0.3.tgz#c2fa59127260e6ad21ef3e0cdd66ad453cbc41f6"
+  integrity sha512-YbEHRs7bYOBTIWedTR9koVEe2mXrq+xdjTJZcoKJK/pQaE6ni28ak2AKXFpevb+X6w3iU5SXzWDiJkmpDRb9qw==
+  dependencies:
+    "@types/cookie" "^0.3.3"
+    "@types/object-assign" "^4.0.30"
+    cookie "^0.4.0"
+    object-assign "^4.1.1"
+
 universalify@^0.1.0:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"

vendor/github.com/gorilla/securecookie/.travis.yml — new file (19 lines, generated, vendored)
@@ -0,0 +1,19 @@
language: go
sudo: false

matrix:
  include:
    - go: 1.3
    - go: 1.4
    - go: 1.5
    - go: 1.6
    - go: 1.7
    - go: tip
  allow_failures:
    - go: tip

script:
  - go get -t -v ./...
  - diff -u <(echo -n) <(gofmt -d .)
  - go vet $(go list ./... | grep -v /vendor/)
  - go test -v -race ./...

vendor/github.com/gorilla/securecookie/LICENSE — new file (27 lines, generated, vendored)
@@ -0,0 +1,27 @@
Copyright (c) 2012 Rodrigo Moraes. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

	 * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
	 * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
	 * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

vendor/github.com/gorilla/securecookie/README.md — new file (80 lines, generated, vendored)
@@ -0,0 +1,80 @@
securecookie
============
[](https://godoc.org/github.com/gorilla/securecookie) [](https://travis-ci.org/gorilla/securecookie)
[](https://sourcegraph.com/github.com/gorilla/securecookie?badge)


securecookie encodes and decodes authenticated and optionally encrypted
cookie values.

Secure cookies can't be forged, because their values are validated using HMAC.
When encrypted, the content is also inaccessible to malicious eyes. It is still
recommended that sensitive data not be stored in cookies, and that HTTPS be used
to prevent cookie [replay attacks](https://en.wikipedia.org/wiki/Replay_attack).

## Examples

To use it, first create a new SecureCookie instance:

```go
// Hash keys should be at least 32 bytes long
var hashKey = []byte("very-secret")
// Block keys should be 16 bytes (AES-128) or 32 bytes (AES-256) long.
// Shorter keys may weaken the encryption used.
var blockKey = []byte("a-lot-secret")
var s = securecookie.New(hashKey, blockKey)
```

The hashKey is required, used to authenticate the cookie value using HMAC.
It is recommended to use a key with 32 or 64 bytes.

The blockKey is optional, used to encrypt the cookie value -- set it to nil
to not use encryption. If set, the length must correspond to the block size
of the encryption algorithm. For AES, used by default, valid lengths are
16, 24, or 32 bytes to select AES-128, AES-192, or AES-256.

Strong keys can be created using the convenience function GenerateRandomKey().

Once a SecureCookie instance is set, use it to encode a cookie value:

```go
func SetCookieHandler(w http.ResponseWriter, r *http.Request) {
	value := map[string]string{
		"foo": "bar",
	}
	if encoded, err := s.Encode("cookie-name", value); err == nil {
		cookie := &http.Cookie{
			Name:  "cookie-name",
			Value: encoded,
			Path:  "/",
			Secure: true,
			HttpOnly: true,
		}
		http.SetCookie(w, cookie)
	}
}
```

Later, use the same SecureCookie instance to decode and validate a cookie
value:

```go
func ReadCookieHandler(w http.ResponseWriter, r *http.Request) {
	if cookie, err := r.Cookie("cookie-name"); err == nil {
		value := make(map[string]string)
		if err = s2.Decode("cookie-name", cookie.Value, &value); err == nil {
			fmt.Fprintf(w, "The value of foo is %q", value["foo"])
		}
	}
}
```

We stored a map[string]string, but secure cookies can hold any value that
can be encoded using `encoding/gob`. To store custom types, they must be
registered first using gob.Register(). For basic types this is not needed;
it works out of the box. An optional JSON encoder that uses `encoding/json` is
available for types compatible with JSON.

## License

BSD licensed. See the LICENSE file for details.

vendor/github.com/gorilla/securecookie/doc.go — new file (61 lines, generated, vendored)
@@ -0,0 +1,61 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
Package securecookie encodes and decodes authenticated and optionally
encrypted cookie values.

Secure cookies can't be forged, because their values are validated using HMAC.
When encrypted, the content is also inaccessible to malicious eyes.

To use it, first create a new SecureCookie instance:

	var hashKey = []byte("very-secret")
	var blockKey = []byte("a-lot-secret")
	var s = securecookie.New(hashKey, blockKey)

The hashKey is required, used to authenticate the cookie value using HMAC.
It is recommended to use a key with 32 or 64 bytes.

The blockKey is optional, used to encrypt the cookie value -- set it to nil
to not use encryption. If set, the length must correspond to the block size
of the encryption algorithm. For AES, used by default, valid lengths are
16, 24, or 32 bytes to select AES-128, AES-192, or AES-256.

Strong keys can be created using the convenience function GenerateRandomKey().

Once a SecureCookie instance is set, use it to encode a cookie value:

	func SetCookieHandler(w http.ResponseWriter, r *http.Request) {
		value := map[string]string{
			"foo": "bar",
		}
		if encoded, err := s.Encode("cookie-name", value); err == nil {
			cookie := &http.Cookie{
				Name:  "cookie-name",
				Value: encoded,
				Path:  "/",
			}
			http.SetCookie(w, cookie)
		}
	}

Later, use the same SecureCookie instance to decode and validate a cookie
value:

	func ReadCookieHandler(w http.ResponseWriter, r *http.Request) {
		if cookie, err := r.Cookie("cookie-name"); err == nil {
			value := make(map[string]string)
			if err = s2.Decode("cookie-name", cookie.Value, &value); err == nil {
				fmt.Fprintf(w, "The value of foo is %q", value["foo"])
			}
		}
	}

We stored a map[string]string, but secure cookies can hold any value that
can be encoded using encoding/gob. To store custom types, they must be
registered first using gob.Register(). For basic types this is not needed;
it works out of the box.
*/
package securecookie

vendor/github.com/gorilla/securecookie/fuzz.go — new file (25 lines, generated, vendored)
@@ -0,0 +1,25 @@
// +build gofuzz

package securecookie

var hashKey = []byte("very-secret12345")
var blockKey = []byte("a-lot-secret1234")
var s = New(hashKey, blockKey)

type Cookie struct {
	B bool
	I int
	S string
}

func Fuzz(data []byte) int {
	datas := string(data)
	var c Cookie
	if err := s.Decode("fuzz", datas, &c); err != nil {
		return 0
	}
	if _, err := s.Encode("fuzz", c); err != nil {
		panic(err)
	}
	return 1
}

646
vendor/github.com/gorilla/securecookie/securecookie.go
generated
vendored
Normal file
646
vendor/github.com/gorilla/securecookie/securecookie.go
generated
vendored
Normal file
@@ -0,0 +1,646 @@
|
|||||||
|
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package securecookie
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/aes"
|
||||||
|
"crypto/cipher"
|
||||||
|
"crypto/hmac"
|
||||||
|
"crypto/rand"
|
||||||
|
"crypto/sha256"
|
||||||
|
"crypto/subtle"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/gob"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"hash"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Error is the interface of all errors returned by functions in this library.
|
||||||
|
type Error interface {
|
||||||
|
error
|
||||||
|
|
||||||
|
// IsUsage returns true for errors indicating the client code probably
|
||||||
|
// uses this library incorrectly. For example, the client may have
|
||||||
|
// failed to provide a valid hash key, or may have failed to configure
|
||||||
|
// the Serializer adequately for encoding value.
|
||||||
|
IsUsage() bool
|
||||||
|
|
||||||
|
// IsDecode returns true for errors indicating that a cookie could not
|
||||||
|
// be decoded and validated. Since cookies are usually untrusted
|
||||||
|
// user-provided input, errors of this type should be expected.
|
||||||
|
// Usually, the proper action is simply to reject the request.
|
||||||
|
IsDecode() bool
|
||||||
|
|
||||||
|
// IsInternal returns true for unexpected errors occurring in the
|
||||||
|
// securecookie implementation.
|
||||||
|
IsInternal() bool
|
||||||
|
|
||||||
|
// Cause, if it returns a non-nil value, indicates that this error was
|
||||||
|
// propagated from some underlying library. If this method returns nil,
|
||||||
|
// this error was raised directly by this library.
|
||||||
|
//
|
||||||
|
// Cause is provided principally for debugging/logging purposes; it is
|
||||||
|
// rare that application logic should perform meaningfully different
|
||||||
|
// logic based on Cause. See, for example, the caveats described on
|
||||||
|
// (MultiError).Cause().
|
||||||
|
Cause() error
|
||||||
|
}
|
||||||
|
|
||||||
|
// errorType is a bitmask giving the error type(s) of an cookieError value.
|
||||||
|
type errorType int
|
||||||
|
|
||||||
|
const (
|
||||||
|
usageError = errorType(1 << iota)
|
||||||
|
decodeError
|
||||||
|
internalError
|
||||||
|
)
|
||||||
|
|
||||||
|
type cookieError struct {
|
||||||
|
typ errorType
|
||||||
|
msg string
|
||||||
|
cause error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e cookieError) IsUsage() bool { return (e.typ & usageError) != 0 }
|
||||||
|
func (e cookieError) IsDecode() bool { return (e.typ & decodeError) != 0 }
|
||||||
|
func (e cookieError) IsInternal() bool { return (e.typ & internalError) != 0 }
|
||||||
|
|
||||||
|
func (e cookieError) Cause() error { return e.cause }
|
||||||
|
|
||||||
|
func (e cookieError) Error() string {
|
||||||
|
parts := []string{"securecookie: "}
|
||||||
|
if e.msg == "" {
|
||||||
|
parts = append(parts, "error")
|
||||||
|
} else {
|
||||||
|
parts = append(parts, e.msg)
|
||||||
|
}
|
||||||
|
if c := e.Cause(); c != nil {
|
||||||
|
parts = append(parts, " - caused by: ", c.Error())
|
||||||
|
}
|
||||||
|
return strings.Join(parts, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
errGeneratingIV = cookieError{typ: internalError, msg: "failed to generate random iv"}
|
||||||
|
|
||||||
|
errNoCodecs = cookieError{typ: usageError, msg: "no codecs provided"}
|
||||||
|
errHashKeyNotSet = cookieError{typ: usageError, msg: "hash key is not set"}
|
||||||
|
errBlockKeyNotSet = cookieError{typ: usageError, msg: "block key is not set"}
|
||||||
|
errEncodedValueTooLong = cookieError{typ: usageError, msg: "the value is too long"}
|
||||||
|
|
||||||
|
errValueToDecodeTooLong = cookieError{typ: decodeError, msg: "the value is too long"}
|
||||||
|
errTimestampInvalid = cookieError{typ: decodeError, msg: "invalid timestamp"}
|
||||||
|
errTimestampTooNew = cookieError{typ: decodeError, msg: "timestamp is too new"}
|
||||||
|
errTimestampExpired = cookieError{typ: decodeError, msg: "expired timestamp"}
|
||||||
|
errDecryptionFailed = cookieError{typ: decodeError, msg: "the value could not be decrypted"}
|
||||||
|
errValueNotByte = cookieError{typ: decodeError, msg: "value not a []byte."}
|
||||||
|
errValueNotBytePtr = cookieError{typ: decodeError, msg: "value not a pointer to []byte."}
|
||||||
|
|
||||||
|
// ErrMacInvalid indicates that cookie decoding failed because the HMAC
|
||||||
|
// could not be extracted and verified. Direct use of this error
|
||||||
|
// variable is deprecated; it is public only for legacy compatibility,
|
||||||
|
// and may be privatized in the future, as it is rarely useful to
|
||||||
|
// distinguish between this error and other Error implementations.
|
||||||
|
ErrMacInvalid = cookieError{typ: decodeError, msg: "the value is not valid"}
|
||||||
|
)
|
||||||
|
|
||||||
|
// Codec defines an interface to encode and decode cookie values.
|
||||||
|
type Codec interface {
|
||||||
|
Encode(name string, value interface{}) (string, error)
|
||||||
|
Decode(name, value string, dst interface{}) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// New returns a new SecureCookie.
|
||||||
|
//
|
||||||
|
// hashKey is required, used to authenticate values using HMAC. Create it using
|
||||||
|
// GenerateRandomKey(). It is recommended to use a key with 32 or 64 bytes.
|
||||||
|
//
|
||||||
|
// blockKey is optional, used to encrypt values. Create it using
|
||||||
|
// GenerateRandomKey(). The key length must correspond to the block size
|
||||||
|
// of the encryption algorithm. For AES, used by default, valid lengths are
|
||||||
|
// 16, 24, or 32 bytes to select AES-128, AES-192, or AES-256.
|
||||||
|
// The default encoder used for cookie serialization is encoding/gob.
|
||||||
|
//
|
||||||
|
// Note that keys created using GenerateRandomKey() are not automatically
|
||||||
|
// persisted. New keys will be created when the application is restarted, and
|
||||||
|
// previously issued cookies will not be able to be decoded.
|
||||||
|
func New(hashKey, blockKey []byte) *SecureCookie {
|
||||||
|
s := &SecureCookie{
|
||||||
|
hashKey: hashKey,
|
||||||
|
blockKey: blockKey,
|
||||||
|
hashFunc: sha256.New,
|
||||||
|
maxAge: 86400 * 30,
|
||||||
|
maxLength: 4096,
|
||||||
|
sz: GobEncoder{},
|
||||||
|
}
|
||||||
|
if hashKey == nil {
|
||||||
|
s.err = errHashKeyNotSet
|
||||||
|
}
|
||||||
|
if blockKey != nil {
|
||||||
|
s.BlockFunc(aes.NewCipher)
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// SecureCookie encodes and decodes authenticated and optionally encrypted
|
||||||
|
// cookie values.
|
||||||
|
type SecureCookie struct {
|
||||||
|
hashKey []byte
|
||||||
|
hashFunc func() hash.Hash
|
||||||
|
blockKey []byte
|
||||||
|
block cipher.Block
|
||||||
|
maxLength int
|
||||||
|
maxAge int64
|
||||||
|
minAge int64
|
||||||
|
err error
|
||||||
|
sz Serializer
|
||||||
|
// For testing purposes, the function that returns the current timestamp.
|
||||||
|
// If not set, it will use time.Now().UTC().Unix().
|
||||||
|
timeFunc func() int64
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serializer provides an interface for providing custom serializers for cookie
|
||||||
|
// values.
|
||||||
|
type Serializer interface {
|
||||||
|
Serialize(src interface{}) ([]byte, error)
|
||||||
|
Deserialize(src []byte, dst interface{}) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// GobEncoder encodes cookie values using encoding/gob. This is the simplest
|
||||||
|
// encoder and can handle complex types via gob.Register.
|
||||||
|
type GobEncoder struct{}
|
||||||
|
|
||||||
|
// JSONEncoder encodes cookie values using encoding/json. Users who wish to
|
||||||
|
// encode complex types need to satisfy the json.Marshaller and
|
||||||
|
// json.Unmarshaller interfaces.
|
||||||
|
type JSONEncoder struct{}
|
||||||
|
|
||||||
|
// NopEncoder does not encode cookie values, and instead simply accepts a []byte
|
||||||
|
// (as an interface{}) and returns a []byte. This is particularly useful when
|
||||||
|
// you encoding an object upstream and do not wish to re-encode it.
|
||||||
|
type NopEncoder struct{}
|
||||||
|
|
||||||
|
// MaxLength restricts the maximum length, in bytes, for the cookie value.
|
||||||
|
//
|
||||||
|
// Default is 4096, which is the maximum value accepted by Internet Explorer.
|
||||||
|
func (s *SecureCookie) MaxLength(value int) *SecureCookie {
|
||||||
|
s.maxLength = value
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// MaxAge restricts the maximum age, in seconds, for the cookie value.
|
||||||
|
//
|
||||||
|
// Default is 86400 * 30. Set it to 0 for no restriction.
|
||||||
|
func (s *SecureCookie) MaxAge(value int) *SecureCookie {
|
||||||
|
s.maxAge = int64(value)
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// MinAge restricts the minimum age, in seconds, for the cookie value.
|
||||||
|
//
|
||||||
|
// Default is 0 (no restriction).
|
||||||
|
func (s *SecureCookie) MinAge(value int) *SecureCookie {
|
||||||
|
s.minAge = int64(value)
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// HashFunc sets the hash function used to create HMAC.
|
||||||
|
//
|
||||||
|
// Default is crypto/sha256.New.
|
||||||
|
func (s *SecureCookie) HashFunc(f func() hash.Hash) *SecureCookie {
|
||||||
|
s.hashFunc = f
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// BlockFunc sets the encryption function used to create a cipher.Block.
|
||||||
|
//
|
||||||
|
// Default is crypto/aes.New.
|
||||||
|
func (s *SecureCookie) BlockFunc(f func([]byte) (cipher.Block, error)) *SecureCookie {
|
||||||
|
if s.blockKey == nil {
|
||||||
|
s.err = errBlockKeyNotSet
|
||||||
|
} else if block, err := f(s.blockKey); err == nil {
|
||||||
|
s.block = block
|
||||||
|
} else {
|
||||||
|
s.err = cookieError{cause: err, typ: usageError}
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetSerializer sets the encoding/serialization method for cookies.
|
||||||
|
//
|
||||||
|
// Default is encoding/gob. To encode special structures using encoding/gob,
|
||||||
|
// they must be registered first using gob.Register().
|
||||||
|
func (s *SecureCookie) SetSerializer(sz Serializer) *SecureCookie {
|
||||||
|
s.sz = sz
|
||||||
|
|
||||||
|
return s
|
||||||
|
}
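Taken together, the setters above form a fluent configuration API. A minimal sketch (not part of the vendored file; the key literals and sizes are placeholders) of chaining them onto New:

```go
package main

import (
	"crypto/sha512"
	"fmt"

	"github.com/gorilla/securecookie"
)

func main() {
	// Placeholder keys for illustration only; generate real keys once
	// (e.g. with securecookie.GenerateRandomKey) and persist them.
	hashKey := []byte("32-byte-long-auth-key-----------")
	blockKey := []byte("16-byte-enc-key-") // 16 bytes selects AES-128

	s := securecookie.New(hashKey, blockKey).
		MaxAge(3600).                             // reject values older than one hour
		MaxLength(2048).                          // cap encoded size below the 4096 default
		HashFunc(sha512.New512_256).              // swap the default SHA-256 HMAC
		SetSerializer(securecookie.JSONEncoder{}) // use JSON instead of gob

	encoded, err := s.Encode("session", map[string]string{"user": "alice"})
	fmt.Println(encoded, err)
}
```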
|
||||||
|
|
||||||
|
// Encode encodes a cookie value.
|
||||||
|
//
|
||||||
|
// It serializes, optionally encrypts, signs with a message authentication code,
|
||||||
|
// and finally encodes the value.
|
||||||
|
//
|
||||||
|
// The name argument is the cookie name. It is stored with the encoded value.
|
||||||
|
// The value argument is the value to be encoded. It can be any value that can
|
||||||
|
// be encoded using the currently selected serializer; see SetSerializer().
|
||||||
|
//
|
||||||
|
// It is the client's responsibility to ensure that value, when encoded using
|
||||||
|
// the current serialization/encryption settings on s and then base64-encoded,
|
||||||
|
// is shorter than the maximum permissible length.
|
||||||
|
func (s *SecureCookie) Encode(name string, value interface{}) (string, error) {
|
||||||
|
if s.err != nil {
|
||||||
|
return "", s.err
|
||||||
|
}
|
||||||
|
if s.hashKey == nil {
|
||||||
|
s.err = errHashKeyNotSet
|
||||||
|
return "", s.err
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
var b []byte
|
||||||
|
// 1. Serialize.
|
||||||
|
if b, err = s.sz.Serialize(value); err != nil {
|
||||||
|
return "", cookieError{cause: err, typ: usageError}
|
||||||
|
}
|
||||||
|
// 2. Encrypt (optional).
|
||||||
|
if s.block != nil {
|
||||||
|
if b, err = encrypt(s.block, b); err != nil {
|
||||||
|
return "", cookieError{cause: err, typ: usageError}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
b = encode(b)
|
||||||
|
// 3. Create MAC for "name|date|value". Extra pipe to be used later.
|
||||||
|
b = []byte(fmt.Sprintf("%s|%d|%s|", name, s.timestamp(), b))
|
||||||
|
mac := createMac(hmac.New(s.hashFunc, s.hashKey), b[:len(b)-1])
|
||||||
|
// Append mac, remove name.
|
||||||
|
b = append(b, mac...)[len(name)+1:]
|
||||||
|
// 4. Encode to base64.
|
||||||
|
b = encode(b)
|
||||||
|
// 5. Check length.
|
||||||
|
if s.maxLength != 0 && len(b) > s.maxLength {
|
||||||
|
return "", errEncodedValueTooLong
|
||||||
|
}
|
||||||
|
// Done.
|
||||||
|
return string(b), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode decodes a cookie value.
|
||||||
|
//
|
||||||
|
// It decodes, verifies a message authentication code, optionally decrypts and
|
||||||
|
// finally deserializes the value.
|
||||||
|
//
|
||||||
|
// The name argument is the cookie name. It must be the same name used when
|
||||||
|
// it was stored. The value argument is the encoded cookie value. The dst
|
||||||
|
// argument is where the cookie will be decoded. It must be a pointer.
|
||||||
|
func (s *SecureCookie) Decode(name, value string, dst interface{}) error {
|
||||||
|
if s.err != nil {
|
||||||
|
return s.err
|
||||||
|
}
|
||||||
|
if s.hashKey == nil {
|
||||||
|
s.err = errHashKeyNotSet
|
||||||
|
return s.err
|
||||||
|
}
|
||||||
|
// 1. Check length.
|
||||||
|
if s.maxLength != 0 && len(value) > s.maxLength {
|
||||||
|
return errValueToDecodeTooLong
|
||||||
|
}
|
||||||
|
// 2. Decode from base64.
|
||||||
|
b, err := decode([]byte(value))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// 3. Verify MAC. Value is "date|value|mac".
|
||||||
|
parts := bytes.SplitN(b, []byte("|"), 3)
|
||||||
|
if len(parts) != 3 {
|
||||||
|
return ErrMacInvalid
|
||||||
|
}
|
||||||
|
h := hmac.New(s.hashFunc, s.hashKey)
|
||||||
|
b = append([]byte(name+"|"), b[:len(b)-len(parts[2])-1]...)
|
||||||
|
if err = verifyMac(h, b, parts[2]); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// 4. Verify date ranges.
|
||||||
|
var t1 int64
|
||||||
|
if t1, err = strconv.ParseInt(string(parts[0]), 10, 64); err != nil {
|
||||||
|
return errTimestampInvalid
|
||||||
|
}
|
||||||
|
t2 := s.timestamp()
|
||||||
|
if s.minAge != 0 && t1 > t2-s.minAge {
|
||||||
|
return errTimestampTooNew
|
||||||
|
}
|
||||||
|
if s.maxAge != 0 && t1 < t2-s.maxAge {
|
||||||
|
return errTimestampExpired
|
||||||
|
}
|
||||||
|
// 5. Decrypt (optional).
|
||||||
|
b, err = decode(parts[1])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if s.block != nil {
|
||||||
|
if b, err = decrypt(s.block, b); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// 6. Deserialize.
|
||||||
|
if err = s.sz.Deserialize(b, dst); err != nil {
|
||||||
|
return cookieError{cause: err, typ: decodeError}
|
||||||
|
}
|
||||||
|
// Done.
|
||||||
|
return nil
|
||||||
|
}
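As a round-trip sketch (the handlers, cookie name and key handling are illustrative, not taken from this diff), Encode and Decode pair up like this:

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/securecookie"
)

// The hash key authenticates (HMAC); the optional block key enables AES encryption.
// Keys are generated per process here only for brevity; persist them in real code.
var s = securecookie.New(
	securecookie.GenerateRandomKey(64),
	securecookie.GenerateRandomKey(32),
)

func setHandler(w http.ResponseWriter, r *http.Request) {
	value := map[string]string{"user": "alice"}
	// Encode serializes, encrypts (block key set) and signs the value under the cookie name.
	if encoded, err := s.Encode("session", value); err == nil {
		http.SetCookie(w, &http.Cookie{Name: "session", Value: encoded, Path: "/", HttpOnly: true})
	}
}

func getHandler(w http.ResponseWriter, r *http.Request) {
	if c, err := r.Cookie("session"); err == nil {
		value := map[string]string{}
		// Decode verifies the MAC and the timestamp window before deserializing.
		if err := s.Decode("session", c.Value, &value); err == nil {
			fmt.Fprintf(w, "user=%s\n", value["user"])
		}
	}
}

func main() {
	http.HandleFunc("/set", setHandler)
	http.HandleFunc("/get", getHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```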
|
||||||
|
|
||||||
|
// timestamp returns the current timestamp, in seconds.
|
||||||
|
//
|
||||||
|
// For testing purposes, the function that generates the timestamp can be
|
||||||
|
// overridden. If not set, it will return time.Now().UTC().Unix().
|
||||||
|
func (s *SecureCookie) timestamp() int64 {
|
||||||
|
if s.timeFunc == nil {
|
||||||
|
return time.Now().UTC().Unix()
|
||||||
|
}
|
||||||
|
return s.timeFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication -------------------------------------------------------------
|
||||||
|
|
||||||
|
// createMac creates a message authentication code (MAC).
|
||||||
|
func createMac(h hash.Hash, value []byte) []byte {
|
||||||
|
h.Write(value)
|
||||||
|
return h.Sum(nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// verifyMac verifies that a message authentication code (MAC) is valid.
|
||||||
|
func verifyMac(h hash.Hash, value []byte, mac []byte) error {
|
||||||
|
mac2 := createMac(h, value)
|
||||||
|
// Check that both MACs are of equal length, as subtle.ConstantTimeCompare
|
||||||
|
// does not do this prior to Go 1.4.
|
||||||
|
if len(mac) == len(mac2) && subtle.ConstantTimeCompare(mac, mac2) == 1 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return ErrMacInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encryption -----------------------------------------------------------------
|
||||||
|
|
||||||
|
// encrypt encrypts a value using the given block in counter mode.
|
||||||
|
//
|
||||||
|
// A random initialization vector (http://goo.gl/zF67k) with the length of the
|
||||||
|
// block size is prepended to the resulting ciphertext.
|
||||||
|
func encrypt(block cipher.Block, value []byte) ([]byte, error) {
|
||||||
|
iv := GenerateRandomKey(block.BlockSize())
|
||||||
|
if iv == nil {
|
||||||
|
return nil, errGeneratingIV
|
||||||
|
}
|
||||||
|
// Encrypt it.
|
||||||
|
stream := cipher.NewCTR(block, iv)
|
||||||
|
stream.XORKeyStream(value, value)
|
||||||
|
// Return iv + ciphertext.
|
||||||
|
return append(iv, value...), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// decrypt decrypts a value using the given block in counter mode.
|
||||||
|
//
|
||||||
|
// The value to be decrypted must be prepended by an initialization vector
|
||||||
|
// (http://goo.gl/zF67k) with the length of the block size.
|
||||||
|
func decrypt(block cipher.Block, value []byte) ([]byte, error) {
|
||||||
|
size := block.BlockSize()
|
||||||
|
if len(value) > size {
|
||||||
|
// Extract iv.
|
||||||
|
iv := value[:size]
|
||||||
|
// Extract ciphertext.
|
||||||
|
value = value[size:]
|
||||||
|
// Decrypt it.
|
||||||
|
stream := cipher.NewCTR(block, iv)
|
||||||
|
stream.XORKeyStream(value, value)
|
||||||
|
return value, nil
|
||||||
|
}
|
||||||
|
return nil, errDecryptionFailed
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serialization --------------------------------------------------------------
|
||||||
|
|
||||||
|
// Serialize encodes a value using gob.
|
||||||
|
func (e GobEncoder) Serialize(src interface{}) ([]byte, error) {
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
enc := gob.NewEncoder(buf)
|
||||||
|
if err := enc.Encode(src); err != nil {
|
||||||
|
return nil, cookieError{cause: err, typ: usageError}
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deserialize decodes a value using gob.
|
||||||
|
func (e GobEncoder) Deserialize(src []byte, dst interface{}) error {
|
||||||
|
dec := gob.NewDecoder(bytes.NewBuffer(src))
|
||||||
|
if err := dec.Decode(dst); err != nil {
|
||||||
|
return cookieError{cause: err, typ: decodeError}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serialize encodes a value using encoding/json.
|
||||||
|
func (e JSONEncoder) Serialize(src interface{}) ([]byte, error) {
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
enc := json.NewEncoder(buf)
|
||||||
|
if err := enc.Encode(src); err != nil {
|
||||||
|
return nil, cookieError{cause: err, typ: usageError}
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deserialize decodes a value using encoding/json.
|
||||||
|
func (e JSONEncoder) Deserialize(src []byte, dst interface{}) error {
|
||||||
|
dec := json.NewDecoder(bytes.NewReader(src))
|
||||||
|
if err := dec.Decode(dst); err != nil {
|
||||||
|
return cookieError{cause: err, typ: decodeError}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serialize passes a []byte through as-is.
|
||||||
|
func (e NopEncoder) Serialize(src interface{}) ([]byte, error) {
|
||||||
|
if b, ok := src.([]byte); ok {
|
||||||
|
return b, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errValueNotByte
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deserialize passes a []byte through as-is.
|
||||||
|
func (e NopEncoder) Deserialize(src []byte, dst interface{}) error {
|
||||||
|
if dat, ok := dst.(*[]byte); ok {
|
||||||
|
*dat = src
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errValueNotBytePtr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encoding -------------------------------------------------------------------
|
||||||
|
|
||||||
|
// encode encodes a value using base64.
|
||||||
|
func encode(value []byte) []byte {
|
||||||
|
encoded := make([]byte, base64.URLEncoding.EncodedLen(len(value)))
|
||||||
|
base64.URLEncoding.Encode(encoded, value)
|
||||||
|
return encoded
|
||||||
|
}
|
||||||
|
|
||||||
|
// decode decodes a cookie using base64.
|
||||||
|
func decode(value []byte) ([]byte, error) {
|
||||||
|
decoded := make([]byte, base64.URLEncoding.DecodedLen(len(value)))
|
||||||
|
b, err := base64.URLEncoding.Decode(decoded, value)
|
||||||
|
if err != nil {
|
||||||
|
return nil, cookieError{cause: err, typ: decodeError, msg: "base64 decode failed"}
|
||||||
|
}
|
||||||
|
return decoded[:b], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helpers --------------------------------------------------------------------
|
||||||
|
|
||||||
|
// GenerateRandomKey creates a random key with the given length in bytes.
|
||||||
|
// On failure, returns nil.
|
||||||
|
//
|
||||||
|
// Callers should explicitly check for the possibility of a nil return, treat
|
||||||
|
// it as a failure of the system random number generator, and not continue.
|
||||||
|
func GenerateRandomKey(length int) []byte {
|
||||||
|
k := make([]byte, length)
|
||||||
|
if _, err := io.ReadFull(rand.Reader, k); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return k
|
||||||
|
}
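Because a failure is reported as a nil slice rather than an error, callers typically wrap the call; a sketch with a hypothetical mustKey helper:

```go
package main

import (
	"log"

	"github.com/gorilla/securecookie"
)

// mustKey is a hypothetical helper: it aborts instead of continuing with a weak key.
func mustKey(length int) []byte {
	key := securecookie.GenerateRandomKey(length)
	if key == nil {
		log.Fatal("securecookie: system random number generator failed")
	}
	return key
}

func main() {
	hashKey := mustKey(64)
	log.Printf("generated a %d-byte hash key", len(hashKey))
}
```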
|
||||||
|
|
||||||
|
// CodecsFromPairs returns a slice of SecureCookie instances.
|
||||||
|
//
|
||||||
|
// It is a convenience function to create a list of codecs for key rotation. Note
|
||||||
|
// that the generated Codecs will have the default options applied: callers
|
||||||
|
// should iterate over each Codec and type-assert the underlying *SecureCookie to
|
||||||
|
// change these.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// codecs := securecookie.CodecsFromPairs(
|
||||||
|
// []byte("new-hash-key"),
|
||||||
|
// []byte("new-block-key"),
|
||||||
|
// []byte("old-hash-key"),
|
||||||
|
// []byte("old-block-key"),
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// // Modify each instance.
|
||||||
|
// for _, s := range codecs {
|
||||||
|
// if cookie, ok := s.(*securecookie.SecureCookie); ok {
|
||||||
|
// cookie.MaxAge(86400 * 7)
|
||||||
|
// cookie.SetSerializer(securecookie.JSONEncoder{})
|
||||||
|
// cookie.HashFunc(sha512.New512_256)
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
func CodecsFromPairs(keyPairs ...[]byte) []Codec {
|
||||||
|
codecs := make([]Codec, len(keyPairs)/2+len(keyPairs)%2)
|
||||||
|
for i := 0; i < len(keyPairs); i += 2 {
|
||||||
|
var blockKey []byte
|
||||||
|
if i+1 < len(keyPairs) {
|
||||||
|
blockKey = keyPairs[i+1]
|
||||||
|
}
|
||||||
|
codecs[i/2] = New(keyPairs[i], blockKey)
|
||||||
|
}
|
||||||
|
return codecs
|
||||||
|
}
|
||||||
|
|
||||||
|
// EncodeMulti encodes a cookie value using a group of codecs.
|
||||||
|
//
|
||||||
|
// The codecs are tried in order. Multiple codecs are accepted to allow
|
||||||
|
// key rotation.
|
||||||
|
//
|
||||||
|
// On error, may return a MultiError.
|
||||||
|
func EncodeMulti(name string, value interface{}, codecs ...Codec) (string, error) {
|
||||||
|
if len(codecs) == 0 {
|
||||||
|
return "", errNoCodecs
|
||||||
|
}
|
||||||
|
|
||||||
|
var errors MultiError
|
||||||
|
for _, codec := range codecs {
|
||||||
|
encoded, err := codec.Encode(name, value)
|
||||||
|
if err == nil {
|
||||||
|
return encoded, nil
|
||||||
|
}
|
||||||
|
errors = append(errors, err)
|
||||||
|
}
|
||||||
|
return "", errors
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecodeMulti decodes a cookie value using a group of codecs.
|
||||||
|
//
|
||||||
|
// The codecs are tried in order. Multiple codecs are accepted to allow
|
||||||
|
// key rotation.
|
||||||
|
//
|
||||||
|
// On error, may return a MultiError.
|
||||||
|
func DecodeMulti(name string, value string, dst interface{}, codecs ...Codec) error {
|
||||||
|
if len(codecs) == 0 {
|
||||||
|
return errNoCodecs
|
||||||
|
}
|
||||||
|
|
||||||
|
var errors MultiError
|
||||||
|
for _, codec := range codecs {
|
||||||
|
err := codec.Decode(name, value, dst)
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
errors = append(errors, err)
|
||||||
|
}
|
||||||
|
return errors
|
||||||
|
}
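A key-rotation sketch (key literals are placeholders): EncodeMulti writes with the first codec, while DecodeMulti still accepts cookies signed with an older pair because every codec is tried in order.

```go
package main

import (
	"fmt"
	"log"

	"github.com/gorilla/securecookie"
)

func main() {
	// Newest pair first, older pairs after it; a nil block key disables encryption.
	codecs := securecookie.CodecsFromPairs(
		[]byte("new-hash-key-placeholder--------"), nil,
		[]byte("old-hash-key-placeholder--------"), nil,
	)

	encoded, err := securecookie.EncodeMulti("session", map[string]string{"user": "alice"}, codecs...)
	if err != nil {
		log.Fatal(err)
	}

	var out map[string]string
	if err := securecookie.DecodeMulti("session", encoded, &out, codecs...); err != nil {
		// A MultiError here means every codec failed, e.g. a tampered or expired cookie.
		log.Fatal(err)
	}
	fmt.Println(out["user"])
}
```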
|
||||||
|
|
||||||
|
// MultiError groups multiple errors.
|
||||||
|
type MultiError []error
|
||||||
|
|
||||||
|
func (m MultiError) IsUsage() bool { return m.any(func(e Error) bool { return e.IsUsage() }) }
|
||||||
|
func (m MultiError) IsDecode() bool { return m.any(func(e Error) bool { return e.IsDecode() }) }
|
||||||
|
func (m MultiError) IsInternal() bool { return m.any(func(e Error) bool { return e.IsInternal() }) }
|
||||||
|
|
||||||
|
// Cause returns nil for MultiError; there is no unique underlying cause in the
|
||||||
|
// general case.
|
||||||
|
//
|
||||||
|
// Note: we could conceivably return a non-nil Cause only when there is exactly
|
||||||
|
// one child error with a Cause. However, it would be brittle for client code
|
||||||
|
// to rely on the arity of causes inside a MultiError, so we have opted not to
|
||||||
|
// provide this functionality. Clients which really wish to access the Causes
|
||||||
|
// of the underlying errors are free to iterate through the errors themselves.
|
||||||
|
func (m MultiError) Cause() error { return nil }
|
||||||
|
|
||||||
|
func (m MultiError) Error() string {
|
||||||
|
s, n := "", 0
|
||||||
|
for _, e := range m {
|
||||||
|
if e != nil {
|
||||||
|
if n == 0 {
|
||||||
|
s = e.Error()
|
||||||
|
}
|
||||||
|
n++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
switch n {
|
||||||
|
case 0:
|
||||||
|
return "(0 errors)"
|
||||||
|
case 1:
|
||||||
|
return s
|
||||||
|
case 2:
|
||||||
|
return s + " (and 1 other error)"
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("%s (and %d other errors)", s, n-1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// any returns true if any element of m is an Error for which pred returns true.
|
||||||
|
func (m MultiError) any(pred func(Error) bool) bool {
|
||||||
|
for _, e := range m {
|
||||||
|
if ourErr, ok := e.(Error); ok && pred(ourErr) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
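Since MultiError implements the package's Error interface, callers can classify aggregated failures the same way as single ones; a sketch with a placeholder cookie value:

```go
package main

import (
	"fmt"

	"github.com/gorilla/securecookie"
)

func main() {
	codecs := securecookie.CodecsFromPairs(securecookie.GenerateRandomKey(64))

	var dst map[string]string
	err := securecookie.DecodeMulti("session", "not-a-valid-cookie-value", &dst, codecs...)
	if scErr, ok := err.(securecookie.Error); ok && scErr.IsDecode() {
		// Expected for garbage or tampered input: treat the client as logged out.
		fmt.Println("invalid session cookie:", scErr)
		return
	}
	if err != nil {
		// Anything else points at a configuration or usage problem.
		fmt.Println("unexpected error:", err)
	}
}
```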
|
||||||
43 vendor/github.com/gorilla/sessions/AUTHORS generated vendored Normal file
@@ -0,0 +1,43 @@
# This is the official list of gorilla/sessions authors for copyright purposes.
|
||||||
|
#
|
||||||
|
# Please keep the list sorted.
|
||||||
|
|
||||||
|
Ahmadreza Zibaei <ahmadrezazibaei@hotmail.com>
|
||||||
|
Anton Lindström <lindztr@gmail.com>
|
||||||
|
Brian Jones <mojobojo@gmail.com>
|
||||||
|
Collin Stedman <kronion@users.noreply.github.com>
|
||||||
|
Deniz Eren <dee.116@gmail.com>
|
||||||
|
Dmitry Chestnykh <dmitry@codingrobots.com>
|
||||||
|
Dustin Oprea <myselfasunder@gmail.com>
|
||||||
|
Egon Elbre <egonelbre@gmail.com>
|
||||||
|
enumappstore <appstore@enumapps.com>
|
||||||
|
Geofrey Ernest <geofreyernest@live.com>
|
||||||
|
Google LLC (https://opensource.google.com/)
|
||||||
|
Jerry Saravia <SaraviaJ@gmail.com>
|
||||||
|
Jonathan Gillham <jonathan.gillham@gamil.com>
|
||||||
|
Justin Clift <justin@postgresql.org>
|
||||||
|
Justin Hellings <justin.hellings@gmail.com>
|
||||||
|
Kamil Kisiel <kamil@kamilkisiel.net>
|
||||||
|
Keiji Yoshida <yoshida.keiji.84@gmail.com>
|
||||||
|
kliron <kliron@gmail.com>
|
||||||
|
Kshitij Saraogi <KshitijSaraogi@gmail.com>
|
||||||
|
Lauris BH <lauris@nix.lv>
|
||||||
|
Lukas Rist <glaslos@gmail.com>
|
||||||
|
Mark Dain <ancarda@users.noreply.github.com>
|
||||||
|
Matt Ho <matt.ho@gmail.com>
|
||||||
|
Matt Silverlock <matt@eatsleeprepeat.net>
|
||||||
|
Mattias Wadman <mattias.wadman@gmail.com>
|
||||||
|
Michael Schuett <michaeljs1990@gmail.com>
|
||||||
|
Michael Stapelberg <stapelberg@users.noreply.github.com>
|
||||||
|
Mirco Zeiss <mirco.zeiss@gmail.com>
|
||||||
|
moraes <rodrigo.moraes@gmail.com>
|
||||||
|
nvcnvn <nguyen@open-vn.org>
|
||||||
|
pappz <zoltan.pmail@gmail.com>
|
||||||
|
Pontus Leitzler <leitzler@users.noreply.github.com>
|
||||||
|
QuaSoft <info@quasoft.net>
|
||||||
|
rcadena <robert.cadena@gmail.com>
|
||||||
|
rodrigo moraes <rodrigo.moraes@gmail.com>
|
||||||
|
Shawn Smith <shawnpsmith@gmail.com>
|
||||||
|
Taylor Hurt <taylor.a.hurt@gmail.com>
|
||||||
|
Tortuoise <sanyasinp@gmail.com>
|
||||||
|
Vitor De Mario <vitordemario@gmail.com>
|
||||||
27 vendor/github.com/gorilla/sessions/LICENSE generated vendored Normal file
@@ -0,0 +1,27 @@
Copyright (c) 2012-2018 The Gorilla Authors. All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are
|
||||||
|
met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above
|
||||||
|
copyright notice, this list of conditions and the following disclaimer
|
||||||
|
in the documentation and/or other materials provided with the
|
||||||
|
distribution.
|
||||||
|
* Neither the name of Google Inc. nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
83 vendor/github.com/gorilla/sessions/README.md generated vendored Normal file
@@ -0,0 +1,83 @@
# sessions
|
||||||
|
|
||||||
|
[](https://godoc.org/github.com/gorilla/sessions) [](https://travis-ci.org/gorilla/sessions)
|
||||||
|
[](https://sourcegraph.com/github.com/gorilla/sessions?badge)
|
||||||
|
|
||||||
|
gorilla/sessions provides cookie and filesystem sessions and infrastructure for
|
||||||
|
custom session backends.
|
||||||
|
|
||||||
|
The key features are:
|
||||||
|
|
||||||
|
- Simple API: use it as an easy way to set signed (and optionally
|
||||||
|
encrypted) cookies.
|
||||||
|
- Built-in backends to store sessions in cookies or the filesystem.
|
||||||
|
- Flash messages: session values that last until read.
|
||||||
|
- Convenient way to switch session persistency (aka "remember me") and set
|
||||||
|
other attributes.
|
||||||
|
- Mechanism to rotate authentication and encryption keys.
|
||||||
|
- Multiple sessions per request, even using different backends.
|
||||||
|
- Interfaces and infrastructure for custom session backends: sessions from
|
||||||
|
different stores can be retrieved and batch-saved using a common API.
|
||||||
|
|
||||||
|
Let's start with an example that shows the sessions API in a nutshell:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"github.com/gorilla/sessions"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Note: Don't store your key in your source code. Pass it via an
|
||||||
|
// environmental variable, or flag (or both), and don't accidentally commit it
|
||||||
|
// alongside your code. Ensure your key is sufficiently random - i.e. use Go's
|
||||||
|
// crypto/rand or securecookie.GenerateRandomKey(32) and persist the result.
|
||||||
|
var store = sessions.NewCookieStore([]byte(os.Getenv("SESSION_KEY")))
|
||||||
|
|
||||||
|
func MyHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Get a session. We're ignoring the error resulting from decoding an
|
||||||
|
// existing session: Get() always returns a session, even if empty.
|
||||||
|
session, _ := store.Get(r, "session-name")
|
||||||
|
// Set some session values.
|
||||||
|
session.Values["foo"] = "bar"
|
||||||
|
session.Values[42] = 43
|
||||||
|
// Save it before we write to the response/return from the handler.
|
||||||
|
session.Save(r, w)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
First we initialize a session store calling `NewCookieStore()` and passing a
|
||||||
|
secret key used to authenticate the session. Inside the handler, we call
|
||||||
|
`store.Get()` to retrieve an existing session or create a new one. Then we set
|
||||||
|
some session values in session.Values, which is a `map[interface{}]interface{}`.
|
||||||
|
And finally we call `session.Save()` to save the session in the response.
|
||||||
|
|
||||||
|
More examples are available [on the Gorilla
|
||||||
|
website](https://www.gorillatoolkit.org/pkg/sessions).
|
||||||
|
|
||||||
|
## Store Implementations
|
||||||
|
|
||||||
|
Other implementations of the `sessions.Store` interface:
|
||||||
|
|
||||||
|
- [github.com/starJammer/gorilla-sessions-arangodb](https://github.com/starJammer/gorilla-sessions-arangodb) - ArangoDB
|
||||||
|
- [github.com/yosssi/boltstore](https://github.com/yosssi/boltstore) - Bolt
|
||||||
|
- [github.com/srinathgs/couchbasestore](https://github.com/srinathgs/couchbasestore) - Couchbase
|
||||||
|
- [github.com/denizeren/dynamostore](https://github.com/denizeren/dynamostore) - Dynamodb on AWS
|
||||||
|
- [github.com/savaki/dynastore](https://github.com/savaki/dynastore) - DynamoDB on AWS (Official AWS library)
|
||||||
|
- [github.com/bradleypeabody/gorilla-sessions-memcache](https://github.com/bradleypeabody/gorilla-sessions-memcache) - Memcache
|
||||||
|
- [github.com/dsoprea/go-appengine-sessioncascade](https://github.com/dsoprea/go-appengine-sessioncascade) - Memcache/Datastore/Context in AppEngine
|
||||||
|
- [github.com/kidstuff/mongostore](https://github.com/kidstuff/mongostore) - MongoDB
|
||||||
|
- [github.com/srinathgs/mysqlstore](https://github.com/srinathgs/mysqlstore) - MySQL
|
||||||
|
- [github.com/EnumApps/clustersqlstore](https://github.com/EnumApps/clustersqlstore) - MySQL Cluster
|
||||||
|
- [github.com/antonlindstrom/pgstore](https://github.com/antonlindstrom/pgstore) - PostgreSQL
|
||||||
|
- [github.com/boj/redistore](https://github.com/boj/redistore) - Redis
|
||||||
|
- [github.com/boj/rethinkstore](https://github.com/boj/rethinkstore) - RethinkDB
|
||||||
|
- [github.com/boj/riakstore](https://github.com/boj/riakstore) - Riak
|
||||||
|
- [github.com/michaeljs1990/sqlitestore](https://github.com/michaeljs1990/sqlitestore) - SQLite
|
||||||
|
- [github.com/wader/gormstore](https://github.com/wader/gormstore) - GORM (MySQL, PostgreSQL, SQLite)
|
||||||
|
- [github.com/gernest/qlstore](https://github.com/gernest/qlstore) - ql
|
||||||
|
- [github.com/quasoft/memstore](https://github.com/quasoft/memstore) - In-memory implementation for use in unit tests
|
||||||
|
- [github.com/lafriks/xormstore](https://github.com/lafriks/xormstore) - XORM (MySQL, PostgreSQL, SQLite, Microsoft SQL Server, TiDB)
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
BSD licensed. See the LICENSE file for details.
|
||||||
19 vendor/github.com/gorilla/sessions/cookie.go generated vendored Normal file
@@ -0,0 +1,19 @@
// +build !go1.11
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
import "net/http"
|
||||||
|
|
||||||
|
// newCookieFromOptions returns an http.Cookie with the options set.
|
||||||
|
func newCookieFromOptions(name, value string, options *Options) *http.Cookie {
|
||||||
|
return &http.Cookie{
|
||||||
|
Name: name,
|
||||||
|
Value: value,
|
||||||
|
Path: options.Path,
|
||||||
|
Domain: options.Domain,
|
||||||
|
MaxAge: options.MaxAge,
|
||||||
|
Secure: options.Secure,
|
||||||
|
HttpOnly: options.HttpOnly,
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
20 vendor/github.com/gorilla/sessions/cookie_go111.go generated vendored Normal file
@@ -0,0 +1,20 @@
// +build go1.11
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
import "net/http"
|
||||||
|
|
||||||
|
// newCookieFromOptions returns an http.Cookie with the options set.
|
||||||
|
func newCookieFromOptions(name, value string, options *Options) *http.Cookie {
|
||||||
|
return &http.Cookie{
|
||||||
|
Name: name,
|
||||||
|
Value: value,
|
||||||
|
Path: options.Path,
|
||||||
|
Domain: options.Domain,
|
||||||
|
MaxAge: options.MaxAge,
|
||||||
|
Secure: options.Secure,
|
||||||
|
HttpOnly: options.HttpOnly,
|
||||||
|
SameSite: options.SameSite,
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
194 vendor/github.com/gorilla/sessions/doc.go generated vendored Normal file
@@ -0,0 +1,194 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package sessions provides cookie and filesystem sessions and
|
||||||
|
infrastructure for custom session backends.
|
||||||
|
|
||||||
|
The key features are:
|
||||||
|
|
||||||
|
* Simple API: use it as an easy way to set signed (and optionally
|
||||||
|
encrypted) cookies.
|
||||||
|
* Built-in backends to store sessions in cookies or the filesystem.
|
||||||
|
* Flash messages: session values that last until read.
|
||||||
|
* Convenient way to switch session persistency (aka "remember me") and set
|
||||||
|
other attributes.
|
||||||
|
* Mechanism to rotate authentication and encryption keys.
|
||||||
|
* Multiple sessions per request, even using different backends.
|
||||||
|
* Interfaces and infrastructure for custom session backends: sessions from
|
||||||
|
different stores can be retrieved and batch-saved using a common API.
|
||||||
|
|
||||||
|
Let's start with an example that shows the sessions API in a nutshell:
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"github.com/gorilla/sessions"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Note: Don't store your key in your source code. Pass it via an
|
||||||
|
// environmental variable, or flag (or both), and don't accidentally commit it
|
||||||
|
// alongside your code. Ensure your key is sufficiently random - i.e. use Go's
|
||||||
|
// crypto/rand or securecookie.GenerateRandomKey(32) and persist the result.
|
||||||
|
var store = sessions.NewCookieStore([]byte(os.Getenv("SESSION_KEY")))
|
||||||
|
|
||||||
|
func MyHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Get a session. Get() always returns a session, even if empty.
|
||||||
|
session, err := store.Get(r, "session-name")
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set some session values.
|
||||||
|
session.Values["foo"] = "bar"
|
||||||
|
session.Values[42] = 43
|
||||||
|
// Save it before we write to the response/return from the handler.
|
||||||
|
session.Save(r, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
First we initialize a session store calling NewCookieStore() and passing a
|
||||||
|
secret key used to authenticate the session. Inside the handler, we call
|
||||||
|
store.Get() to retrieve an existing session or a new one. Then we set some
|
||||||
|
session values in session.Values, which is a map[interface{}]interface{}.
|
||||||
|
And finally we call session.Save() to save the session in the response.
|
||||||
|
|
||||||
|
Note that in production code, we should check for errors when calling
|
||||||
|
session.Save(r, w), and either display an error message or otherwise handle it.
|
||||||
|
|
||||||
|
Save must be called before writing to the response, otherwise the session
|
||||||
|
cookie will not be sent to the client.
|
||||||
|
|
||||||
|
That's all you need to know for the basic usage. Let's take a look at other
|
||||||
|
options, starting with flash messages.
|
||||||
|
|
||||||
|
Flash messages are session values that last until read. The term appeared with
|
||||||
|
Ruby On Rails a few years back. When we request a flash message, it is removed
|
||||||
|
from the session. To add a flash, call session.AddFlash(), and to get all
|
||||||
|
flashes, call session.Flashes(). Here is an example:
|
||||||
|
|
||||||
|
func MyHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Get a session.
|
||||||
|
session, err := store.Get(r, "session-name")
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the previous flashes, if any.
|
||||||
|
if flashes := session.Flashes(); len(flashes) > 0 {
|
||||||
|
// Use the flash values.
|
||||||
|
} else {
|
||||||
|
// Set a new flash.
|
||||||
|
session.AddFlash("Hello, flash messages world!")
|
||||||
|
}
|
||||||
|
session.Save(r, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
Flash messages are useful to set information to be read after a redirection,
|
||||||
|
like after form submissions.
|
||||||
|
|
||||||
|
There may also be cases where you want to store a complex datatype within a
|
||||||
|
session, such as a struct. Sessions are serialised using the encoding/gob package,
|
||||||
|
so it is easy to register new datatypes for storage in sessions:
|
||||||
|
|
||||||
|
import(
|
||||||
|
"encoding/gob"
|
||||||
|
"github.com/gorilla/sessions"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Person struct {
|
||||||
|
FirstName string
|
||||||
|
LastName string
|
||||||
|
Email string
|
||||||
|
Age int
|
||||||
|
}
|
||||||
|
|
||||||
|
type M map[string]interface{}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
|
||||||
|
gob.Register(&Person{})
|
||||||
|
gob.Register(&M{})
|
||||||
|
}
|
||||||
|
|
||||||
|
As it's not possible to pass a raw type as a parameter to a function, gob.Register()
|
||||||
|
relies on us passing it a value of the desired type. In the example above we've passed
|
||||||
|
it a pointer to a struct and a pointer to a custom type representing a
|
||||||
|
map[string]interface{}. (We could have passed non-pointer values if we wished.) This will
|
||||||
|
then allow us to serialise/deserialise values of those types to and from our sessions.
|
||||||
|
|
||||||
|
Note that because session values are stored in a map[string]interface{}, there's
|
||||||
|
a need to type-assert data when retrieving it. We'll use the Person struct we registered above:
|
||||||
|
|
||||||
|
func MyHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
session, err := store.Get(r, "session-name")
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Retrieve our struct and type-assert it
|
||||||
|
val := session.Values["person"]
person, ok := val.(*Person)
if !ok {
// Handle the case that it's not an expected type
}
|
||||||
|
|
||||||
|
// Now we can use our person object
|
||||||
|
}
|
||||||
|
|
||||||
|
By default, session cookies last for a month. This is probably too long for
|
||||||
|
some cases, but it is easy to change this and other attributes during
|
||||||
|
runtime. Sessions can be configured individually or the store can be
|
||||||
|
configured and then all sessions saved using it will use that configuration.
|
||||||
|
We access session.Options or store.Options to set a new configuration. The
|
||||||
|
fields are basically a subset of http.Cookie fields. Let's change the
|
||||||
|
maximum age of a session to one week:
|
||||||
|
|
||||||
|
session.Options = &sessions.Options{
|
||||||
|
Path: "/",
|
||||||
|
MaxAge: 86400 * 7,
|
||||||
|
HttpOnly: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
Sometimes we may want to change authentication and/or encryption keys without
|
||||||
|
breaking existing sessions. The CookieStore supports key rotation, and to use
|
||||||
|
it you just need to set multiple authentication and encryption keys, in pairs,
|
||||||
|
to be tested in order:
|
||||||
|
|
||||||
|
var store = sessions.NewCookieStore(
|
||||||
|
[]byte("new-authentication-key"),
|
||||||
|
[]byte("new-encryption-key"),
|
||||||
|
[]byte("old-authentication-key"),
|
||||||
|
[]byte("old-encryption-key"),
|
||||||
|
)
|
||||||
|
|
||||||
|
New sessions will be saved using the first pair. Old sessions can still be
|
||||||
|
read because the first pair will fail, and the second will be tested. This
|
||||||
|
makes it easy to "rotate" secret keys and still be able to validate existing
|
||||||
|
sessions. Note: for all pairs the encryption key is optional; set it to nil
|
||||||
|
or omit it and encryption won't be used.
|
||||||
|
|
||||||
|
Multiple sessions can be used in the same request, even with different
|
||||||
|
session backends. When this happens, calling Save() on each session
|
||||||
|
individually would be cumbersome, so we have a way to save all sessions
|
||||||
|
at once: it's sessions.Save(). Here's an example:
|
||||||
|
|
||||||
|
var store = sessions.NewCookieStore([]byte("something-very-secret"))
|
||||||
|
|
||||||
|
func MyHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Get a session and set a value.
|
||||||
|
session1, _ := store.Get(r, "session-one")
|
||||||
|
session1.Values["foo"] = "bar"
|
||||||
|
// Get another session and set another value.
|
||||||
|
session2, _ := store.Get(r, "session-two")
|
||||||
|
session2.Values[42] = 43
|
||||||
|
// Save all sessions.
|
||||||
|
sessions.Save(r, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
This is possible because when we call Get() from a session store, it adds the
|
||||||
|
session to a common registry. Save() uses it to save all registered sessions.
|
||||||
|
*/
|
||||||
|
package sessions
|
||||||
3 vendor/github.com/gorilla/sessions/go.mod generated vendored Normal file
@@ -0,0 +1,3 @@
module github.com/gorilla/sessions
|
||||||
|
|
||||||
|
require github.com/gorilla/securecookie v1.1.1
|
||||||
2 vendor/github.com/gorilla/sessions/go.sum generated vendored Normal file
@@ -0,0 +1,2 @@
github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
|
||||||
|
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
|
||||||
102 vendor/github.com/gorilla/sessions/lex.go generated vendored Normal file
@@ -0,0 +1,102 @@
// This file contains code adapted from the Go standard library
|
||||||
|
// https://github.com/golang/go/blob/39ad0fd0789872f9469167be7fe9578625ff246e/src/net/http/lex.go
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
var isTokenTable = [127]bool{
|
||||||
|
'!': true,
|
||||||
|
'#': true,
|
||||||
|
'$': true,
|
||||||
|
'%': true,
|
||||||
|
'&': true,
|
||||||
|
'\'': true,
|
||||||
|
'*': true,
|
||||||
|
'+': true,
|
||||||
|
'-': true,
|
||||||
|
'.': true,
|
||||||
|
'0': true,
|
||||||
|
'1': true,
|
||||||
|
'2': true,
|
||||||
|
'3': true,
|
||||||
|
'4': true,
|
||||||
|
'5': true,
|
||||||
|
'6': true,
|
||||||
|
'7': true,
|
||||||
|
'8': true,
|
||||||
|
'9': true,
|
||||||
|
'A': true,
|
||||||
|
'B': true,
|
||||||
|
'C': true,
|
||||||
|
'D': true,
|
||||||
|
'E': true,
|
||||||
|
'F': true,
|
||||||
|
'G': true,
|
||||||
|
'H': true,
|
||||||
|
'I': true,
|
||||||
|
'J': true,
|
||||||
|
'K': true,
|
||||||
|
'L': true,
|
||||||
|
'M': true,
|
||||||
|
'N': true,
|
||||||
|
'O': true,
|
||||||
|
'P': true,
|
||||||
|
'Q': true,
|
||||||
|
'R': true,
|
||||||
|
'S': true,
|
||||||
|
'T': true,
|
||||||
|
'U': true,
|
||||||
|
'W': true,
|
||||||
|
'V': true,
|
||||||
|
'X': true,
|
||||||
|
'Y': true,
|
||||||
|
'Z': true,
|
||||||
|
'^': true,
|
||||||
|
'_': true,
|
||||||
|
'`': true,
|
||||||
|
'a': true,
|
||||||
|
'b': true,
|
||||||
|
'c': true,
|
||||||
|
'd': true,
|
||||||
|
'e': true,
|
||||||
|
'f': true,
|
||||||
|
'g': true,
|
||||||
|
'h': true,
|
||||||
|
'i': true,
|
||||||
|
'j': true,
|
||||||
|
'k': true,
|
||||||
|
'l': true,
|
||||||
|
'm': true,
|
||||||
|
'n': true,
|
||||||
|
'o': true,
|
||||||
|
'p': true,
|
||||||
|
'q': true,
|
||||||
|
'r': true,
|
||||||
|
's': true,
|
||||||
|
't': true,
|
||||||
|
'u': true,
|
||||||
|
'v': true,
|
||||||
|
'w': true,
|
||||||
|
'x': true,
|
||||||
|
'y': true,
|
||||||
|
'z': true,
|
||||||
|
'|': true,
|
||||||
|
'~': true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func isToken(r rune) bool {
|
||||||
|
i := int(r)
|
||||||
|
return i < len(isTokenTable) && isTokenTable[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
func isNotToken(r rune) bool {
|
||||||
|
return !isToken(r)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isCookieNameValid(raw string) bool {
|
||||||
|
if raw == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return strings.IndexFunc(raw, isNotToken) < 0
|
||||||
|
}
|
||||||
18 vendor/github.com/gorilla/sessions/options.go generated vendored Normal file
@@ -0,0 +1,18 @@
// +build !go1.11
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
// Options stores configuration for a session or session store.
|
||||||
|
//
|
||||||
|
// Fields are a subset of http.Cookie fields.
|
||||||
|
type Options struct {
|
||||||
|
Path string
|
||||||
|
Domain string
|
||||||
|
// MaxAge=0 means no Max-Age attribute specified and the cookie will be
|
||||||
|
// deleted after the browser session ends.
|
||||||
|
// MaxAge<0 means delete cookie immediately.
|
||||||
|
// MaxAge>0 means Max-Age attribute present and given in seconds.
|
||||||
|
MaxAge int
|
||||||
|
Secure bool
|
||||||
|
HttpOnly bool
|
||||||
|
}
|
||||||
22 vendor/github.com/gorilla/sessions/options_go111.go generated vendored Normal file
@@ -0,0 +1,22 @@
// +build go1.11
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
import "net/http"
|
||||||
|
|
||||||
|
// Options stores configuration for a session or session store.
|
||||||
|
//
|
||||||
|
// Fields are a subset of http.Cookie fields.
|
||||||
|
type Options struct {
|
||||||
|
Path string
|
||||||
|
Domain string
|
||||||
|
// MaxAge=0 means no Max-Age attribute specified and the cookie will be
|
||||||
|
// deleted after the browser session ends.
|
||||||
|
// MaxAge<0 means delete cookie immediately.
|
||||||
|
// MaxAge>0 means Max-Age attribute present and given in seconds.
|
||||||
|
MaxAge int
|
||||||
|
Secure bool
|
||||||
|
HttpOnly bool
|
||||||
|
// Defaults to http.SameSiteDefaultMode
|
||||||
|
SameSite http.SameSite
|
||||||
|
}
|
||||||
218 vendor/github.com/gorilla/sessions/sessions.go generated vendored Normal file
@@ -0,0 +1,218 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/gob"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Default flashes key.
|
||||||
|
const flashesKey = "_flash"
|
||||||
|
|
||||||
|
// Session --------------------------------------------------------------------
|
||||||
|
|
||||||
|
// NewSession is called by session stores to create a new session instance.
|
||||||
|
func NewSession(store Store, name string) *Session {
|
||||||
|
return &Session{
|
||||||
|
Values: make(map[interface{}]interface{}),
|
||||||
|
store: store,
|
||||||
|
name: name,
|
||||||
|
Options: new(Options),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Session stores the values and optional configuration for a session.
|
||||||
|
type Session struct {
|
||||||
|
// The ID of the session, generated by stores. It should not be used for
|
||||||
|
// user data.
|
||||||
|
ID string
|
||||||
|
// Values contains the user-data for the session.
|
||||||
|
Values map[interface{}]interface{}
|
||||||
|
Options *Options
|
||||||
|
IsNew bool
|
||||||
|
store Store
|
||||||
|
name string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Flashes returns a slice of flash messages from the session.
|
||||||
|
//
|
||||||
|
// A single variadic argument is accepted, and it is optional: it defines
|
||||||
|
// the flash key. If not defined "_flash" is used by default.
|
||||||
|
func (s *Session) Flashes(vars ...string) []interface{} {
|
||||||
|
var flashes []interface{}
|
||||||
|
key := flashesKey
|
||||||
|
if len(vars) > 0 {
|
||||||
|
key = vars[0]
|
||||||
|
}
|
||||||
|
if v, ok := s.Values[key]; ok {
|
||||||
|
// Drop the flashes and return it.
|
||||||
|
delete(s.Values, key)
|
||||||
|
flashes = v.([]interface{})
|
||||||
|
}
|
||||||
|
return flashes
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddFlash adds a flash message to the session.
|
||||||
|
//
|
||||||
|
// A single variadic argument is accepted, and it is optional: it defines
|
||||||
|
// the flash key. If not defined "_flash" is used by default.
|
||||||
|
func (s *Session) AddFlash(value interface{}, vars ...string) {
|
||||||
|
key := flashesKey
|
||||||
|
if len(vars) > 0 {
|
||||||
|
key = vars[0]
|
||||||
|
}
|
||||||
|
var flashes []interface{}
|
||||||
|
if v, ok := s.Values[key]; ok {
|
||||||
|
flashes = v.([]interface{})
|
||||||
|
}
|
||||||
|
s.Values[key] = append(flashes, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save is a convenience method to save this session. It is the same as calling
|
||||||
|
// store.Save(request, response, session). You should call Save before writing to
|
||||||
|
// the response or returning from the handler.
|
||||||
|
func (s *Session) Save(r *http.Request, w http.ResponseWriter) error {
|
||||||
|
return s.store.Save(r, w, s)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns the name used to register the session.
|
||||||
|
func (s *Session) Name() string {
|
||||||
|
return s.name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store returns the session store used to register the session.
|
||||||
|
func (s *Session) Store() Store {
|
||||||
|
return s.store
|
||||||
|
}
|
||||||
|
|
||||||
|
// Registry -------------------------------------------------------------------
|
||||||
|
|
||||||
|
// sessionInfo stores a session tracked by the registry.
|
||||||
|
type sessionInfo struct {
|
||||||
|
s *Session
|
||||||
|
e error
|
||||||
|
}
|
||||||
|
|
||||||
|
// contextKey is the type used to store the registry in the context.
|
||||||
|
type contextKey int
|
||||||
|
|
||||||
|
// registryKey is the key used to store the registry in the context.
|
||||||
|
const registryKey contextKey = 0
|
||||||
|
|
||||||
|
// GetRegistry returns a registry instance for the current request.
|
||||||
|
func GetRegistry(r *http.Request) *Registry {
|
||||||
|
var ctx = r.Context()
|
||||||
|
registry := ctx.Value(registryKey)
|
||||||
|
if registry != nil {
|
||||||
|
return registry.(*Registry)
|
||||||
|
}
|
||||||
|
newRegistry := &Registry{
|
||||||
|
request: r,
|
||||||
|
sessions: make(map[string]sessionInfo),
|
||||||
|
}
|
||||||
|
*r = *r.WithContext(context.WithValue(ctx, registryKey, newRegistry))
|
||||||
|
return newRegistry
|
||||||
|
}
|
||||||
|
|
||||||
|
// Registry stores sessions used during a request.
|
||||||
|
type Registry struct {
|
||||||
|
request *http.Request
|
||||||
|
sessions map[string]sessionInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get registers and returns a session for the given name and session store.
|
||||||
|
//
|
||||||
|
// It returns a new session if there are no sessions registered for the name.
|
||||||
|
func (s *Registry) Get(store Store, name string) (session *Session, err error) {
|
||||||
|
if !isCookieNameValid(name) {
|
||||||
|
return nil, fmt.Errorf("sessions: invalid character in cookie name: %s", name)
|
||||||
|
}
|
||||||
|
if info, ok := s.sessions[name]; ok {
|
||||||
|
session, err = info.s, info.e
|
||||||
|
} else {
|
||||||
|
session, err = store.New(s.request, name)
|
||||||
|
session.name = name
|
||||||
|
s.sessions[name] = sessionInfo{s: session, e: err}
|
||||||
|
}
|
||||||
|
session.store = store
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save saves all sessions registered for the current request.
|
||||||
|
func (s *Registry) Save(w http.ResponseWriter) error {
|
||||||
|
var errMulti MultiError
|
||||||
|
for name, info := range s.sessions {
|
||||||
|
session := info.s
|
||||||
|
if session.store == nil {
|
||||||
|
errMulti = append(errMulti, fmt.Errorf(
|
||||||
|
"sessions: missing store for session %q", name))
|
||||||
|
} else if err := session.store.Save(s.request, w, session); err != nil {
|
||||||
|
errMulti = append(errMulti, fmt.Errorf(
|
||||||
|
"sessions: error saving session %q -- %v", name, err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if errMulti != nil {
|
||||||
|
return errMulti
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helpers --------------------------------------------------------------------
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
gob.Register([]interface{}{})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save saves all sessions used during the current request.
|
||||||
|
func Save(r *http.Request, w http.ResponseWriter) error {
|
||||||
|
return GetRegistry(r).Save(w)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewCookie returns an http.Cookie with the options set. It also sets
|
||||||
|
// the Expires field calculated based on the MaxAge value, for Internet
|
||||||
|
// Explorer compatibility.
|
||||||
|
func NewCookie(name, value string, options *Options) *http.Cookie {
|
||||||
|
cookie := newCookieFromOptions(name, value, options)
|
||||||
|
if options.MaxAge > 0 {
|
||||||
|
d := time.Duration(options.MaxAge) * time.Second
|
||||||
|
cookie.Expires = time.Now().Add(d)
|
||||||
|
} else if options.MaxAge < 0 {
|
||||||
|
// Set it to the past to expire now.
|
||||||
|
cookie.Expires = time.Unix(1, 0)
|
||||||
|
}
|
||||||
|
return cookie
|
||||||
|
}
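Because Expires is derived from MaxAge, deleting a session cookie (for example on logout) only requires a negative MaxAge before saving. A sketch under those assumptions, with placeholder names:

```go
package main

import (
	"log"
	"net/http"

	"github.com/gorilla/securecookie"
	"github.com/gorilla/sessions"
)

var store = sessions.NewCookieStore(securecookie.GenerateRandomKey(64))

func logoutHandler(w http.ResponseWriter, r *http.Request) {
	session, _ := store.Get(r, "session-name")
	// MaxAge < 0 makes NewCookie set Expires in the past, so the browser
	// drops the session cookie immediately.
	session.Options.MaxAge = -1
	if err := session.Save(r, w); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/", http.StatusFound)
}

func main() {
	http.HandleFunc("/logout", logoutHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```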
|
||||||
|
|
||||||
|
// Error ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
// MultiError stores multiple errors.
|
||||||
|
//
|
||||||
|
// Borrowed from the App Engine SDK.
|
||||||
|
type MultiError []error
|
||||||
|
|
||||||
|
func (m MultiError) Error() string {
|
||||||
|
s, n := "", 0
|
||||||
|
for _, e := range m {
|
||||||
|
if e != nil {
|
||||||
|
if n == 0 {
|
||||||
|
s = e.Error()
|
||||||
|
}
|
||||||
|
n++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
switch n {
|
||||||
|
case 0:
|
||||||
|
return "(0 errors)"
|
||||||
|
case 1:
|
||||||
|
return s
|
||||||
|
case 2:
|
||||||
|
return s + " (and 1 other error)"
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("%s (and %d other errors)", s, n-1)
|
||||||
|
}
|
||||||
292 vendor/github.com/gorilla/sessions/store.go generated vendored Normal file
@@ -0,0 +1,292 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package sessions
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/base32"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/gorilla/securecookie"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Store is an interface for custom session stores.
|
||||||
|
//
|
||||||
|
// See CookieStore and FilesystemStore for examples.
|
||||||
|
type Store interface {
|
||||||
|
// Get should return a cached session.
|
||||||
|
Get(r *http.Request, name string) (*Session, error)
|
||||||
|
|
||||||
|
// New should create and return a new session.
|
||||||
|
//
|
||||||
|
// Note that New should never return a nil session, even in the case of
|
||||||
|
// an error if using the Registry infrastructure to cache the session.
|
||||||
|
New(r *http.Request, name string) (*Session, error)
|
||||||
|
|
||||||
|
// Save should persist session to the underlying store implementation.
|
||||||
|
Save(r *http.Request, w http.ResponseWriter, s *Session) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// CookieStore ----------------------------------------------------------------
|
||||||
|
|
||||||
|
// NewCookieStore returns a new CookieStore.
|
||||||
|
//
|
||||||
|
// Keys are defined in pairs to allow key rotation, but the common case is
|
||||||
|
// to set a single authentication key and optionally an encryption key.
|
||||||
|
//
|
||||||
|
// The first key in a pair is used for authentication and the second for
|
||||||
|
// encryption. The encryption key can be set to nil or omitted in the last
|
||||||
|
// pair, but the authentication key is required in all pairs.
|
||||||
|
//
|
||||||
|
// It is recommended to use an authentication key with 32 or 64 bytes.
|
||||||
|
// The encryption key, if set, must be either 16, 24, or 32 bytes to select
|
||||||
|
// AES-128, AES-192, or AES-256 modes.
|
||||||
|
func NewCookieStore(keyPairs ...[]byte) *CookieStore {
|
||||||
|
cs := &CookieStore{
|
||||||
|
Codecs: securecookie.CodecsFromPairs(keyPairs...),
|
||||||
|
Options: &Options{
|
||||||
|
Path: "/",
|
||||||
|
MaxAge: 86400 * 30,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
cs.MaxAge(cs.Options.MaxAge)
|
||||||
|
return cs
|
||||||
|
}
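A configuration sketch (key handling and the one-week age are illustrative): overriding the 30-day default through MaxAge keeps the cookie attributes and the underlying codecs in sync.

```go
package main

import (
	"log"

	"github.com/gorilla/securecookie"
	"github.com/gorilla/sessions"
)

func main() {
	// Illustrative key handling: in real code, generate once, persist, reload.
	authKey := securecookie.GenerateRandomKey(64) // HMAC authentication key
	encKey := securecookie.GenerateRandomKey(32)  // AES-256 encryption key (optional)

	store := sessions.NewCookieStore(authKey, encKey)
	store.Options.Path = "/"
	store.Options.HttpOnly = true
	// MaxAge updates both the cookie Options and every securecookie codec,
	// so stale cookies are also rejected server-side.
	store.MaxAge(86400 * 7)

	log.Printf("cookie store configured with a %d-second session age", store.Options.MaxAge)
}
```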
|
||||||
|
|
||||||
|
// CookieStore stores sessions using secure cookies.
|
||||||
|
type CookieStore struct {
|
||||||
|
Codecs []securecookie.Codec
|
||||||
|
Options *Options // default configuration
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns a session for the given name after adding it to the registry.
|
||||||
|
//
|
||||||
|
// It returns a new session if the session doesn't exist. Access IsNew on
|
||||||
|
// the session to check if it is an existing session or a new one.
|
||||||
|
//
|
||||||
|
// It returns a new session and an error if the session exists but could
|
||||||
|
// not be decoded.
|
||||||
|
func (s *CookieStore) Get(r *http.Request, name string) (*Session, error) {
|
||||||
|
return GetRegistry(r).Get(s, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// New returns a session for the given name without adding it to the registry.
|
||||||
|
//
|
||||||
|
// The difference between New() and Get() is that calling New() twice will
|
||||||
|
// decode the session data twice, while Get() registers and reuses the same
|
||||||
|
// decoded session after the first call.
|
||||||
|
func (s *CookieStore) New(r *http.Request, name string) (*Session, error) {
|
||||||
|
session := NewSession(s, name)
|
||||||
|
opts := *s.Options
|
||||||
|
session.Options = &opts
|
||||||
|
session.IsNew = true
|
||||||
|
var err error
|
||||||
|
if c, errCookie := r.Cookie(name); errCookie == nil {
|
||||||
|
err = securecookie.DecodeMulti(name, c.Value, &session.Values,
|
||||||
|
s.Codecs...)
|
||||||
|
if err == nil {
|
||||||
|
session.IsNew = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return session, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save adds a single session to the response.
|
||||||
|
func (s *CookieStore) Save(r *http.Request, w http.ResponseWriter,
|
||||||
|
session *Session) error {
|
||||||
|
encoded, err := securecookie.EncodeMulti(session.Name(), session.Values,
|
||||||
|
s.Codecs...)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
http.SetCookie(w, NewCookie(session.Name(), encoded, session.Options))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MaxAge sets the maximum age for the store and the underlying cookie
|
||||||
|
// implementation. Individual sessions can be deleted by setting Options.MaxAge
|
||||||
|
// = -1 for that session.
|
||||||
|
func (s *CookieStore) MaxAge(age int) {
|
||||||
|
s.Options.MaxAge = age
|
||||||
|
|
||||||
|
// Set the maxAge for each securecookie instance.
|
||||||
|
for _, codec := range s.Codecs {
|
||||||
|
if sc, ok := codec.(*securecookie.SecureCookie); ok {
|
||||||
|
sc.MaxAge(age)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilesystemStore ------------------------------------------------------------
|
||||||
|
|
||||||
|
var fileMutex sync.RWMutex
|
||||||
|
|
||||||
|
// NewFilesystemStore returns a new FilesystemStore.
|
||||||
|
//
|
||||||
|
// The path argument is the directory where sessions will be saved. If empty
|
||||||
|
// it will use os.TempDir().
|
||||||
|
//
|
||||||
|
// See NewCookieStore() for a description of the other parameters.
|
||||||
|
func NewFilesystemStore(path string, keyPairs ...[]byte) *FilesystemStore {
|
||||||
|
if path == "" {
|
||||||
|
path = os.TempDir()
|
||||||
|
}
|
||||||
|
fs := &FilesystemStore{
|
||||||
|
Codecs: securecookie.CodecsFromPairs(keyPairs...),
|
||||||
|
Options: &Options{
|
||||||
|
Path: "/",
|
||||||
|
MaxAge: 86400 * 30,
|
||||||
|
},
|
||||||
|
path: path,
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.MaxAge(fs.Options.MaxAge)
|
||||||
|
return fs
|
||||||
|
}
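A FilesystemStore sketch (directory and size are placeholders): only the session ID travels in the cookie, values live on disk, and MaxLength may be raised above the 4096-byte default.

```go
package main

import (
	"log"

	"github.com/gorilla/securecookie"
	"github.com/gorilla/sessions"
)

func main() {
	// An empty path would fall back to os.TempDir(); a dedicated directory is
	// usually safer so sessions survive temp-dir cleanup.
	store := sessions.NewFilesystemStore("/var/lib/myapp/sessions",
		securecookie.GenerateRandomKey(64))

	// Values are stored on disk, so sessions may exceed the cookie-size limit.
	store.MaxLength(8192)

	log.Println("filesystem session store ready at /var/lib/myapp/sessions")
}
```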
|
||||||
|
|
||||||
|
// FilesystemStore stores sessions in the filesystem.
|
||||||
|
//
|
||||||
|
// It also serves as a reference for custom stores.
|
||||||
|
//
|
||||||
|
// This store is still experimental and not well tested. Feedback is welcome.
|
||||||
|
type FilesystemStore struct {
|
||||||
|
Codecs []securecookie.Codec
|
||||||
|
Options *Options // default configuration
|
||||||
|
path string
|
||||||
|
}
|
||||||
|
|
||||||
|
// MaxLength restricts the maximum length of new sessions to l.
|
||||||
|
// If l is 0 there is no limit to the size of a session, use with caution.
|
||||||
|
// The default for a new FilesystemStore is 4096.
|
||||||
|
func (s *FilesystemStore) MaxLength(l int) {
|
||||||
|
for _, c := range s.Codecs {
|
||||||
|
if codec, ok := c.(*securecookie.SecureCookie); ok {
|
||||||
|
codec.MaxLength(l)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns a session for the given name after adding it to the registry.
|
||||||
|
//
|
||||||
|
// See CookieStore.Get().
|
||||||
|
func (s *FilesystemStore) Get(r *http.Request, name string) (*Session, error) {
|
||||||
|
return GetRegistry(r).Get(s, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// New returns a session for the given name without adding it to the registry.
|
||||||
|
//
|
||||||
|
// See CookieStore.New().
|
||||||
|
func (s *FilesystemStore) New(r *http.Request, name string) (*Session, error) {
|
||||||
|
session := NewSession(s, name)
|
||||||
|
opts := *s.Options
|
||||||
|
session.Options = &opts
|
||||||
|
session.IsNew = true
|
||||||
|
var err error
|
||||||
|
if c, errCookie := r.Cookie(name); errCookie == nil {
|
||||||
|
err = securecookie.DecodeMulti(name, c.Value, &session.ID, s.Codecs...)
|
||||||
|
if err == nil {
|
||||||
|
err = s.load(session)
|
||||||
|
if err == nil {
|
||||||
|
session.IsNew = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return session, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save adds a single session to the response.
|
||||||
|
//
|
||||||
|
// If the Options.MaxAge of the session is <= 0 then the session file will be
|
||||||
|
// deleted from the store path. With this process it enforces the properly
|
||||||
|
// session cookie handling so no need to trust in the cookie management in the
|
||||||
|
// web browser.
|
||||||
|
func (s *FilesystemStore) Save(r *http.Request, w http.ResponseWriter,
|
||||||
|
session *Session) error {
|
||||||
|
// Delete if max-age is <= 0
|
||||||
|
if session.Options.MaxAge <= 0 {
|
||||||
|
if err := s.erase(session); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
http.SetCookie(w, NewCookie(session.Name(), "", session.Options))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if session.ID == "" {
|
||||||
|
// Because the ID is used in the filename, encode it to
|
||||||
|
// use alphanumeric characters only.
|
||||||
|
session.ID = strings.TrimRight(
|
||||||
|
base32.StdEncoding.EncodeToString(
|
||||||
|
securecookie.GenerateRandomKey(32)), "=")
|
||||||
|
}
|
||||||
|
if err := s.save(session); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
encoded, err := securecookie.EncodeMulti(session.Name(), session.ID,
|
||||||
|
s.Codecs...)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
http.SetCookie(w, NewCookie(session.Name(), encoded, session.Options))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MaxAge sets the maximum age for the store and the underlying cookie
|
||||||
|
// implementation. Individual sessions can be deleted by setting Options.MaxAge
|
||||||
|
// = -1 for that session.
|
||||||
|
func (s *FilesystemStore) MaxAge(age int) {
|
||||||
|
s.Options.MaxAge = age
|
||||||
|
|
||||||
|
// Set the maxAge for each securecookie instance.
|
||||||
|
for _, codec := range s.Codecs {
|
||||||
|
if sc, ok := codec.(*securecookie.SecureCookie); ok {
|
||||||
|
sc.MaxAge(age)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// save writes encoded session.Values to a file.
|
||||||
|
func (s *FilesystemStore) save(session *Session) error {
|
||||||
|
encoded, err := securecookie.EncodeMulti(session.Name(), session.Values,
|
||||||
|
s.Codecs...)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
filename := filepath.Join(s.path, "session_"+session.ID)
|
||||||
|
fileMutex.Lock()
|
||||||
|
defer fileMutex.Unlock()
|
||||||
|
return ioutil.WriteFile(filename, []byte(encoded), 0600)
|
||||||
|
}
|
||||||
|
|
||||||
|
// load reads a file and decodes its content into session.Values.
|
||||||
|
func (s *FilesystemStore) load(session *Session) error {
|
||||||
|
filename := filepath.Join(s.path, "session_"+session.ID)
|
||||||
|
fileMutex.RLock()
|
||||||
|
defer fileMutex.RUnlock()
|
||||||
|
fdata, err := ioutil.ReadFile(filename)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err = securecookie.DecodeMulti(session.Name(), string(fdata),
|
||||||
|
&session.Values, s.Codecs...); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// delete session file
|
||||||
|
func (s *FilesystemStore) erase(session *Session) error {
|
||||||
|
filename := filepath.Join(s.path, "session_"+session.ID)
|
||||||
|
|
||||||
|
fileMutex.RLock()
|
||||||
|
defer fileMutex.RUnlock()
|
||||||
|
|
||||||
|
err := os.Remove(filename)
|
||||||
|
return err
|
||||||
|
}
|
||||||
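The CookieStore implemented above is the piece this commit vendors for cookie-based sessions. As a rough, self-contained sketch of how such a store is typically wired into login/logout handlers — the key, cookie name, handler names and max-age value below are illustrative assumptions, not stash's actual code:

package main

import (
	"net/http"

	"github.com/gorilla/sessions"
)

// Hypothetical store; the key and cookie name are examples only.
var store = sessions.NewCookieStore([]byte("replace-with-a-random-key"))

func loginHandler(w http.ResponseWriter, r *http.Request) {
	session, _ := store.Get(r, "example-session")
	session.Values["loggedIn"] = true
	// MaxAge is in seconds; a configurable session age would be injected here.
	session.Options.MaxAge = 3600
	session.Save(r, w)
}

func logoutHandler(w http.ResponseWriter, r *http.Request) {
	session, _ := store.Get(r, "example-session")
	// MaxAge <= 0 tells the store to expire the cookie, logging the user out.
	session.Options.MaxAge = -1
	session.Save(r, w)
}

func main() {
	http.HandleFunc("/login", loginHandler)
	http.HandleFunc("/logout", logoutHandler)
	http.ListenAndServe(":8080", nil)
}

Because the max age is just a per-session Options field, making it configurable amounts to stamping whatever value the server's config provides onto each session before Save.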
712	vendor/golang.org/x/net/html/atom/gen.go generated vendored Normal file
@@ -0,0 +1,712 @@
// Copyright 2012 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
//go:generate go run gen.go
|
||||||
|
//go:generate go run gen.go -test
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/format"
|
||||||
|
"io/ioutil"
|
||||||
|
"math/rand"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// identifier converts s to a Go exported identifier.
|
||||||
|
// It converts "div" to "Div" and "accept-charset" to "AcceptCharset".
|
||||||
|
func identifier(s string) string {
|
||||||
|
b := make([]byte, 0, len(s))
|
||||||
|
cap := true
|
||||||
|
for _, c := range s {
|
||||||
|
if c == '-' {
|
||||||
|
cap = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if cap && 'a' <= c && c <= 'z' {
|
||||||
|
c -= 'a' - 'A'
|
||||||
|
}
|
||||||
|
cap = false
|
||||||
|
b = append(b, byte(c))
|
||||||
|
}
|
||||||
|
return string(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
var test = flag.Bool("test", false, "generate table_test.go")
|
||||||
|
|
||||||
|
func genFile(name string, buf *bytes.Buffer) {
|
||||||
|
b, err := format.Source(buf.Bytes())
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if err := ioutil.WriteFile(name, b, 0644); err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
var all []string
|
||||||
|
all = append(all, elements...)
|
||||||
|
all = append(all, attributes...)
|
||||||
|
all = append(all, eventHandlers...)
|
||||||
|
all = append(all, extra...)
|
||||||
|
sort.Strings(all)
|
||||||
|
|
||||||
|
// uniq - lists have dups
|
||||||
|
w := 0
|
||||||
|
for _, s := range all {
|
||||||
|
if w == 0 || all[w-1] != s {
|
||||||
|
all[w] = s
|
||||||
|
w++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
all = all[:w]
|
||||||
|
|
||||||
|
if *test {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintln(&buf, "//go:generate go run gen.go -test\n")
|
||||||
|
fmt.Fprintln(&buf, "package atom\n")
|
||||||
|
fmt.Fprintln(&buf, "var testAtomList = []string{")
|
||||||
|
for _, s := range all {
|
||||||
|
fmt.Fprintf(&buf, "\t%q,\n", s)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(&buf, "}")
|
||||||
|
|
||||||
|
genFile("table_test.go", &buf)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find hash that minimizes table size.
|
||||||
|
var best *table
|
||||||
|
for i := 0; i < 1000000; i++ {
|
||||||
|
if best != nil && 1<<(best.k-1) < len(all) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
h := rand.Uint32()
|
||||||
|
for k := uint(0); k <= 16; k++ {
|
||||||
|
if best != nil && k >= best.k {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
var t table
|
||||||
|
if t.init(h, k, all) {
|
||||||
|
best = &t
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if best == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "failed to construct string table\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lay out strings, using overlaps when possible.
|
||||||
|
layout := append([]string{}, all...)
|
||||||
|
|
||||||
|
// Remove strings that are substrings of other strings
|
||||||
|
for changed := true; changed; {
|
||||||
|
changed = false
|
||||||
|
for i, s := range layout {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j, t := range layout {
|
||||||
|
if i != j && t != "" && strings.Contains(s, t) {
|
||||||
|
changed = true
|
||||||
|
layout[j] = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Join strings where one suffix matches another prefix.
|
||||||
|
for {
|
||||||
|
// Find best i, j, k such that layout[i][len-k:] == layout[j][:k],
|
||||||
|
// maximizing overlap length k.
|
||||||
|
besti := -1
|
||||||
|
bestj := -1
|
||||||
|
bestk := 0
|
||||||
|
for i, s := range layout {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j, t := range layout {
|
||||||
|
if i == j {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for k := bestk + 1; k <= len(s) && k <= len(t); k++ {
|
||||||
|
if s[len(s)-k:] == t[:k] {
|
||||||
|
besti = i
|
||||||
|
bestj = j
|
||||||
|
bestk = k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if bestk > 0 {
|
||||||
|
layout[besti] += layout[bestj][bestk:]
|
||||||
|
layout[bestj] = ""
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
text := strings.Join(layout, "")
|
||||||
|
|
||||||
|
atom := map[string]uint32{}
|
||||||
|
for _, s := range all {
|
||||||
|
off := strings.Index(text, s)
|
||||||
|
if off < 0 {
|
||||||
|
panic("lost string " + s)
|
||||||
|
}
|
||||||
|
atom[s] = uint32(off<<8 | len(s))
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
// Generate the Go code.
|
||||||
|
fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintln(&buf, "//go:generate go run gen.go\n")
|
||||||
|
fmt.Fprintln(&buf, "package atom\n\nconst (")
|
||||||
|
|
||||||
|
// compute max len
|
||||||
|
maxLen := 0
|
||||||
|
for _, s := range all {
|
||||||
|
if maxLen < len(s) {
|
||||||
|
maxLen = len(s)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%s Atom = %#x\n", identifier(s), atom[s])
|
||||||
|
}
|
||||||
|
fmt.Fprintln(&buf, ")\n")
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, "const hash0 = %#x\n\n", best.h0)
|
||||||
|
fmt.Fprintf(&buf, "const maxAtomLen = %d\n\n", maxLen)
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, "var table = [1<<%d]Atom{\n", best.k)
|
||||||
|
for i, s := range best.tab {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%#x: %#x, // %s\n", i, atom[s], s)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "}\n")
|
||||||
|
datasize := (1 << best.k) * 4
|
||||||
|
|
||||||
|
fmt.Fprintln(&buf, "const atomText =")
|
||||||
|
textsize := len(text)
|
||||||
|
for len(text) > 60 {
|
||||||
|
fmt.Fprintf(&buf, "\t%q +\n", text[:60])
|
||||||
|
text = text[60:]
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%q\n\n", text)
|
||||||
|
|
||||||
|
genFile("table.go", &buf)
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stdout, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize)
|
||||||
|
}
|
||||||
|
|
||||||
|
type byLen []string
|
||||||
|
|
||||||
|
func (x byLen) Less(i, j int) bool { return len(x[i]) > len(x[j]) }
|
||||||
|
func (x byLen) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||||
|
func (x byLen) Len() int { return len(x) }
|
||||||
|
|
||||||
|
// fnv computes the FNV hash with an arbitrary starting value h.
|
||||||
|
func fnv(h uint32, s string) uint32 {
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
h ^= uint32(s[i])
|
||||||
|
h *= 16777619
|
||||||
|
}
|
||||||
|
return h
|
||||||
|
}
|
||||||
|
|
||||||
|
// A table represents an attempt at constructing the lookup table.
|
||||||
|
// The lookup table uses cuckoo hashing, meaning that each string
|
||||||
|
// can be found in one of two positions.
|
||||||
|
type table struct {
|
||||||
|
h0 uint32
|
||||||
|
k uint
|
||||||
|
mask uint32
|
||||||
|
tab []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// hash returns the two hashes for s.
|
||||||
|
func (t *table) hash(s string) (h1, h2 uint32) {
|
||||||
|
h := fnv(t.h0, s)
|
||||||
|
h1 = h & t.mask
|
||||||
|
h2 = (h >> 16) & t.mask
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// init initializes the table with the given parameters.
|
||||||
|
// h0 is the initial hash value,
|
||||||
|
// k is the number of bits of hash value to use, and
|
||||||
|
// x is the list of strings to store in the table.
|
||||||
|
// init returns false if the table cannot be constructed.
|
||||||
|
func (t *table) init(h0 uint32, k uint, x []string) bool {
|
||||||
|
t.h0 = h0
|
||||||
|
t.k = k
|
||||||
|
t.tab = make([]string, 1<<k)
|
||||||
|
t.mask = 1<<k - 1
|
||||||
|
for _, s := range x {
|
||||||
|
if !t.insert(s) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert inserts s in the table.
|
||||||
|
func (t *table) insert(s string) bool {
|
||||||
|
h1, h2 := t.hash(s)
|
||||||
|
if t.tab[h1] == "" {
|
||||||
|
t.tab[h1] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.tab[h2] == "" {
|
||||||
|
t.tab[h2] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.push(h1, 0) {
|
||||||
|
t.tab[h1] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.push(h2, 0) {
|
||||||
|
t.tab[h2] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// push attempts to push aside the entry in slot i.
|
||||||
|
func (t *table) push(i uint32, depth int) bool {
|
||||||
|
if depth > len(t.tab) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s := t.tab[i]
|
||||||
|
h1, h2 := t.hash(s)
|
||||||
|
j := h1 + h2 - i
|
||||||
|
if t.tab[j] != "" && !t.push(j, depth+1) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
t.tab[j] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// The lists of element names and attribute keys were taken from
|
||||||
|
// https://html.spec.whatwg.org/multipage/indices.html#index
|
||||||
|
// as of the "HTML Living Standard - Last Updated 16 April 2018" version.
|
||||||
|
|
||||||
|
// "command", "keygen" and "menuitem" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var elements = []string{
|
||||||
|
"a",
|
||||||
|
"abbr",
|
||||||
|
"address",
|
||||||
|
"area",
|
||||||
|
"article",
|
||||||
|
"aside",
|
||||||
|
"audio",
|
||||||
|
"b",
|
||||||
|
"base",
|
||||||
|
"bdi",
|
||||||
|
"bdo",
|
||||||
|
"blockquote",
|
||||||
|
"body",
|
||||||
|
"br",
|
||||||
|
"button",
|
||||||
|
"canvas",
|
||||||
|
"caption",
|
||||||
|
"cite",
|
||||||
|
"code",
|
||||||
|
"col",
|
||||||
|
"colgroup",
|
||||||
|
"command",
|
||||||
|
"data",
|
||||||
|
"datalist",
|
||||||
|
"dd",
|
||||||
|
"del",
|
||||||
|
"details",
|
||||||
|
"dfn",
|
||||||
|
"dialog",
|
||||||
|
"div",
|
||||||
|
"dl",
|
||||||
|
"dt",
|
||||||
|
"em",
|
||||||
|
"embed",
|
||||||
|
"fieldset",
|
||||||
|
"figcaption",
|
||||||
|
"figure",
|
||||||
|
"footer",
|
||||||
|
"form",
|
||||||
|
"h1",
|
||||||
|
"h2",
|
||||||
|
"h3",
|
||||||
|
"h4",
|
||||||
|
"h5",
|
||||||
|
"h6",
|
||||||
|
"head",
|
||||||
|
"header",
|
||||||
|
"hgroup",
|
||||||
|
"hr",
|
||||||
|
"html",
|
||||||
|
"i",
|
||||||
|
"iframe",
|
||||||
|
"img",
|
||||||
|
"input",
|
||||||
|
"ins",
|
||||||
|
"kbd",
|
||||||
|
"keygen",
|
||||||
|
"label",
|
||||||
|
"legend",
|
||||||
|
"li",
|
||||||
|
"link",
|
||||||
|
"main",
|
||||||
|
"map",
|
||||||
|
"mark",
|
||||||
|
"menu",
|
||||||
|
"menuitem",
|
||||||
|
"meta",
|
||||||
|
"meter",
|
||||||
|
"nav",
|
||||||
|
"noscript",
|
||||||
|
"object",
|
||||||
|
"ol",
|
||||||
|
"optgroup",
|
||||||
|
"option",
|
||||||
|
"output",
|
||||||
|
"p",
|
||||||
|
"param",
|
||||||
|
"picture",
|
||||||
|
"pre",
|
||||||
|
"progress",
|
||||||
|
"q",
|
||||||
|
"rp",
|
||||||
|
"rt",
|
||||||
|
"ruby",
|
||||||
|
"s",
|
||||||
|
"samp",
|
||||||
|
"script",
|
||||||
|
"section",
|
||||||
|
"select",
|
||||||
|
"slot",
|
||||||
|
"small",
|
||||||
|
"source",
|
||||||
|
"span",
|
||||||
|
"strong",
|
||||||
|
"style",
|
||||||
|
"sub",
|
||||||
|
"summary",
|
||||||
|
"sup",
|
||||||
|
"table",
|
||||||
|
"tbody",
|
||||||
|
"td",
|
||||||
|
"template",
|
||||||
|
"textarea",
|
||||||
|
"tfoot",
|
||||||
|
"th",
|
||||||
|
"thead",
|
||||||
|
"time",
|
||||||
|
"title",
|
||||||
|
"tr",
|
||||||
|
"track",
|
||||||
|
"u",
|
||||||
|
"ul",
|
||||||
|
"var",
|
||||||
|
"video",
|
||||||
|
"wbr",
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://html.spec.whatwg.org/multipage/indices.html#attributes-3
|
||||||
|
//
|
||||||
|
// "challenge", "command", "contextmenu", "dropzone", "icon", "keytype", "mediagroup",
|
||||||
|
// "radiogroup", "spellcheck", "scoped", "seamless", "sortable" and "sorted" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var attributes = []string{
|
||||||
|
"abbr",
|
||||||
|
"accept",
|
||||||
|
"accept-charset",
|
||||||
|
"accesskey",
|
||||||
|
"action",
|
||||||
|
"allowfullscreen",
|
||||||
|
"allowpaymentrequest",
|
||||||
|
"allowusermedia",
|
||||||
|
"alt",
|
||||||
|
"as",
|
||||||
|
"async",
|
||||||
|
"autocomplete",
|
||||||
|
"autofocus",
|
||||||
|
"autoplay",
|
||||||
|
"challenge",
|
||||||
|
"charset",
|
||||||
|
"checked",
|
||||||
|
"cite",
|
||||||
|
"class",
|
||||||
|
"color",
|
||||||
|
"cols",
|
||||||
|
"colspan",
|
||||||
|
"command",
|
||||||
|
"content",
|
||||||
|
"contenteditable",
|
||||||
|
"contextmenu",
|
||||||
|
"controls",
|
||||||
|
"coords",
|
||||||
|
"crossorigin",
|
||||||
|
"data",
|
||||||
|
"datetime",
|
||||||
|
"default",
|
||||||
|
"defer",
|
||||||
|
"dir",
|
||||||
|
"dirname",
|
||||||
|
"disabled",
|
||||||
|
"download",
|
||||||
|
"draggable",
|
||||||
|
"dropzone",
|
||||||
|
"enctype",
|
||||||
|
"for",
|
||||||
|
"form",
|
||||||
|
"formaction",
|
||||||
|
"formenctype",
|
||||||
|
"formmethod",
|
||||||
|
"formnovalidate",
|
||||||
|
"formtarget",
|
||||||
|
"headers",
|
||||||
|
"height",
|
||||||
|
"hidden",
|
||||||
|
"high",
|
||||||
|
"href",
|
||||||
|
"hreflang",
|
||||||
|
"http-equiv",
|
||||||
|
"icon",
|
||||||
|
"id",
|
||||||
|
"inputmode",
|
||||||
|
"integrity",
|
||||||
|
"is",
|
||||||
|
"ismap",
|
||||||
|
"itemid",
|
||||||
|
"itemprop",
|
||||||
|
"itemref",
|
||||||
|
"itemscope",
|
||||||
|
"itemtype",
|
||||||
|
"keytype",
|
||||||
|
"kind",
|
||||||
|
"label",
|
||||||
|
"lang",
|
||||||
|
"list",
|
||||||
|
"loop",
|
||||||
|
"low",
|
||||||
|
"manifest",
|
||||||
|
"max",
|
||||||
|
"maxlength",
|
||||||
|
"media",
|
||||||
|
"mediagroup",
|
||||||
|
"method",
|
||||||
|
"min",
|
||||||
|
"minlength",
|
||||||
|
"multiple",
|
||||||
|
"muted",
|
||||||
|
"name",
|
||||||
|
"nomodule",
|
||||||
|
"nonce",
|
||||||
|
"novalidate",
|
||||||
|
"open",
|
||||||
|
"optimum",
|
||||||
|
"pattern",
|
||||||
|
"ping",
|
||||||
|
"placeholder",
|
||||||
|
"playsinline",
|
||||||
|
"poster",
|
||||||
|
"preload",
|
||||||
|
"radiogroup",
|
||||||
|
"readonly",
|
||||||
|
"referrerpolicy",
|
||||||
|
"rel",
|
||||||
|
"required",
|
||||||
|
"reversed",
|
||||||
|
"rows",
|
||||||
|
"rowspan",
|
||||||
|
"sandbox",
|
||||||
|
"spellcheck",
|
||||||
|
"scope",
|
||||||
|
"scoped",
|
||||||
|
"seamless",
|
||||||
|
"selected",
|
||||||
|
"shape",
|
||||||
|
"size",
|
||||||
|
"sizes",
|
||||||
|
"sortable",
|
||||||
|
"sorted",
|
||||||
|
"slot",
|
||||||
|
"span",
|
||||||
|
"spellcheck",
|
||||||
|
"src",
|
||||||
|
"srcdoc",
|
||||||
|
"srclang",
|
||||||
|
"srcset",
|
||||||
|
"start",
|
||||||
|
"step",
|
||||||
|
"style",
|
||||||
|
"tabindex",
|
||||||
|
"target",
|
||||||
|
"title",
|
||||||
|
"translate",
|
||||||
|
"type",
|
||||||
|
"typemustmatch",
|
||||||
|
"updateviacache",
|
||||||
|
"usemap",
|
||||||
|
"value",
|
||||||
|
"width",
|
||||||
|
"workertype",
|
||||||
|
"wrap",
|
||||||
|
}
|
||||||
|
|
||||||
|
// "onautocomplete", "onautocompleteerror", "onmousewheel",
|
||||||
|
// "onshow" and "onsort" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var eventHandlers = []string{
|
||||||
|
"onabort",
|
||||||
|
"onautocomplete",
|
||||||
|
"onautocompleteerror",
|
||||||
|
"onauxclick",
|
||||||
|
"onafterprint",
|
||||||
|
"onbeforeprint",
|
||||||
|
"onbeforeunload",
|
||||||
|
"onblur",
|
||||||
|
"oncancel",
|
||||||
|
"oncanplay",
|
||||||
|
"oncanplaythrough",
|
||||||
|
"onchange",
|
||||||
|
"onclick",
|
||||||
|
"onclose",
|
||||||
|
"oncontextmenu",
|
||||||
|
"oncopy",
|
||||||
|
"oncuechange",
|
||||||
|
"oncut",
|
||||||
|
"ondblclick",
|
||||||
|
"ondrag",
|
||||||
|
"ondragend",
|
||||||
|
"ondragenter",
|
||||||
|
"ondragexit",
|
||||||
|
"ondragleave",
|
||||||
|
"ondragover",
|
||||||
|
"ondragstart",
|
||||||
|
"ondrop",
|
||||||
|
"ondurationchange",
|
||||||
|
"onemptied",
|
||||||
|
"onended",
|
||||||
|
"onerror",
|
||||||
|
"onfocus",
|
||||||
|
"onhashchange",
|
||||||
|
"oninput",
|
||||||
|
"oninvalid",
|
||||||
|
"onkeydown",
|
||||||
|
"onkeypress",
|
||||||
|
"onkeyup",
|
||||||
|
"onlanguagechange",
|
||||||
|
"onload",
|
||||||
|
"onloadeddata",
|
||||||
|
"onloadedmetadata",
|
||||||
|
"onloadend",
|
||||||
|
"onloadstart",
|
||||||
|
"onmessage",
|
||||||
|
"onmessageerror",
|
||||||
|
"onmousedown",
|
||||||
|
"onmouseenter",
|
||||||
|
"onmouseleave",
|
||||||
|
"onmousemove",
|
||||||
|
"onmouseout",
|
||||||
|
"onmouseover",
|
||||||
|
"onmouseup",
|
||||||
|
"onmousewheel",
|
||||||
|
"onwheel",
|
||||||
|
"onoffline",
|
||||||
|
"ononline",
|
||||||
|
"onpagehide",
|
||||||
|
"onpageshow",
|
||||||
|
"onpaste",
|
||||||
|
"onpause",
|
||||||
|
"onplay",
|
||||||
|
"onplaying",
|
||||||
|
"onpopstate",
|
||||||
|
"onprogress",
|
||||||
|
"onratechange",
|
||||||
|
"onreset",
|
||||||
|
"onresize",
|
||||||
|
"onrejectionhandled",
|
||||||
|
"onscroll",
|
||||||
|
"onsecuritypolicyviolation",
|
||||||
|
"onseeked",
|
||||||
|
"onseeking",
|
||||||
|
"onselect",
|
||||||
|
"onshow",
|
||||||
|
"onsort",
|
||||||
|
"onstalled",
|
||||||
|
"onstorage",
|
||||||
|
"onsubmit",
|
||||||
|
"onsuspend",
|
||||||
|
"ontimeupdate",
|
||||||
|
"ontoggle",
|
||||||
|
"onunhandledrejection",
|
||||||
|
"onunload",
|
||||||
|
"onvolumechange",
|
||||||
|
"onwaiting",
|
||||||
|
}
|
||||||
|
|
||||||
|
// extra are ad-hoc values not covered by any of the lists above.
|
||||||
|
var extra = []string{
|
||||||
|
"acronym",
|
||||||
|
"align",
|
||||||
|
"annotation",
|
||||||
|
"annotation-xml",
|
||||||
|
"applet",
|
||||||
|
"basefont",
|
||||||
|
"bgsound",
|
||||||
|
"big",
|
||||||
|
"blink",
|
||||||
|
"center",
|
||||||
|
"color",
|
||||||
|
"desc",
|
||||||
|
"face",
|
||||||
|
"font",
|
||||||
|
"foreignObject", // HTML is case-insensitive, but SVG-embedded-in-HTML is case-sensitive.
|
||||||
|
"foreignobject",
|
||||||
|
"frame",
|
||||||
|
"frameset",
|
||||||
|
"image",
|
||||||
|
"isindex",
|
||||||
|
"listing",
|
||||||
|
"malignmark",
|
||||||
|
"marquee",
|
||||||
|
"math",
|
||||||
|
"mglyph",
|
||||||
|
"mi",
|
||||||
|
"mn",
|
||||||
|
"mo",
|
||||||
|
"ms",
|
||||||
|
"mtext",
|
||||||
|
"nobr",
|
||||||
|
"noembed",
|
||||||
|
"noframes",
|
||||||
|
"plaintext",
|
||||||
|
"prompt",
|
||||||
|
"public",
|
||||||
|
"rb",
|
||||||
|
"rtc",
|
||||||
|
"spacer",
|
||||||
|
"strike",
|
||||||
|
"svg",
|
||||||
|
"system",
|
||||||
|
"tt",
|
||||||
|
"xmp",
|
||||||
|
}
|
||||||
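gen.go above regenerates the static atom table used by golang.org/x/net/html. For orientation, a minimal sketch of how that generated table is consumed through the package's public Lookup API (the example strings are arbitrary):

package main

import (
	"fmt"

	"golang.org/x/net/html/atom"
)

func main() {
	// Lookup maps a byte slice to its interned Atom constant (0 if unknown).
	a := atom.Lookup([]byte("div"))
	fmt.Println(a == atom.Div, a.String()) // true div

	// Names that are not in the generated table yield the zero Atom.
	fmt.Println(atom.Lookup([]byte("not-an-element")) == 0) // true
}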
61	vendor/golang.org/x/sys/unix/mkasm_darwin.go generated vendored Normal file
@@ -0,0 +1,61 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// mkasm_darwin.go generates assembly trampolines to call libSystem routines from Go.
//This program must be run after mksyscall.go.
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"strings"
)

func main() {
	in1, err := ioutil.ReadFile("syscall_darwin.go")
	if err != nil {
		log.Fatalf("can't open syscall_darwin.go: %s", err)
	}
	arch := os.Args[1]
	in2, err := ioutil.ReadFile(fmt.Sprintf("syscall_darwin_%s.go", arch))
	if err != nil {
		log.Fatalf("can't open syscall_darwin_%s.go: %s", arch, err)
	}
	in3, err := ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.go", arch))
	if err != nil {
		log.Fatalf("can't open zsyscall_darwin_%s.go: %s", arch, err)
	}
	in := string(in1) + string(in2) + string(in3)

	trampolines := map[string]bool{}

	var out bytes.Buffer

	fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " "))
	fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n")
	fmt.Fprintf(&out, "\n")
	fmt.Fprintf(&out, "// +build go1.12\n")
	fmt.Fprintf(&out, "\n")
	fmt.Fprintf(&out, "#include \"textflag.h\"\n")
	for _, line := range strings.Split(in, "\n") {
		if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, "_trampoline()") {
			continue
		}
		fn := line[5 : len(line)-13]
		if !trampolines[fn] {
			trampolines[fn] = true
			fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn)
			fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn)
		}
	}
	err = ioutil.WriteFile(fmt.Sprintf("zsyscall_darwin_%s.s", arch), out.Bytes(), 0644)
	if err != nil {
		log.Fatalf("can't write zsyscall_darwin_%s.s: %s", arch, err)
	}
}
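mkasm_darwin.go boils down to the prefix/suffix scan in its main loop. A minimal standalone sketch of that single step, using an invented trampoline declaration as input:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Same slicing as the generator: strip "func " (5 bytes) and "_trampoline()" (13 bytes).
	line := "func libc_getpid_trampoline()" // illustrative declaration, not from the real zsyscall file
	if strings.HasPrefix(line, "func ") && strings.HasSuffix(line, "_trampoline()") {
		fn := line[5 : len(line)-13]
		fmt.Printf("TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n\tJMP\t%s(SB)\n", fn, fn)
	}
}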
106	vendor/golang.org/x/sys/unix/mkpost.go generated vendored Normal file
@@ -0,0 +1,106 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// mkpost processes the output of cgo -godefs to
// modify the generated types. It is used to clean up
// the sys API in an architecture specific manner.
//
// mkpost is run after cgo -godefs; see README.md.
package main

import (
	"bytes"
	"fmt"
	"go/format"
	"io/ioutil"
	"log"
	"os"
	"regexp"
)

func main() {
	// Get the OS and architecture (using GOARCH_TARGET if it exists)
	goos := os.Getenv("GOOS")
	goarch := os.Getenv("GOARCH_TARGET")
	if goarch == "" {
		goarch = os.Getenv("GOARCH")
	}
	// Check that we are using the Docker-based build system if we should be.
	if goos == "linux" {
		if os.Getenv("GOLANG_SYS_BUILD") != "docker" {
			os.Stderr.WriteString("In the Docker-based build system, mkpost should not be called directly.\n")
			os.Stderr.WriteString("See README.md\n")
			os.Exit(1)
		}
	}

	b, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		log.Fatal(err)
	}

	// Intentionally export __val fields in Fsid and Sigset_t
	valRegex := regexp.MustCompile(`type (Fsid|Sigset_t) struct {(\s+)X__val(\s+\S+\s+)}`)
	b = valRegex.ReplaceAll(b, []byte("type $1 struct {${2}Val$3}"))

	// Intentionally export __fds_bits field in FdSet
	fdSetRegex := regexp.MustCompile(`type (FdSet) struct {(\s+)X__fds_bits(\s+\S+\s+)}`)
	b = fdSetRegex.ReplaceAll(b, []byte("type $1 struct {${2}Bits$3}"))

	// If we have empty Ptrace structs, we should delete them. Only s390x emits
	// nonempty Ptrace structs.
	ptraceRexexp := regexp.MustCompile(`type Ptrace((Psw|Fpregs|Per) struct {\s*})`)
	b = ptraceRexexp.ReplaceAll(b, nil)

	// Replace the control_regs union with a blank identifier for now.
	controlRegsRegex := regexp.MustCompile(`(Control_regs)\s+\[0\]uint64`)
	b = controlRegsRegex.ReplaceAll(b, []byte("_ [0]uint64"))

	// Remove fields that are added by glibc
	// Note that this is unstable as the identifers are private.
	removeFieldsRegex := regexp.MustCompile(`X__glibc\S*`)
	b = removeFieldsRegex.ReplaceAll(b, []byte("_"))

	// Convert [65]int8 to [65]byte in Utsname members to simplify
	// conversion to string; see golang.org/issue/20753
	convertUtsnameRegex := regexp.MustCompile(`((Sys|Node|Domain)name|Release|Version|Machine)(\s+)\[(\d+)\]u?int8`)
	b = convertUtsnameRegex.ReplaceAll(b, []byte("$1$3[$4]byte"))

	// Convert [1024]int8 to [1024]byte in Ptmget members
	convertPtmget := regexp.MustCompile(`([SC]n)(\s+)\[(\d+)\]u?int8`)
	b = convertPtmget.ReplaceAll(b, []byte("$1[$3]byte"))

	// Remove spare fields (e.g. in Statx_t)
	spareFieldsRegex := regexp.MustCompile(`X__spare\S*`)
	b = spareFieldsRegex.ReplaceAll(b, []byte("_"))

	// Remove cgo padding fields
	removePaddingFieldsRegex := regexp.MustCompile(`Pad_cgo_\d+`)
	b = removePaddingFieldsRegex.ReplaceAll(b, []byte("_"))

	// Remove padding, hidden, or unused fields
	removeFieldsRegex = regexp.MustCompile(`\b(X_\S+|Padding)`)
	b = removeFieldsRegex.ReplaceAll(b, []byte("_"))

	// Remove the first line of warning from cgo
	b = b[bytes.IndexByte(b, '\n')+1:]
	// Modify the command in the header to include:
	// mkpost, our own warning, and a build tag.
	replacement := fmt.Sprintf(`$1 | go run mkpost.go
// Code generated by the command above; see README.md. DO NOT EDIT.

// +build %s,%s`, goarch, goos)
	cgoCommandRegex := regexp.MustCompile(`(cgo -godefs .*)`)
	b = cgoCommandRegex.ReplaceAll(b, []byte(replacement))

	// gofmt
	b, err = format.Source(b)
	if err != nil {
		log.Fatal(err)
	}

	os.Stdout.Write(b)
}
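mkpost is essentially a pipeline of regexp rewrites over the cgo -godefs output. A small standalone sketch of the first rewrite above, run against an invented struct snippet (the input text is made up for illustration):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern mkpost uses to export the __val field of Fsid/Sigset_t.
	valRegex := regexp.MustCompile(`type (Fsid|Sigset_t) struct {(\s+)X__val(\s+\S+\s+)}`)
	in := []byte("type Fsid struct {\n\tX__val [2]int32\n}")
	out := valRegex.ReplaceAll(in, []byte("type $1 struct {${2}Val$3}"))
	fmt.Printf("%s\n", out) // prints the struct with X__val renamed to Val
}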
407	vendor/golang.org/x/sys/unix/mksyscall.go generated vendored Normal file
@@ -0,0 +1,407 @@
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
/*
|
||||||
|
This program reads a file containing function prototypes
|
||||||
|
(like syscall_darwin.go) and generates system call bodies.
|
||||||
|
The prototypes are marked by lines beginning with "//sys"
|
||||||
|
and read like func declarations if //sys is replaced by func, but:
|
||||||
|
* The parameter lists must give a name for each argument.
|
||||||
|
This includes return parameters.
|
||||||
|
* The parameter lists must give a type for each argument:
|
||||||
|
the (x, y, z int) shorthand is not allowed.
|
||||||
|
* If the return parameter is an error number, it must be named errno.
|
||||||
|
|
||||||
|
A line beginning with //sysnb is like //sys, except that the
|
||||||
|
goroutine will not be suspended during the execution of the system
|
||||||
|
call. This must only be used for system calls which can never
|
||||||
|
block, as otherwise the system call could cause all goroutines to
|
||||||
|
hang.
|
||||||
|
*/
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
b32 = flag.Bool("b32", false, "32bit big-endian")
|
||||||
|
l32 = flag.Bool("l32", false, "32bit little-endian")
|
||||||
|
plan9 = flag.Bool("plan9", false, "plan9")
|
||||||
|
openbsd = flag.Bool("openbsd", false, "openbsd")
|
||||||
|
netbsd = flag.Bool("netbsd", false, "netbsd")
|
||||||
|
dragonfly = flag.Bool("dragonfly", false, "dragonfly")
|
||||||
|
arm = flag.Bool("arm", false, "arm") // 64-bit value should use (even, odd)-pair
|
||||||
|
tags = flag.String("tags", "", "build tags")
|
||||||
|
filename = flag.String("output", "", "output file name (standard output if omitted)")
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this programs's commandline arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksyscall.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return *tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Get the OS and architecture (using GOARCH_TARGET if it exists)
|
||||||
|
goos := os.Getenv("GOOS")
|
||||||
|
if goos == "" {
|
||||||
|
fmt.Fprintln(os.Stderr, "GOOS not defined in environment")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
goarch := os.Getenv("GOARCH_TARGET")
|
||||||
|
if goarch == "" {
|
||||||
|
goarch = os.Getenv("GOARCH")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we are using the Docker-based build system if we should
|
||||||
|
if goos == "linux" {
|
||||||
|
if os.Getenv("GOLANG_SYS_BUILD") != "docker" {
|
||||||
|
fmt.Fprintf(os.Stderr, "In the Docker-based build system, mksyscall should not be called directly.\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "See README.md\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
libc := false
|
||||||
|
if goos == "darwin" && strings.Contains(buildTags(), ",go1.12") {
|
||||||
|
libc = true
|
||||||
|
}
|
||||||
|
trampolines := map[string]bool{}
|
||||||
|
|
||||||
|
text := ""
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, errno error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*((?i)SYS_[A-Z0-9_]+))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, sysname := f[2], f[3], f[4], f[5]
|
||||||
|
|
||||||
|
// ClockGettime doesn't have a syscall number on Darwin, only generate libc wrappers.
|
||||||
|
if goos == "darwin" && !libc && funct == "ClockGettime" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
outDecl := ""
|
||||||
|
if len(out) > 0 {
|
||||||
|
outDecl = fmt.Sprintf(" (%s)", strings.Join(out, ", "))
|
||||||
|
}
|
||||||
|
text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outDecl)
|
||||||
|
|
||||||
|
// Check if err return available
|
||||||
|
errvar := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare arguments to Syscall.
|
||||||
|
var args []string
|
||||||
|
n := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))")
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *byte\n", n)
|
||||||
|
text += fmt.Sprintf("\t_p%d, %s = BytePtrFromString(%s)\n", n, errvar, p.Name)
|
||||||
|
text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *byte\n", n)
|
||||||
|
text += fmt.Sprintf("\t_p%d, _ = BytePtrFromString(%s)\n", n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass dummy pointer in that case.
|
||||||
|
// Used to pass nil, but some OSes or simulators reject write(fd, nil, 0).
|
||||||
|
text += fmt.Sprintf("\tvar _p%d unsafe.Pointer\n", n)
|
||||||
|
text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = unsafe.Pointer(&%s[0])\n\t}", p.Name, n, p.Name)
|
||||||
|
text += fmt.Sprintf(" else {\n\t\t_p%d = unsafe.Pointer(&_zero)\n\t}\n", n)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(_p%d)", n), fmt.Sprintf("uintptr(len(%s))", p.Name))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && (*openbsd || *netbsd) {
|
||||||
|
args = append(args, "0")
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else if endianness == "little-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "int64" && *dragonfly {
|
||||||
|
if regexp.MustCompile(`^(?i)extp(read|write)`).FindStringSubmatch(funct) == nil {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else if endianness == "little-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if (p.Type == "int64" || p.Type == "uint64") && endianness != "" {
|
||||||
|
if len(args)%2 == 1 && *arm {
|
||||||
|
// arm abi specifies 64-bit argument uses
|
||||||
|
// (even, odd) pair
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine which form to use; pad args with zeros.
|
||||||
|
asm := "Syscall"
|
||||||
|
if nonblock != nil {
|
||||||
|
if errvar == "" && goos == "linux" {
|
||||||
|
asm = "RawSyscallNoError"
|
||||||
|
} else {
|
||||||
|
asm = "RawSyscall"
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if errvar == "" && goos == "linux" {
|
||||||
|
asm = "SyscallNoError"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(args) <= 3 {
|
||||||
|
for len(args) < 3 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else if len(args) <= 6 {
|
||||||
|
asm += "6"
|
||||||
|
for len(args) < 6 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else if len(args) <= 9 {
|
||||||
|
asm += "9"
|
||||||
|
for len(args) < 9 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s too many arguments to system call\n", path, funct)
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call number.
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = "SYS_" + funct
|
||||||
|
sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
|
||||||
|
sysname = strings.ToUpper(sysname)
|
||||||
|
}
|
||||||
|
|
||||||
|
var libcFn string
|
||||||
|
if libc {
|
||||||
|
asm = "syscall_" + strings.ToLower(asm[:1]) + asm[1:] // internal syscall call
|
||||||
|
sysname = strings.TrimPrefix(sysname, "SYS_") // remove SYS_
|
||||||
|
sysname = strings.ToLower(sysname) // lowercase
|
||||||
|
if sysname == "getdirentries64" {
|
||||||
|
// Special case - libSystem name and
|
||||||
|
// raw syscall name don't match.
|
||||||
|
sysname = "__getdirentries64"
|
||||||
|
}
|
||||||
|
libcFn = sysname
|
||||||
|
sysname = "funcPC(libc_" + sysname + "_trampoline)"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actual call.
|
||||||
|
arglist := strings.Join(args, ", ")
|
||||||
|
call := fmt.Sprintf("%s(%s, %s)", asm, sysname, arglist)
|
||||||
|
|
||||||
|
// Assign return values.
|
||||||
|
body := ""
|
||||||
|
ret := []string{"_", "_", "_"}
|
||||||
|
doErrno := false
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" && !*plan9 {
|
||||||
|
reg = "e1"
|
||||||
|
ret[2] = reg
|
||||||
|
doErrno = true
|
||||||
|
} else if p.Name == "err" && *plan9 {
|
||||||
|
ret[0] = "r0"
|
||||||
|
ret[2] = "e1"
|
||||||
|
break
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i] = reg
|
||||||
|
}
|
||||||
|
if p.Type == "bool" {
|
||||||
|
reg = fmt.Sprintf("%s != 0", reg)
|
||||||
|
}
|
||||||
|
if p.Type == "int64" && endianness != "" {
|
||||||
|
// 64-bit number in r1:r0 or r0:r1.
|
||||||
|
if i+2 > len(out) {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s not enough registers for int64 return\n", path, funct)
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1)
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i)
|
||||||
|
}
|
||||||
|
ret[i] = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i+1] = fmt.Sprintf("r%d", i+1)
|
||||||
|
}
|
||||||
|
if reg != "e1" || *plan9 {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" {
|
||||||
|
text += fmt.Sprintf("\t%s\n", call)
|
||||||
|
} else {
|
||||||
|
if errvar == "" && goos == "linux" {
|
||||||
|
// raw syscall without error on Linux, see golang.org/issue/22924
|
||||||
|
text += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], call)
|
||||||
|
} else {
|
||||||
|
text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
text += body
|
||||||
|
|
||||||
|
if *plan9 && ret[2] == "e1" {
|
||||||
|
text += "\tif int32(r0) == -1 {\n"
|
||||||
|
text += "\t\terr = e1\n"
|
||||||
|
text += "\t}\n"
|
||||||
|
} else if doErrno {
|
||||||
|
text += "\tif e1 != 0 {\n"
|
||||||
|
text += "\t\terr = errnoErr(e1)\n"
|
||||||
|
text += "\t}\n"
|
||||||
|
}
|
||||||
|
text += "\treturn\n"
|
||||||
|
text += "}\n\n"
|
||||||
|
|
||||||
|
if libc && !trampolines[libcFn] {
|
||||||
|
// some system calls share a trampoline, like read and readlen.
|
||||||
|
trampolines[libcFn] = true
|
||||||
|
// Declare assembly trampoline.
|
||||||
|
text += fmt.Sprintf("func libc_%s_trampoline()\n", libcFn)
|
||||||
|
// Assembly trampoline calls the libc_* function, which this magic
|
||||||
|
// redirects to use the function from libSystem.
|
||||||
|
text += fmt.Sprintf("//go:linkname libc_%s libc_%s\n", libcFn, libcFn)
|
||||||
|
text += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"/usr/lib/libSystem.B.dylib\"\n", libcFn, libcFn)
|
||||||
|
text += "\n"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package unix
|
||||||
|
|
||||||
|
import (
|
||||||
|
"syscall"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ syscall.Errno
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
||||||
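mksyscall.go drives everything off the //sys prototype regexp near the top of its scanning loop. A minimal sketch that applies the same expression to one example prototype and prints the captured pieces; the prototype line itself is illustrative, not taken from a real syscall_*.go file:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// The same prototype-matching expression mksyscall.go applies to each //sys line.
	sysRE := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*((?i)SYS_[A-Z0-9_]+))?$`)
	line := "//sys	Open(path string, mode int, perm int) (fd int, err error)"
	// mksyscall first collapses whitespace runs to single spaces.
	line = regexp.MustCompile(`\s+`).ReplaceAllString(line, " ")
	f := sysRE.FindStringSubmatch(line)
	fmt.Println("func:", f[2]) // Open
	fmt.Println("in:  ", f[3]) // path string, mode int, perm int
	fmt.Println("out: ", f[4]) // fd int, err error
}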
415	vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go generated vendored Normal file
@@ -0,0 +1,415 @@
// Copyright 2019 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
/*
|
||||||
|
This program reads a file containing function prototypes
|
||||||
|
(like syscall_aix.go) and generates system call bodies.
|
||||||
|
The prototypes are marked by lines beginning with "//sys"
|
||||||
|
and read like func declarations if //sys is replaced by func, but:
|
||||||
|
* The parameter lists must give a name for each argument.
|
||||||
|
This includes return parameters.
|
||||||
|
* The parameter lists must give a type for each argument:
|
||||||
|
the (x, y, z int) shorthand is not allowed.
|
||||||
|
* If the return parameter is an error number, it must be named err.
|
||||||
|
* If go func name needs to be different than its libc name,
|
||||||
|
* or the function is not in libc, name could be specified
|
||||||
|
* at the end, after "=" sign, like
|
||||||
|
//sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
|
||||||
|
*/
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
b32 = flag.Bool("b32", false, "32bit big-endian")
|
||||||
|
l32 = flag.Bool("l32", false, "32bit little-endian")
|
||||||
|
aix = flag.Bool("aix", false, "aix")
|
||||||
|
tags = flag.String("tags", "", "build tags")
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this programs's commandline arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksyscall_aix_ppc.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return *tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
pack := ""
|
||||||
|
text := ""
|
||||||
|
cExtern := "/*\n#include <stdint.h>\n#include <stddef.h>\n"
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
|
||||||
|
pack = p[1]
|
||||||
|
}
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, err error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
inps = strings.Join(in, ", ")
|
||||||
|
outps = strings.Join(out, ", ")
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
|
||||||
|
// Check if value return, err return available
|
||||||
|
errvar := ""
|
||||||
|
retvar := ""
|
||||||
|
rettype := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
} else {
|
||||||
|
retvar = p.Name
|
||||||
|
rettype = p.Type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call name.
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = funct
|
||||||
|
}
|
||||||
|
sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
|
||||||
|
sysname = strings.ToLower(sysname) // All libc functions are lowercase.
|
||||||
|
|
||||||
|
cRettype := ""
|
||||||
|
if rettype == "unsafe.Pointer" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "uintptr" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "int" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int32" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int64" {
|
||||||
|
cRettype = "long long"
|
||||||
|
} else if rettype == "uint32" {
|
||||||
|
cRettype = "unsigned int"
|
||||||
|
} else if rettype == "uint64" {
|
||||||
|
cRettype = "unsigned long long"
|
||||||
|
} else {
|
||||||
|
cRettype = "int"
|
||||||
|
}
|
||||||
|
if sysname == "exit" {
|
||||||
|
cRettype = "void"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Change p.Types to c
|
||||||
|
var cIn []string
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t", "size_t")
|
||||||
|
} else if p.Type == "unsafe.Pointer" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
cIn = append(cIn, "long long")
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
cIn = append(cIn, "unsigned int")
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
cIn = append(cIn, "unsigned long long")
|
||||||
|
} else {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if funct != "fcntl" && funct != "FcntlInt" && funct != "readlen" && funct != "writelen" {
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
cExtern += "#define c_select select\n"
|
||||||
|
}
|
||||||
|
// Imports of system calls from libc
|
||||||
|
cExtern += fmt.Sprintf("%s %s", cRettype, sysname)
|
||||||
|
cIn := strings.Join(cIn, ", ")
|
||||||
|
cExtern += fmt.Sprintf("(%s);\n", cIn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// So file name.
|
||||||
|
if *aix {
|
||||||
|
if modname == "" {
|
||||||
|
modname = "libc.a/shr_64.o"
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
strconvfunc := "C.CString"
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
if outps != "" {
|
||||||
|
outps = fmt.Sprintf(" (%s)", outps)
|
||||||
|
}
|
||||||
|
if text != "" {
|
||||||
|
text += "\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps)
|
||||||
|
|
||||||
|
// Prepare arguments to Syscall.
|
||||||
|
var args []string
|
||||||
|
n := 0
|
||||||
|
argN := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, "C.uintptr_t(uintptr(unsafe.Pointer("+p.Name+")))")
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass nil in that case.
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1])
|
||||||
|
text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(unsafe.Pointer(_p%d)))", n))
|
||||||
|
n++
|
||||||
|
text += fmt.Sprintf("\tvar _p%d int\n", n)
|
||||||
|
text += fmt.Sprintf("\t_p%d = len(%s)\n", n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.size_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && endianness != "" {
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
}
|
||||||
|
n++
|
||||||
|
} else if p.Type == "bool" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d uint32\n", n)
|
||||||
|
text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n)
|
||||||
|
args = append(args, fmt.Sprintf("_p%d", n))
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name))
|
||||||
|
} else if p.Type == "unsafe.Pointer" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name))
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
if (argN == 2) && ((funct == "readlen") || (funct == "writelen")) {
|
||||||
|
args = append(args, fmt.Sprintf("C.size_t(%s)", p.Name))
|
||||||
|
} else if argN == 0 && funct == "fcntl" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else if (argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt")) {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
args = append(args, fmt.Sprintf("C.longlong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uint(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
args = append(args, fmt.Sprintf("C.ulonglong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
argN++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actual call.
|
||||||
|
arglist := strings.Join(args, ", ")
|
||||||
|
call := ""
|
||||||
|
if sysname == "exit" {
|
||||||
|
if errvar != "" {
|
||||||
|
call += "er :="
|
||||||
|
} else {
|
||||||
|
call += ""
|
||||||
|
}
|
||||||
|
} else if errvar != "" {
|
||||||
|
call += "r0,er :="
|
||||||
|
} else if retvar != "" {
|
||||||
|
call += "r0,_ :="
|
||||||
|
} else {
|
||||||
|
call += ""
|
||||||
|
}
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
call += fmt.Sprintf("C.c_%s(%s)", sysname, arglist)
|
||||||
|
} else {
|
||||||
|
call += fmt.Sprintf("C.%s(%s)", sysname, arglist)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assign return values.
|
||||||
|
body := ""
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" {
|
||||||
|
reg = "e1"
|
||||||
|
} else {
|
||||||
|
reg = "r0"
|
||||||
|
}
|
||||||
|
if reg != "e1" {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// verify return
if sysname != "exit" && errvar != "" {
	if regexp.MustCompile(`^uintptr`).FindStringSubmatch(cRettype) != nil {
		body += "\tif (uintptr(r0) ==^uintptr(0) && er != nil) {\n"
		body += fmt.Sprintf("\t\t%s = er\n", errvar)
		body += "\t}\n"
	} else {
		body += "\tif (r0 ==-1 && er != nil) {\n"
		body += fmt.Sprintf("\t\t%s = er\n", errvar)
		body += "\t}\n"
	}
} else if errvar != "" {
	body += "\tif (er != nil) {\n"
	body += fmt.Sprintf("\t\t%s = er\n", errvar)
	body += "\t}\n"
}

text += fmt.Sprintf("\t%s\n", call)
text += body

text += "\treturn\n"
text += "}\n"
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
imp := ""
|
||||||
|
if pack != "unix" {
|
||||||
|
imp = "import \"golang.org/x/sys/unix\"\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, cExtern, imp, text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
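The const above is the skeleton of the single cgo-based output file; the generator splices in the collected extern declarations, package name, optional unix import, and the wrapper bodies built in the loop. A minimal sketch of what one wrapper comes out looking like, assuming a hypothetical //sys line (Unlink/unlink and the path argument are illustrative choices, not taken from this commit):

//sys	Unlink(path string) (err error)

// roughly the emitted cgo wrapper (spacing simplified):
func Unlink(path string) (err error) {
	_p0 := uintptr(unsafe.Pointer(C.CString(path)))
	r0, er := C.unlink(C.uintptr_t(_p0))
	if r0 == -1 && er != nil {
		err = er
	}
	return
}

A matching "int unlink(uintptr_t);" line is accumulated into the extern block that the template wraps in the /* ... */ cgo preamble.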
|
||||||
614
vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go
generated
vendored
Normal file
@@ -0,0 +1,614 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
This program reads a file containing function prototypes
(like syscall_aix.go) and generates system call bodies.
The prototypes are marked by lines beginning with "//sys"
and read like func declarations if //sys is replaced by func, but:
	* The parameter lists must give a name for each argument.
	  This includes return parameters.
	* The parameter lists must give a type for each argument:
	  the (x, y, z int) shorthand is not allowed.
	* If the return parameter is an error number, it must be named err.
	* If go func name needs to be different than its libc name,
	* or the function is not in libc, name could be specified
	* at the end, after "=" sign, like
	  //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt


This program will generate three files and handle both gc and gccgo implementation:
  - zsyscall_aix_ppc64.go: the common part of each implementation (error handler, pointer creation)
  - zsyscall_aix_ppc64_gc.go: gc part with //go_cgo_import_dynamic and a call to syscall6
  - zsyscall_aix_ppc64_gccgo.go: gccgo part with C function and conversion to C type.

 The generated code looks like this

zsyscall_aix_ppc64.go
func asyscall(...) (n int, err error) {
	// Pointer Creation
	r1, e1 := callasyscall(...)
	// Type Conversion
	// Error Handler
	return
}

zsyscall_aix_ppc64_gc.go
//go:cgo_import_dynamic libc_asyscall asyscall "libc.a/shr_64.o"
//go:linkname libc_asyscall libc_asyscall
var asyscall syscallFunc

func callasyscall(...) (r1 uintptr, e1 Errno) {
	r1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_asyscall)), "nb_args", ... )
	return
}

zsyscall_aix_ppc64_ggcgo.go

// int asyscall(...)

import "C"

func callasyscall(...) (r1 uintptr, e1 Errno) {
	r1 = uintptr(C.asyscall(...))
	e1 = syscall.GetErrno()
	return
}
*/
|
||||||
|
|
||||||
|
package main

import (
	"bufio"
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"regexp"
	"strings"
)

var (
	b32  = flag.Bool("b32", false, "32bit big-endian")
	l32  = flag.Bool("l32", false, "32bit little-endian")
	aix  = flag.Bool("aix", false, "aix")
	tags = flag.String("tags", "", "build tags")
)

// cmdLine returns this program's commandline arguments
func cmdLine() string {
	return "go run mksyscall_aix_ppc64.go " + strings.Join(os.Args[1:], " ")
}

// buildTags returns build tags
func buildTags() string {
	return *tags
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc64.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
pack := ""
|
||||||
|
// GCCGO
|
||||||
|
textgccgo := ""
|
||||||
|
cExtern := "/*\n#include <stdint.h>\n"
|
||||||
|
// GC
|
||||||
|
textgc := ""
|
||||||
|
dynimports := ""
|
||||||
|
linknames := ""
|
||||||
|
var vars []string
|
||||||
|
// COMMON
|
||||||
|
textcommon := ""
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
|
||||||
|
pack = p[1]
|
||||||
|
}
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, err error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
inps = strings.Join(in, ", ")
|
||||||
|
outps = strings.Join(out, ", ")
|
||||||
|
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = funct
|
||||||
|
}
|
||||||
|
|
||||||
|
onlyCommon := false
|
||||||
|
if funct == "readlen" || funct == "writelen" || funct == "FcntlInt" || funct == "FcntlFlock" {
|
||||||
|
// This function calls another syscall which is already implemented.
|
||||||
|
// Therefore, the gc and gccgo part must not be generated.
|
||||||
|
onlyCommon = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
|
||||||
|
textcommon += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
if !onlyCommon {
|
||||||
|
textgccgo += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
textgc += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if value return, err return available
|
||||||
|
errvar := ""
|
||||||
|
rettype := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
} else {
|
||||||
|
rettype = p.Type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
|
||||||
|
sysname = strings.ToLower(sysname) // All libc functions are lowercase.
|
||||||
|
|
||||||
|
// GCCGO Prototype return type
|
||||||
|
cRettype := ""
|
||||||
|
if rettype == "unsafe.Pointer" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "uintptr" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "int" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int32" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int64" {
|
||||||
|
cRettype = "long long"
|
||||||
|
} else if rettype == "uint32" {
|
||||||
|
cRettype = "unsigned int"
|
||||||
|
} else if rettype == "uint64" {
|
||||||
|
cRettype = "unsigned long long"
|
||||||
|
} else {
|
||||||
|
cRettype = "int"
|
||||||
|
}
|
||||||
|
if sysname == "exit" {
|
||||||
|
cRettype = "void"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GCCGO Prototype arguments type
|
||||||
|
var cIn []string
|
||||||
|
for i, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t", "size_t")
|
||||||
|
} else if p.Type == "unsafe.Pointer" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
if (i == 0 || i == 2) && funct == "fcntl" {
|
||||||
|
// These fcntl arguments needs to be uintptr to be able to call FcntlInt and FcntlFlock
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
cIn = append(cIn, "long long")
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
cIn = append(cIn, "unsigned int")
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
cIn = append(cIn, "unsigned long long")
|
||||||
|
} else {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !onlyCommon {
|
||||||
|
// GCCGO Prototype Generation
|
||||||
|
// Imports of system calls from libc
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
cExtern += "#define c_select select\n"
|
||||||
|
}
|
||||||
|
cExtern += fmt.Sprintf("%s %s", cRettype, sysname)
|
||||||
|
cIn := strings.Join(cIn, ", ")
|
||||||
|
cExtern += fmt.Sprintf("(%s);\n", cIn)
|
||||||
|
}
|
||||||
|
// GC Library name
|
||||||
|
if modname == "" {
|
||||||
|
modname = "libc.a/shr_64.o"
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
sysvarname := fmt.Sprintf("libc_%s", sysname)
|
||||||
|
|
||||||
|
if !onlyCommon {
|
||||||
|
// GC Runtime import of function to allow cross-platform builds.
|
||||||
|
dynimports += fmt.Sprintf("//go:cgo_import_dynamic %s %s \"%s\"\n", sysvarname, sysname, modname)
|
||||||
|
// GC Link symbol to proc address variable.
|
||||||
|
linknames += fmt.Sprintf("//go:linkname %s %s\n", sysvarname, sysvarname)
|
||||||
|
// GC Library proc address variable.
|
||||||
|
vars = append(vars, sysvarname)
|
||||||
|
}
|
||||||
|
|
||||||
|
strconvfunc := "BytePtrFromString"
|
||||||
|
strconvtype := "*byte"
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
if outps != "" {
|
||||||
|
outps = fmt.Sprintf(" (%s)", outps)
|
||||||
|
}
|
||||||
|
if textcommon != "" {
|
||||||
|
textcommon += "\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
textcommon += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps)
|
||||||
|
|
||||||
|
// Prepare arguments to call.
|
||||||
|
var argscommon []string // Arguments in the common part
|
||||||
|
var argscall []string // Arguments for call prototype
|
||||||
|
var argsgc []string // Arguments for gc call (with syscall6)
|
||||||
|
var argsgccgo []string // Arguments for gccgo call (with C.name_of_syscall)
|
||||||
|
n := 0
|
||||||
|
argN := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(%s))", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
|
||||||
|
textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("_p%d uintptr ", n))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("_p%d", n))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
|
||||||
|
textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("_p%d", n))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass nil in that case.
|
||||||
|
textcommon += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1])
|
||||||
|
textcommon += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("len(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n), fmt.Sprintf("_lenp%d int", n))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("_p%d", n), fmt.Sprintf("uintptr(_lenp%d)", n))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n), fmt.Sprintf("C.size_t(_lenp%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && endianness != "" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses int64 with 32 bits mode. Case not yet implemented\n")
|
||||||
|
} else if p.Type == "bool" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses bool. Case not yet implemented\n")
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil || p.Type == "unsafe.Pointer" {
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
if (argN == 0 || argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt") || (funct == "FcntlFlock")) {
|
||||||
|
// These fcntl arguments need to be uintptr to be able to call FcntlInt and FcntlFlock
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
|
||||||
|
} else {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int32", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int64", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.longlong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uint32", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uint(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uint64", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.ulonglong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("int(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
argN++
|
||||||
|
}
|
||||||
|
nargs := len(argsgc)
|
||||||
|
|
||||||
|
// COMMON function generation
|
||||||
|
argscommonlist := strings.Join(argscommon, ", ")
|
||||||
|
callcommon := fmt.Sprintf("call%s(%s)", sysname, argscommonlist)
|
||||||
|
ret := []string{"_", "_"}
|
||||||
|
body := ""
|
||||||
|
doErrno := false
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" {
|
||||||
|
reg = "e1"
|
||||||
|
ret[1] = reg
|
||||||
|
doErrno = true
|
||||||
|
} else {
|
||||||
|
reg = "r0"
|
||||||
|
ret[0] = reg
|
||||||
|
}
|
||||||
|
if p.Type == "bool" {
|
||||||
|
reg = fmt.Sprintf("%s != 0", reg)
|
||||||
|
}
|
||||||
|
if reg != "e1" {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ret[0] == "_" && ret[1] == "_" {
|
||||||
|
textcommon += fmt.Sprintf("\t%s\n", callcommon)
|
||||||
|
} else {
|
||||||
|
textcommon += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], callcommon)
|
||||||
|
}
|
||||||
|
textcommon += body
|
||||||
|
|
||||||
|
if doErrno {
|
||||||
|
textcommon += "\tif e1 != 0 {\n"
|
||||||
|
textcommon += "\t\terr = errnoErr(e1)\n"
|
||||||
|
textcommon += "\t}\n"
|
||||||
|
}
|
||||||
|
textcommon += "\treturn\n"
|
||||||
|
textcommon += "}\n"
|
||||||
|
|
||||||
|
if onlyCommon {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// CALL Prototype
|
||||||
|
callProto := fmt.Sprintf("func call%s(%s) (r1 uintptr, e1 Errno) {\n", sysname, strings.Join(argscall, ", "))
|
||||||
|
|
||||||
|
// GC function generation
|
||||||
|
asm := "syscall6"
|
||||||
|
if nonblock != nil {
|
||||||
|
asm = "rawSyscall6"
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(argsgc) <= 6 {
|
||||||
|
for len(argsgc) < 6 {
|
||||||
|
argsgc = append(argsgc, "0")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: too many arguments to system call", funct)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
argsgclist := strings.Join(argsgc, ", ")
|
||||||
|
callgc := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, argsgclist)
|
||||||
|
|
||||||
|
textgc += callProto
|
||||||
|
textgc += fmt.Sprintf("\tr1, _, e1 = %s\n", callgc)
|
||||||
|
textgc += "\treturn\n}\n"
|
||||||
|
|
||||||
|
// GCCGO function generation
|
||||||
|
argsgccgolist := strings.Join(argsgccgo, ", ")
|
||||||
|
var callgccgo string
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
callgccgo = fmt.Sprintf("C.c_%s(%s)", sysname, argsgccgolist)
|
||||||
|
} else {
|
||||||
|
callgccgo = fmt.Sprintf("C.%s(%s)", sysname, argsgccgolist)
|
||||||
|
}
|
||||||
|
textgccgo += callProto
|
||||||
|
textgccgo += fmt.Sprintf("\tr1 = uintptr(%s)\n", callgccgo)
|
||||||
|
textgccgo += "\te1 = syscall.GetErrno()\n"
|
||||||
|
textgccgo += "\treturn\n}\n"
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
imp := ""
|
||||||
|
if pack != "unix" {
|
||||||
|
imp = "import \"golang.org/x/sys/unix\"\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print zsyscall_aix_ppc64.go
|
||||||
|
err := ioutil.WriteFile("zsyscall_aix_ppc64.go",
|
||||||
|
[]byte(fmt.Sprintf(srcTemplate1, cmdLine(), buildTags(), pack, imp, textcommon)),
|
||||||
|
0644)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print zsyscall_aix_ppc64_gc.go
|
||||||
|
vardecls := "\t" + strings.Join(vars, ",\n\t")
|
||||||
|
vardecls += " syscallFunc"
|
||||||
|
err = ioutil.WriteFile("zsyscall_aix_ppc64_gc.go",
|
||||||
|
[]byte(fmt.Sprintf(srcTemplate2, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, textgc)),
|
||||||
|
0644)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print zsyscall_aix_ppc64_gccgo.go
|
||||||
|
err = ioutil.WriteFile("zsyscall_aix_ppc64_gccgo.go",
|
||||||
|
[]byte(fmt.Sprintf(srcTemplate3, cmdLine(), buildTags(), pack, cExtern, imp, textgccgo)),
|
||||||
|
0644)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate1 = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
||||||
|
const srcTemplate2 = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
// +build !gccgo
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
type syscallFunc uintptr
|
||||||
|
|
||||||
|
var (
|
||||||
|
%s
|
||||||
|
)
|
||||||
|
|
||||||
|
// Implemented in runtime/syscall_aix.go.
|
||||||
|
func rawSyscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno)
|
||||||
|
func syscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno)
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
||||||
|
const srcTemplate3 = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
// +build gccgo
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
%s
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
import (
|
||||||
|
"syscall"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
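For orientation, a sketch of how one declaration fans out across the three templates above, assuming a hypothetical //sys	Fsync(fd int) (err error) line (Fsync/fsync is an illustrative name, not taken from this commit). The common file gets the exported wrapper, the gc file gets the dynamic import plus the syscall6 trampoline (rawSyscall6 when the line is //sysnb), and the gccgo file gets the corresponding C call:

// zsyscall_aix_ppc64.go (common part)
func Fsync(fd int) (err error) {
	_, e1 := callfsync(fd)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// zsyscall_aix_ppc64_gc.go (gc part)
//go:cgo_import_dynamic libc_fsync fsync "libc.a/shr_64.o"
//go:linkname libc_fsync libc_fsync
var libc_fsync syscallFunc // declared inside the generated var block

func callfsync(fd int) (r1 uintptr, e1 Errno) {
	r1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_fsync)), 1, uintptr(fd), 0, 0, 0, 0, 0)
	return
}

In x/sys itself the generator is invoked roughly as "go run mksyscall_aix_ppc64.go -aix -tags aix,ppc64 syscall_aix.go syscall_aix_ppc64.go"; that command line follows the upstream build scripts and is an assumption here, not something shown in this diff.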
|
||||||
335
vendor/golang.org/x/sys/unix/mksyscall_solaris.go
generated
vendored
Normal file
@@ -0,0 +1,335 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
This program reads a file containing function prototypes
(like syscall_solaris.go) and generates system call bodies.
The prototypes are marked by lines beginning with "//sys"
and read like func declarations if //sys is replaced by func, but:
	* The parameter lists must give a name for each argument.
	  This includes return parameters.
	* The parameter lists must give a type for each argument:
	  the (x, y, z int) shorthand is not allowed.
	* If the return parameter is an error number, it must be named err.
	* If go func name needs to be different than its libc name,
	* or the function is not in libc, name could be specified
	* at the end, after "=" sign, like
	  //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
*/

package main

import (
	"bufio"
	"flag"
	"fmt"
	"os"
	"regexp"
	"strings"
)

var (
	b32  = flag.Bool("b32", false, "32bit big-endian")
	l32  = flag.Bool("l32", false, "32bit little-endian")
	tags = flag.String("tags", "", "build tags")
)

// cmdLine returns this program's commandline arguments
func cmdLine() string {
	return "go run mksyscall_solaris.go " + strings.Join(os.Args[1:], " ")
}

// buildTags returns build tags
func buildTags() string {
	return *tags
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall_solaris.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
pack := ""
|
||||||
|
text := ""
|
||||||
|
dynimports := ""
|
||||||
|
linknames := ""
|
||||||
|
var vars []string
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
|
||||||
|
pack = p[1]
|
||||||
|
}
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, err error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
inps = strings.Join(in, ", ")
|
||||||
|
outps = strings.Join(out, ", ")
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
|
||||||
|
// So file name.
|
||||||
|
if modname == "" {
|
||||||
|
modname = "libc"
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call name.
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = funct
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call pointer variable name.
|
||||||
|
sysvarname := fmt.Sprintf("proc%s", sysname)
|
||||||
|
|
||||||
|
strconvfunc := "BytePtrFromString"
|
||||||
|
strconvtype := "*byte"
|
||||||
|
|
||||||
|
sysname = strings.ToLower(sysname) // All libc functions are lowercase.
|
||||||
|
|
||||||
|
// Runtime import of function to allow cross-platform builds.
|
||||||
|
dynimports += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"%s.so\"\n", sysname, sysname, modname)
|
||||||
|
// Link symbol to proc address variable.
|
||||||
|
linknames += fmt.Sprintf("//go:linkname %s libc_%s\n", sysvarname, sysname)
|
||||||
|
// Library proc address variable.
|
||||||
|
vars = append(vars, sysvarname)
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
outlist := strings.Join(out, ", ")
|
||||||
|
if outlist != "" {
|
||||||
|
outlist = fmt.Sprintf(" (%s)", outlist)
|
||||||
|
}
|
||||||
|
if text != "" {
|
||||||
|
text += "\n"
|
||||||
|
}
|
||||||
|
text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outlist)
|
||||||
|
|
||||||
|
// Check if err return available
|
||||||
|
errvar := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare arguments to Syscall.
|
||||||
|
var args []string
|
||||||
|
n := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))")
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
text += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
|
||||||
|
text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
text += fmt.Sprintf("\t_p%d, _ = %s(%s)\n", n, strconvfunc, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if s := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); s != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass nil in that case.
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *%s\n", n, s[1])
|
||||||
|
text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("uintptr(len(%s))", p.Name))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && endianness != "" {
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "bool" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d uint32\n", n)
|
||||||
|
text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
nargs := len(args)
|
||||||
|
|
||||||
|
// Determine which form to use; pad args with zeros.
|
||||||
|
asm := "sysvicall6"
|
||||||
|
if nonblock != nil {
|
||||||
|
asm = "rawSysvicall6"
|
||||||
|
}
|
||||||
|
if len(args) <= 6 {
|
||||||
|
for len(args) < 6 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: too many arguments to system call\n", path)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actual call.
|
||||||
|
arglist := strings.Join(args, ", ")
|
||||||
|
call := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, arglist)
|
||||||
|
|
||||||
|
// Assign return values.
|
||||||
|
body := ""
|
||||||
|
ret := []string{"_", "_", "_"}
|
||||||
|
doErrno := false
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" {
|
||||||
|
reg = "e1"
|
||||||
|
ret[2] = reg
|
||||||
|
doErrno = true
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i] = reg
|
||||||
|
}
|
||||||
|
if p.Type == "bool" {
|
||||||
|
reg = fmt.Sprintf("%d != 0", reg)
|
||||||
|
}
|
||||||
|
if p.Type == "int64" && endianness != "" {
|
||||||
|
// 64-bit number in r1:r0 or r0:r1.
|
||||||
|
if i+2 > len(out) {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: not enough registers for int64 return\n", path)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1)
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i)
|
||||||
|
}
|
||||||
|
ret[i] = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i+1] = fmt.Sprintf("r%d", i+1)
|
||||||
|
}
|
||||||
|
if reg != "e1" {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" {
|
||||||
|
text += fmt.Sprintf("\t%s\n", call)
|
||||||
|
} else {
|
||||||
|
text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call)
|
||||||
|
}
|
||||||
|
text += body
|
||||||
|
|
||||||
|
if doErrno {
|
||||||
|
text += "\tif e1 != 0 {\n"
|
||||||
|
text += "\t\terr = e1\n"
|
||||||
|
text += "\t}\n"
|
||||||
|
}
|
||||||
|
text += "\treturn\n"
|
||||||
|
text += "}\n"
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
imp := ""
|
||||||
|
if pack != "unix" {
|
||||||
|
imp = "import \"golang.org/x/sys/unix\"\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
vardecls := "\t" + strings.Join(vars, ",\n\t")
|
||||||
|
vardecls += " syscallFunc"
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
import (
|
||||||
|
"syscall"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
var (
|
||||||
|
%s
|
||||||
|
)
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
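As with the AIX generator, a quick sketch of the output shape for one hypothetical line, //sys	Fchdir(fd int) (err error) (Fchdir/fchdir is an illustrative name, not taken from this commit). The dynimports/linknames/vars strings above become the proc-variable plumbing, and the wrapper funnels everything through sysvicall6:

//go:cgo_import_dynamic libc_fchdir fchdir "libc.so"
//go:linkname procFchdir libc_fchdir
var procFchdir syscallFunc // declared inside the generated var block

func Fchdir(fd int) (err error) {
	_, _, e1 := sysvicall6(uintptr(unsafe.Pointer(&procFchdir)), 1, uintptr(fd), 0, 0, 0, 0, 0)
	if e1 != 0 {
		err = e1
	}
	return
}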
|
||||||
190
vendor/golang.org/x/sys/unix/mksysnum.go
generated
vendored
Normal file
@@ -0,0 +1,190 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Generate system call table for DragonFly, NetBSD,
// FreeBSD, OpenBSD or Darwin from master list
// (for example, /usr/src/sys/kern/syscalls.master or
// sys/syscall.h).
package main

import (
	"bufio"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"regexp"
	"strings"
)

var (
	goos, goarch string
)

// cmdLine returns this program's commandline arguments
func cmdLine() string {
	return "go run mksysnum.go " + strings.Join(os.Args[1:], " ")
}

// buildTags returns build tags
func buildTags() string {
	return fmt.Sprintf("%s,%s", goarch, goos)
}
|
||||||
|
|
||||||
|
func checkErr(err error) {
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// source string and substring slice for regexp
|
||||||
|
type re struct {
|
||||||
|
str string // source string
|
||||||
|
sub []string // matched sub-string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match performs regular expression match
|
||||||
|
func (r *re) Match(exp string) bool {
|
||||||
|
r.sub = regexp.MustCompile(exp).FindStringSubmatch(r.str)
|
||||||
|
if r.sub != nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchFile fetches a text file from URL
|
||||||
|
func fetchFile(URL string) io.Reader {
|
||||||
|
resp, err := http.Get(URL)
|
||||||
|
checkErr(err)
|
||||||
|
defer resp.Body.Close()
|
||||||
|
body, err := ioutil.ReadAll(resp.Body)
|
||||||
|
checkErr(err)
|
||||||
|
return strings.NewReader(string(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
// readFile reads a text file from path
func readFile(path string) io.Reader {
	file, err := os.Open(path)
	checkErr(err)
	return file
}
|
||||||
|
|
||||||
|
func format(name, num, proto string) string {
|
||||||
|
name = strings.ToUpper(name)
|
||||||
|
// There are multiple entries for enosys and nosys, so comment them out.
|
||||||
|
nm := re{str: name}
|
||||||
|
if nm.Match(`^SYS_E?NOSYS$`) {
|
||||||
|
name = fmt.Sprintf("// %s", name)
|
||||||
|
}
|
||||||
|
if name == `SYS_SYS_EXIT` {
|
||||||
|
name = `SYS_EXIT`
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(" %s = %s; // %s\n", name, num, proto)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Get the OS (using GOOS_TARGET if it exists)
|
||||||
|
goos = os.Getenv("GOOS_TARGET")
|
||||||
|
if goos == "" {
|
||||||
|
goos = os.Getenv("GOOS")
|
||||||
|
}
|
||||||
|
// Get the architecture (using GOARCH_TARGET if it exists)
|
||||||
|
goarch = os.Getenv("GOARCH_TARGET")
|
||||||
|
if goarch == "" {
|
||||||
|
goarch = os.Getenv("GOARCH")
|
||||||
|
}
|
||||||
|
// Check if GOOS and GOARCH environment variables are defined
|
||||||
|
if goarch == "" || goos == "" {
|
||||||
|
fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
file := strings.TrimSpace(os.Args[1])
|
||||||
|
var syscalls io.Reader
|
||||||
|
if strings.HasPrefix(file, "https://") || strings.HasPrefix(file, "http://") {
|
||||||
|
// Download syscalls.master file
|
||||||
|
syscalls = fetchFile(file)
|
||||||
|
} else {
|
||||||
|
syscalls = readFile(file)
|
||||||
|
}
|
||||||
|
|
||||||
|
var text, line string
|
||||||
|
s := bufio.NewScanner(syscalls)
|
||||||
|
for s.Scan() {
|
||||||
|
t := re{str: line}
|
||||||
|
if t.Match(`^(.*)\\$`) {
|
||||||
|
// Handle continuation
|
||||||
|
line = t.sub[1]
|
||||||
|
line += strings.TrimLeft(s.Text(), " \t")
|
||||||
|
} else {
|
||||||
|
// New line
|
||||||
|
line = s.Text()
|
||||||
|
}
|
||||||
|
t = re{str: line}
|
||||||
|
if t.Match(`\\$`) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
t = re{str: line}
|
||||||
|
|
||||||
|
switch goos {
|
||||||
|
case "dragonfly":
|
||||||
|
if t.Match(`^([0-9]+)\s+STD\s+({ \S+\s+(\w+).*)$`) {
|
||||||
|
num, proto := t.sub[1], t.sub[2]
|
||||||
|
name := fmt.Sprintf("SYS_%s", t.sub[3])
|
||||||
|
text += format(name, num, proto)
|
||||||
|
}
|
||||||
|
case "freebsd":
|
||||||
|
if t.Match(`^([0-9]+)\s+\S+\s+(?:NO)?STD\s+({ \S+\s+(\w+).*)$`) {
|
||||||
|
num, proto := t.sub[1], t.sub[2]
|
||||||
|
name := fmt.Sprintf("SYS_%s", t.sub[3])
|
||||||
|
text += format(name, num, proto)
|
||||||
|
}
|
||||||
|
case "openbsd":
|
||||||
|
if t.Match(`^([0-9]+)\s+STD\s+(NOLOCK\s+)?({ \S+\s+\*?(\w+).*)$`) {
|
||||||
|
num, proto, name := t.sub[1], t.sub[3], t.sub[4]
|
||||||
|
text += format(name, num, proto)
|
||||||
|
}
|
||||||
|
case "netbsd":
|
||||||
|
if t.Match(`^([0-9]+)\s+((STD)|(NOERR))\s+(RUMP\s+)?({\s+\S+\s*\*?\s*\|(\S+)\|(\S*)\|(\w+).*\s+})(\s+(\S+))?$`) {
|
||||||
|
num, proto, compat := t.sub[1], t.sub[6], t.sub[8]
|
||||||
|
name := t.sub[7] + "_" + t.sub[9]
|
||||||
|
if t.sub[11] != "" {
|
||||||
|
name = t.sub[7] + "_" + t.sub[11]
|
||||||
|
}
|
||||||
|
name = strings.ToUpper(name)
|
||||||
|
if compat == "" || compat == "13" || compat == "30" || compat == "50" {
|
||||||
|
text += fmt.Sprintf(" %s = %s; // %s\n", name, num, proto)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case "darwin":
|
||||||
|
if t.Match(`^#define\s+SYS_(\w+)\s+([0-9]+)`) {
|
||||||
|
name, num := t.sub[1], t.sub[2]
|
||||||
|
name = strings.ToUpper(name)
|
||||||
|
text += fmt.Sprintf(" SYS_%s = %s;\n", name, num)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
fmt.Fprintf(os.Stderr, "unrecognized GOOS=%s\n", goos)
|
||||||
|
os.Exit(1)
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err := s.Err()
|
||||||
|
checkErr(err)
|
||||||
|
|
||||||
|
fmt.Printf(template, cmdLine(), buildTags(), text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const template = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package unix
|
||||||
|
|
||||||
|
const(
|
||||||
|
%s)`
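To make the table generation concrete: on darwin the loop above matches #define lines from sys/syscall.h, upper-cases the name, and appends one constant per syscall, so an input line such as "#define SYS_getpid 20" (an illustrative header line, not part of this diff) ends up in the printed file roughly as:

// sketch of the generated zsysnum file for GOOS=darwin
package unix

const (
	SYS_GETPID = 20;
)

The program reads the master list from os.Args[1] (a local path or an http/https URL) and writes to stdout, so the usual invocation redirects the output into the matching zsysnum_${GOOS}_${GOARCH}.go file; that redirection is convention, not something enforced by the code above.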
|
||||||
236
vendor/golang.org/x/sys/unix/types_aix.go
generated
vendored
Normal file
@@ -0,0 +1,236 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore
// +build aix

/*
Input to cgo -godefs. See also mkerrors.sh and mkall.sh
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix
|
||||||
|
|
||||||
|
/*
|
||||||
|
#include <sys/types.h>
|
||||||
|
#include <sys/time.h>
|
||||||
|
#include <sys/limits.h>
|
||||||
|
#include <sys/un.h>
|
||||||
|
#include <utime.h>
|
||||||
|
#include <sys/utsname.h>
|
||||||
|
#include <sys/poll.h>
|
||||||
|
#include <sys/resource.h>
|
||||||
|
#include <sys/stat.h>
|
||||||
|
#include <sys/statfs.h>
|
||||||
|
#include <sys/termio.h>
|
||||||
|
#include <sys/ioctl.h>
|
||||||
|
|
||||||
|
#include <termios.h>
|
||||||
|
|
||||||
|
#include <net/if.h>
|
||||||
|
#include <net/if_dl.h>
|
||||||
|
#include <netinet/in.h>
|
||||||
|
#include <netinet/icmp6.h>
|
||||||
|
|
||||||
|
|
||||||
|
#include <dirent.h>
|
||||||
|
#include <fcntl.h>
|
||||||
|
|
||||||
|
enum {
|
||||||
|
sizeofPtr = sizeof(void*),
|
||||||
|
};
|
||||||
|
|
||||||
|
union sockaddr_all {
|
||||||
|
struct sockaddr s1; // this one gets used for fields
|
||||||
|
struct sockaddr_in s2; // these pad it out
|
||||||
|
struct sockaddr_in6 s3;
|
||||||
|
struct sockaddr_un s4;
|
||||||
|
struct sockaddr_dl s5;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct sockaddr_any {
|
||||||
|
struct sockaddr addr;
|
||||||
|
char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
|
||||||
|
};
|
||||||
|
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
|
||||||
|
// Machine characteristics
|
||||||
|
|
||||||
|
const (
|
||||||
|
SizeofPtr = C.sizeofPtr
|
||||||
|
SizeofShort = C.sizeof_short
|
||||||
|
SizeofInt = C.sizeof_int
|
||||||
|
SizeofLong = C.sizeof_long
|
||||||
|
SizeofLongLong = C.sizeof_longlong
|
||||||
|
PathMax = C.PATH_MAX
|
||||||
|
)
|
||||||
|
|
||||||
|
// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

type off64 C.off64_t
type off C.off_t
type Mode_t C.mode_t

// Time

type Timespec C.struct_timespec

type StTimespec C.struct_st_timespec

type Timeval C.struct_timeval

type Timeval32 C.struct_timeval32

type Timex C.struct_timex

type Time_t C.time_t

type Tms C.struct_tms

type Utimbuf C.struct_utimbuf

type Timezone C.struct_timezone

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit64

type Pid_t C.pid_t

type _Gid_t C.gid_t

type dev_t C.dev_t

// Files

type Stat_t C.struct_stat

type StatxTimestamp C.struct_statx_timestamp

type Statx_t C.struct_statx

type Dirent C.struct_dirent

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Cmsghdr C.struct_cmsghdr

type ICMPv6Filter C.struct_icmp6_filter

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type IPv6MTUInfo C.struct_ip6_mtuinfo

type Linger C.struct_linger

type Msghdr C.struct_msghdr

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny   = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix  = C.sizeof_struct_sockaddr_un
	SizeofLinger        = C.sizeof_struct_linger
	SizeofIPMreq        = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq      = C.sizeof_struct_ipv6_mreq
	SizeofIPv6MTUInfo   = C.sizeof_struct_ip6_mtuinfo
	SizeofMsghdr        = C.sizeof_struct_msghdr
	SizeofCmsghdr       = C.sizeof_struct_cmsghdr
	SizeofICMPv6Filter  = C.sizeof_struct_icmp6_filter
)

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
)

type IfMsgHdr C.struct_if_msghdr

// Misc

type FdSet C.fd_set

type Utsname C.struct_utsname

type Ustat_t C.struct_ustat

type Sigset_t C.sigset_t

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_REMOVEDIR        = C.AT_REMOVEDIR
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// Terminal handling

type Termios C.struct_termios

type Termio C.struct_termio

type Winsize C.struct_winsize

//poll

type PollFd struct {
	Fd      int32
	Events  uint16
	Revents uint16
}

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

//flock_t

type Flock_t C.struct_flock64

// Statfs

type Fsid_t C.struct_fsid_t
type Fsid64_t C.struct_fsid64_t

type Statfs_t C.struct_statfs

const RNDGETENTCNT = 0x80045200
283 vendor/golang.org/x/sys/unix/types_darwin.go (generated, vendored, new file)
@@ -0,0 +1,283 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define __DARWIN_UNIX03 0
#define KERNEL
#define _DARWIN_USE_64_BIT_INODE
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <unistd.h>
#include <mach/mach.h>
#include <mach/message.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/if_var.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

type Timeval32 C.struct_timeval32

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat64

type Statfs_t C.struct_statfs64

type Flock_t C.struct_flock

type Fstore_t C.struct_fstore

type Radvisory_t C.struct_radvisory

type Fbootstraptransfer_t C.struct_fbootstraptransfer

type Log2phys_t C.struct_log2phys

type Fsid C.struct_fsid

type Dirent C.struct_dirent

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet4Pktinfo C.struct_in_pktinfo

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet4Pktinfo     = C.sizeof_struct_in_pktinfo
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo      = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter     = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT    = C.PT_CONTINUE
	PTRACE_KILL    = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr    = C.sizeof_struct_if_msghdr
	SizeofIfData      = C.sizeof_struct_if_data
	SizeofIfaMsghdr   = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr  = C.sizeof_struct_ifma_msghdr
	SizeofIfmaMsghdr2 = C.sizeof_struct_ifma_msghdr2
	SizeofRtMsghdr    = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics   = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type IfmaMsghdr2 C.struct_ifma_msghdr2

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat    = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn    = C.sizeof_struct_bpf_insn
	SizeofBpfHdr     = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_REMOVEDIR        = C.AT_REMOVEDIR
	AT_SYMLINK_FOLLOW   = C.AT_SYMLINK_FOLLOW
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// uname

type Utsname C.struct_utsname

// Clockinfo

const SizeofClockinfo = C.sizeof_struct_clockinfo

type Clockinfo C.struct_clockinfo
263 vendor/golang.org/x/sys/unix/types_dragonfly.go (generated, vendored, new file)
@@ -0,0 +1,263 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.struct_fsid

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo      = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter     = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT    = C.PT_CONTINUE
	PTRACE_KILL    = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr         = C.sizeof_struct_if_msghdr
	SizeofIfData           = C.sizeof_struct_if_data
	SizeofIfaMsghdr        = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr       = C.sizeof_struct_ifma_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr         = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics        = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat    = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn    = C.sizeof_struct_bpf_insn
	SizeofBpfHdr     = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Uname

type Utsname C.struct_utsname
356 vendor/golang.org/x/sys/unix/types_freebsd.go (generated, vendored, new file)
@@ -0,0 +1,356 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define _WANT_FREEBSD11_STAT 1
#define _WANT_FREEBSD11_STATFS 1
#define _WANT_FREEBSD11_DIRENT 1
#define _WANT_FREEBSD11_KEVENT 1

#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/capsicum.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

// This structure is a duplicate of if_data on FreeBSD 8-STABLE.
// See /usr/include/net/if.h.
struct if_data8 {
	u_char  ifi_type;
	u_char  ifi_physical;
	u_char  ifi_addrlen;
	u_char  ifi_hdrlen;
	u_char  ifi_link_state;
	u_char  ifi_spare_char1;
	u_char  ifi_spare_char2;
	u_char  ifi_datalen;
	u_long  ifi_mtu;
	u_long  ifi_metric;
	u_long  ifi_baudrate;
	u_long  ifi_ipackets;
	u_long  ifi_ierrors;
	u_long  ifi_opackets;
	u_long  ifi_oerrors;
	u_long  ifi_collisions;
	u_long  ifi_ibytes;
	u_long  ifi_obytes;
	u_long  ifi_imcasts;
	u_long  ifi_omcasts;
	u_long  ifi_iqdrops;
	u_long  ifi_noproto;
	u_long  ifi_hwassist;
// FIXME: these are now unions, so maybe need to change definitions?
#undef ifi_epoch
	time_t  ifi_epoch;
#undef ifi_lastchange
	struct  timeval ifi_lastchange;
};

// This structure is a duplicate of if_msghdr on FreeBSD 8-STABLE.
// See /usr/include/net/if.h.
struct if_msghdr8 {
	u_short ifm_msglen;
	u_char  ifm_version;
	u_char  ifm_type;
	int     ifm_addrs;
	int     ifm_flags;
	u_short ifm_index;
	struct  if_data8 ifm_data;
};
*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

const (
	_statfsVersion = C.STATFS_VERSION
	_dirblksiz     = C.DIRBLKSIZ
)

type Stat_t C.struct_stat

type stat_freebsd11_t C.struct_freebsd11_stat

type Statfs_t C.struct_statfs

type statfs_freebsd11_t C.struct_freebsd11_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type dirent_freebsd11 C.struct_freebsd11_dirent

type Fsid C.struct_fsid

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Advice to Fadvise

const (
	FADV_NORMAL     = C.POSIX_FADV_NORMAL
	FADV_RANDOM     = C.POSIX_FADV_RANDOM
	FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL
	FADV_WILLNEED   = C.POSIX_FADV_WILLNEED
	FADV_DONTNEED   = C.POSIX_FADV_DONTNEED
	FADV_NOREUSE    = C.POSIX_FADV_NOREUSE
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPMreqn C.struct_ip_mreqn

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPMreqn          = C.sizeof_struct_ip_mreqn
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo      = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter     = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT    = C.PT_CONTINUE
	PTRACE_KILL    = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent_freebsd11

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	sizeofIfMsghdr         = C.sizeof_struct_if_msghdr
	SizeofIfMsghdr         = C.sizeof_struct_if_msghdr8
	sizeofIfData           = C.sizeof_struct_if_data
	SizeofIfData           = C.sizeof_struct_if_data8
	SizeofIfaMsghdr        = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr       = C.sizeof_struct_ifma_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr         = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics        = C.sizeof_struct_rt_metrics
)

type ifMsghdr C.struct_if_msghdr

type IfMsghdr C.struct_if_msghdr8

type ifData C.struct_if_data

type IfData C.struct_if_data8

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion    = C.sizeof_struct_bpf_version
	SizeofBpfStat       = C.sizeof_struct_bpf_stat
	SizeofBpfZbuf       = C.sizeof_struct_bpf_zbuf
	SizeofBpfProgram    = C.sizeof_struct_bpf_program
	SizeofBpfInsn       = C.sizeof_struct_bpf_insn
	SizeofBpfHdr        = C.sizeof_struct_bpf_hdr
	SizeofBpfZbufHeader = C.sizeof_struct_bpf_zbuf_header
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfZbuf C.struct_bpf_zbuf

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfZbufHeader C.struct_bpf_zbuf_header

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_REMOVEDIR        = C.AT_REMOVEDIR
	AT_SYMLINK_FOLLOW   = C.AT_SYMLINK_FOLLOW
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR      = C.POLLERR
	POLLHUP      = C.POLLHUP
	POLLIN       = C.POLLIN
	POLLINIGNEOF = C.POLLINIGNEOF
	POLLNVAL     = C.POLLNVAL
	POLLOUT      = C.POLLOUT
	POLLPRI      = C.POLLPRI
	POLLRDBAND   = C.POLLRDBAND
	POLLRDNORM   = C.POLLRDNORM
	POLLWRBAND   = C.POLLWRBAND
	POLLWRNORM   = C.POLLWRNORM
)

// Capabilities

type CapRights C.struct_cap_rights

// Uname

type Utsname C.struct_utsname
289 vendor/golang.org/x/sys/unix/types_netbsd.go (generated, vendored, new file)
@@ -0,0 +1,289 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/sysctl.h>
#include <sys/time.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.fsid_t

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Advice to Fadvise

const (
	FADV_NORMAL     = C.POSIX_FADV_NORMAL
	FADV_RANDOM     = C.POSIX_FADV_RANDOM
	FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL
	FADV_WILLNEED   = C.POSIX_FADV_WILLNEED
	FADV_DONTNEED   = C.POSIX_FADV_DONTNEED
	FADV_NOREUSE    = C.POSIX_FADV_NOREUSE
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo      = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter     = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT    = C.PT_CONTINUE
	PTRACE_KILL    = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr         = C.sizeof_struct_if_msghdr
	SizeofIfData           = C.sizeof_struct_if_data
	SizeofIfaMsghdr        = C.sizeof_struct_ifa_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr         = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics        = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

type Mclpool C.struct_mclpool

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat    = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn    = C.sizeof_struct_bpf_insn
	SizeofBpfHdr     = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfTimeval C.struct_bpf_timeval

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

type Ptmget C.struct_ptmget

// fchmodat-like syscalls.

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Sysctl

type Sysctlnode C.struct_sysctlnode

// Uname

type Utsname C.struct_utsname

// Clockinfo

const SizeofClockinfo = C.sizeof_struct_clockinfo

type Clockinfo C.struct_clockinfo
282 vendor/golang.org/x/sys/unix/types_openbsd.go (generated, vendored, new file)
@@ -0,0 +1,282 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <uvm/uvmexp.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.fsid_t

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo      = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter     = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT    = C.PT_CONTINUE
	PTRACE_KILL    = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr         = C.sizeof_struct_if_msghdr
	SizeofIfData           = C.sizeof_struct_if_data
	SizeofIfaMsghdr        = C.sizeof_struct_ifa_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr         = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics        = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

type Mclpool C.struct_mclpool

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat    = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn    = C.sizeof_struct_bpf_insn
	SizeofBpfHdr     = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfTimeval C.struct_bpf_timeval

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Signal Sets

type Sigset_t C.sigset_t

// Uname

type Utsname C.struct_utsname

// Uvmexp

const SizeofUvmexp = C.sizeof_struct_uvmexp

type Uvmexp C.struct_uvmexp

// Clockinfo

const SizeofClockinfo = C.sizeof_struct_clockinfo

type Clockinfo C.struct_clockinfo
266 vendor/golang.org/x/sys/unix/types_solaris.go (generated, vendored, new file)
@@ -0,0 +1,266 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
// These defines ensure that builds done on newer versions of Solaris are
// backwards-compatible with older versions of Solaris and
// OpenSolaris-based derivatives.
#define __USE_SUNOS_SOCKETS__          // msghdr
#define __USE_LEGACY_PROTOTYPES__      // iovec
#include <dirent.h>
#include <fcntl.h>
#include <netdb.h>
#include <limits.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <termio.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/statvfs.h>
#include <sys/time.h>
#include <sys/times.h>
#include <sys/types.h>
#include <sys/utsname.h>
#include <sys/un.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>
#include <ustat.h>
#include <utime.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
	PathMax        = C.PATH_MAX
	MaxHostNameLen = C.MAXHOSTNAMELEN
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

type Timeval32 C.struct_timeval32

type Tms C.struct_tms

type Utimbuf C.struct_utimbuf

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Flock_t C.struct_flock

type Dirent C.struct_dirent

// Filesystems

type _Fsblkcnt_t C.fsblkcnt_t

type Statvfs_t C.struct_statvfs

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo      = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter     = C.sizeof_struct_icmp6_filter
)

// Select

type FdSet C.fd_set

// Misc

type Utsname C.struct_utsname

type Ustat_t C.struct_ustat

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
	AT_SYMLINK_FOLLOW   = C.AT_SYMLINK_FOLLOW
	AT_REMOVEDIR        = C.AT_REMOVEDIR
	AT_EACCESS          = C.AT_EACCESS
)

// Routing and interface messages

const (
	SizeofIfMsghdr  = C.sizeof_struct_if_msghdr
	SizeofIfData    = C.sizeof_struct_if_data
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofRtMsghdr  = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat    = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn    = C.sizeof_struct_bpf_insn
	SizeofBpfHdr     = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfTimeval C.struct_bpf_timeval

type BpfHdr C.struct_bpf_hdr

// Terminal handling

type Termios C.struct_termios

type Termio C.struct_termio

type Winsize C.struct_winsize

// poll

type PollFd C.struct_pollfd

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)
556 vendor/golang.org/x/text/encoding/charmap/maketables.go generated vendored Normal file
File diff omitted (vendored generated file)
173 vendor/golang.org/x/text/encoding/htmlindex/gen.go generated vendored Normal file
File diff omitted (vendored generated file)
142 vendor/golang.org/x/text/encoding/internal/identifier/gen.go generated vendored Normal file
File diff omitted (vendored generated file)
161 vendor/golang.org/x/text/encoding/japanese/maketables.go generated vendored Normal file
File diff omitted (vendored generated file)
143 vendor/golang.org/x/text/encoding/korean/maketables.go generated vendored Normal file
File diff omitted (vendored generated file)
161 vendor/golang.org/x/text/encoding/simplifiedchinese/maketables.go generated vendored Normal file
File diff omitted (vendored generated file)
140 vendor/golang.org/x/text/encoding/traditionalchinese/maketables.go generated vendored Normal file
File diff omitted (vendored generated file)
64 vendor/golang.org/x/text/internal/language/compact/gen.go generated vendored Normal file
@@ -0,0 +1,64 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Language tag table generator.
// Data read from the web.

package main

import (
	"flag"
	"fmt"
	"log"

	"golang.org/x/text/internal/gen"
	"golang.org/x/text/unicode/cldr"
)

var (
	test = flag.Bool("test",
		false,
		"test existing tables; can be used to compare web data with package data.")
	outputFile = flag.String("output",
		"tables.go",
		"output file for generated tables")
)

func main() {
	gen.Init()

	w := gen.NewCodeWriter()
	defer w.WriteGoFile("tables.go", "compact")

	fmt.Fprintln(w, `import "golang.org/x/text/internal/language"`)

	b := newBuilder(w)
	gen.WriteCLDRVersion(w)

	b.writeCompactIndex()
}

type builder struct {
	w    *gen.CodeWriter
	data *cldr.CLDR
	supp *cldr.SupplementalData
}

func newBuilder(w *gen.CodeWriter) *builder {
	r := gen.OpenCLDRCoreZip()
	defer r.Close()
	d := &cldr.Decoder{}
	data, err := d.DecodeZip(r)
	if err != nil {
		log.Fatal(err)
	}
	b := builder{
		w:    w,
		data: data,
		supp: data.Supplemental(),
	}
	return &b
}
113 vendor/golang.org/x/text/internal/language/compact/gen_index.go generated vendored Normal file
File diff omitted (vendored generated file)
54 vendor/golang.org/x/text/internal/language/compact/gen_parents.go generated vendored Normal file
@@ -0,0 +1,54 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

import (
	"log"

	"golang.org/x/text/internal/gen"
	"golang.org/x/text/internal/language"
	"golang.org/x/text/internal/language/compact"
	"golang.org/x/text/unicode/cldr"
)

func main() {
	r := gen.OpenCLDRCoreZip()
	defer r.Close()

	d := &cldr.Decoder{}
	data, err := d.DecodeZip(r)
	if err != nil {
		log.Fatalf("DecodeZip: %v", err)
	}

	w := gen.NewCodeWriter()
	defer w.WriteGoFile("parents.go", "compact")

	// Create parents table.
	type ID uint16
	parents := make([]ID, compact.NumCompactTags)
	for _, loc := range data.Locales() {
		tag := language.MustParse(loc)
		index, ok := compact.FromTag(tag)
		if !ok {
			continue
		}
		parentIndex := compact.ID(0) // und
		for p := tag.Parent(); p != language.Und; p = p.Parent() {
			if x, ok := compact.FromTag(p); ok {
				parentIndex = x
				break
			}
		}
		parents[index] = ID(parentIndex)
	}

	w.WriteComment(`
	parents maps a compact index of a tag to the compact index of the parent of
	this tag.`)
	w.WriteVar("parents", parents)
}
1520
vendor/golang.org/x/text/internal/language/gen.go generated vendored Normal file
File diff suppressed because it is too large
20
vendor/golang.org/x/text/internal/language/gen_common.go generated vendored Normal file
@@ -0,0 +1,20 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

// This file contains code common to the maketables.go and the package code.

// AliasType is the type of an alias in AliasMap.
type AliasType int8

const (
	Deprecated AliasType = iota
	Macro
	Legacy

	AliasTypeUnknown AliasType = -1
)
305
vendor/golang.org/x/text/language/gen.go generated vendored Normal file
@@ -0,0 +1,305 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Language tag table generator.
// Data read from the web.

package main

import (
	"flag"
	"fmt"
	"io"
	"log"
	"sort"
	"strconv"
	"strings"

	"golang.org/x/text/internal/gen"
	"golang.org/x/text/internal/language"
	"golang.org/x/text/unicode/cldr"
)

var (
	test = flag.Bool("test",
		false,
		"test existing tables; can be used to compare web data with package data.")
	outputFile = flag.String("output",
		"tables.go",
		"output file for generated tables")
)

func main() {
	gen.Init()

	w := gen.NewCodeWriter()
	defer w.WriteGoFile("tables.go", "language")

	b := newBuilder(w)
	gen.WriteCLDRVersion(w)

	b.writeConstants()
	b.writeMatchData()
}

type builder struct {
	w    *gen.CodeWriter
	hw   io.Writer // MultiWriter for w and w.Hash
	data *cldr.CLDR
	supp *cldr.SupplementalData
}

func (b *builder) langIndex(s string) uint16 {
	return uint16(language.MustParseBase(s))
}

func (b *builder) regionIndex(s string) int {
	return int(language.MustParseRegion(s))
}

func (b *builder) scriptIndex(s string) int {
	return int(language.MustParseScript(s))
}

func newBuilder(w *gen.CodeWriter) *builder {
	r := gen.OpenCLDRCoreZip()
	defer r.Close()
	d := &cldr.Decoder{}
	data, err := d.DecodeZip(r)
	if err != nil {
		log.Fatal(err)
	}
	b := builder{
		w:    w,
		hw:   io.MultiWriter(w, w.Hash),
		data: data,
		supp: data.Supplemental(),
	}
	return &b
}

// writeConsts computes f(v) for all v in values and writes the results
// as constants named _v to a single constant block.
func (b *builder) writeConsts(f func(string) int, values ...string) {
	fmt.Fprintln(b.w, "const (")
	for _, v := range values {
		fmt.Fprintf(b.w, "\t_%s = %v\n", v, f(v))
	}
	fmt.Fprintln(b.w, ")")
}

// TODO: region inclusion data will probably not be use used in future matchers.

var langConsts = []string{
	"de", "en", "fr", "it", "mo", "no", "nb", "pt", "sh", "mul", "und",
}

var scriptConsts = []string{
	"Latn", "Hani", "Hans", "Hant", "Qaaa", "Qaai", "Qabx", "Zinh", "Zyyy",
	"Zzzz",
}

var regionConsts = []string{
	"001", "419", "BR", "CA", "ES", "GB", "MD", "PT", "UK", "US",
	"ZZ", "XA", "XC", "XK", // Unofficial tag for Kosovo.
}

func (b *builder) writeConstants() {
	b.writeConsts(func(s string) int { return int(b.langIndex(s)) }, langConsts...)
	b.writeConsts(b.regionIndex, regionConsts...)
	b.writeConsts(b.scriptIndex, scriptConsts...)
}

type mutualIntelligibility struct {
	want, have uint16
	distance   uint8
	oneway     bool
}

type scriptIntelligibility struct {
	wantLang, haveLang     uint16
	wantScript, haveScript uint8
	distance               uint8
	// Always oneway
}

type regionIntelligibility struct {
	lang     uint16 // compact language id
	script   uint8  // 0 means any
	group    uint8  // 0 means any; if bit 7 is set it means inverse
	distance uint8
	// Always twoway.
}

// writeMatchData writes tables with languages and scripts for which there is
// mutual intelligibility. The data is based on CLDR's languageMatching data.
// Note that we use a different algorithm than the one defined by CLDR and that
// we slightly modify the data. For example, we convert scores to confidence levels.
// We also drop all region-related data as we use a different algorithm to
// determine region equivalence.
func (b *builder) writeMatchData() {
	lm := b.supp.LanguageMatching.LanguageMatches
	cldr.MakeSlice(&lm).SelectAnyOf("type", "written_new")

	regionHierarchy := map[string][]string{}
	for _, g := range b.supp.TerritoryContainment.Group {
		regions := strings.Split(g.Contains, " ")
		regionHierarchy[g.Type] = append(regionHierarchy[g.Type], regions...)
	}
	regionToGroups := make([]uint8, language.NumRegions)

	idToIndex := map[string]uint8{}
	for i, mv := range lm[0].MatchVariable {
		if i > 6 {
			log.Fatalf("Too many groups: %d", i)
		}
		idToIndex[mv.Id] = uint8(i + 1)
		// TODO: also handle '-'
		for _, r := range strings.Split(mv.Value, "+") {
			todo := []string{r}
			for k := 0; k < len(todo); k++ {
				r := todo[k]
				regionToGroups[b.regionIndex(r)] |= 1 << uint8(i)
				todo = append(todo, regionHierarchy[r]...)
			}
		}
	}
	b.w.WriteVar("regionToGroups", regionToGroups)

	// maps language id to in- and out-of-group region.
	paradigmLocales := [][3]uint16{}
	locales := strings.Split(lm[0].ParadigmLocales[0].Locales, " ")
	for i := 0; i < len(locales); i += 2 {
		x := [3]uint16{}
		for j := 0; j < 2; j++ {
			pc := strings.SplitN(locales[i+j], "-", 2)
			x[0] = b.langIndex(pc[0])
			if len(pc) == 2 {
				x[1+j] = uint16(b.regionIndex(pc[1]))
			}
		}
		paradigmLocales = append(paradigmLocales, x)
	}
	b.w.WriteVar("paradigmLocales", paradigmLocales)

	b.w.WriteType(mutualIntelligibility{})
	b.w.WriteType(scriptIntelligibility{})
	b.w.WriteType(regionIntelligibility{})

	matchLang := []mutualIntelligibility{}
	matchScript := []scriptIntelligibility{}
	matchRegion := []regionIntelligibility{}
	// Convert the languageMatch entries in lists keyed by desired language.
	for _, m := range lm[0].LanguageMatch {
		// Different versions of CLDR use different separators.
		desired := strings.Replace(m.Desired, "-", "_", -1)
		supported := strings.Replace(m.Supported, "-", "_", -1)
		d := strings.Split(desired, "_")
		s := strings.Split(supported, "_")
		if len(d) != len(s) {
			log.Fatalf("not supported: desired=%q; supported=%q", desired, supported)
			continue
		}
		distance, _ := strconv.ParseInt(m.Distance, 10, 8)
		switch len(d) {
		case 2:
			if desired == supported && desired == "*_*" {
				continue
			}
			// language-script pair.
			matchScript = append(matchScript, scriptIntelligibility{
				wantLang:   uint16(b.langIndex(d[0])),
				haveLang:   uint16(b.langIndex(s[0])),
				wantScript: uint8(b.scriptIndex(d[1])),
				haveScript: uint8(b.scriptIndex(s[1])),
				distance:   uint8(distance),
			})
			if m.Oneway != "true" {
				matchScript = append(matchScript, scriptIntelligibility{
					wantLang:   uint16(b.langIndex(s[0])),
					haveLang:   uint16(b.langIndex(d[0])),
					wantScript: uint8(b.scriptIndex(s[1])),
					haveScript: uint8(b.scriptIndex(d[1])),
					distance:   uint8(distance),
				})
			}
		case 1:
			if desired == supported && desired == "*" {
				continue
			}
			if distance == 1 {
				// nb == no is already handled by macro mapping. Check there
				// really is only this case.
				if d[0] != "no" || s[0] != "nb" {
					log.Fatalf("unhandled equivalence %s == %s", s[0], d[0])
				}
				continue
			}
			// TODO: consider dropping oneway field and just doubling the entry.
			matchLang = append(matchLang, mutualIntelligibility{
				want:     uint16(b.langIndex(d[0])),
				have:     uint16(b.langIndex(s[0])),
				distance: uint8(distance),
				oneway:   m.Oneway == "true",
			})
		case 3:
			if desired == supported && desired == "*_*_*" {
				continue
			}
			if desired != supported {
				// This is now supported by CLDR, but only one case, which
				// should already be covered by paradigm locales. For instance,
				// test case "und, en, en-GU, en-IN, en-GB ; en-ZA ; en-GB" in
				// testdata/CLDRLocaleMatcherTest.txt tests this.
				if supported != "en_*_GB" {
					log.Fatalf("not supported: desired=%q; supported=%q", desired, supported)
				}
				continue
			}
			ri := regionIntelligibility{
				lang:     b.langIndex(d[0]),
				distance: uint8(distance),
			}
			if d[1] != "*" {
				ri.script = uint8(b.scriptIndex(d[1]))
			}
			switch {
			case d[2] == "*":
				ri.group = 0x80 // not contained in anything
			case strings.HasPrefix(d[2], "$!"):
				ri.group = 0x80
				d[2] = "$" + d[2][len("$!"):]
				fallthrough
			case strings.HasPrefix(d[2], "$"):
				ri.group |= idToIndex[d[2]]
			}
			matchRegion = append(matchRegion, ri)
		default:
			log.Fatalf("not supported: desired=%q; supported=%q", desired, supported)
		}
	}
	sort.SliceStable(matchLang, func(i, j int) bool {
		return matchLang[i].distance < matchLang[j].distance
	})
	b.w.WriteComment(`
	matchLang holds pairs of langIDs of base languages that are typically
	mutually intelligible. Each pair is associated with a confidence and
	whether the intelligibility goes one or both ways.`)
	b.w.WriteVar("matchLang", matchLang)

	b.w.WriteComment(`
	matchScript holds pairs of scriptIDs where readers of one script
	can typically also read the other. Each is associated with a confidence.`)
	sort.SliceStable(matchScript, func(i, j int) bool {
		return matchScript[i].distance < matchScript[j].distance
	})
	b.w.WriteVar("matchScript", matchScript)

	sort.SliceStable(matchRegion, func(i, j int) bool {
		return matchRegion[i].distance < matchRegion[j].distance
	})
	b.w.WriteVar("matchRegion", matchRegion)
}
986
vendor/golang.org/x/text/unicode/norm/maketables.go generated vendored Normal file
@@ -0,0 +1,986 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Normalization table generator.
// Data read from the web.
// See forminfo.go for a description of the trie values associated with each rune.

package main

import (
	"bytes"
	"encoding/binary"
	"flag"
	"fmt"
	"io"
	"log"
	"sort"
	"strconv"
	"strings"

	"golang.org/x/text/internal/gen"
	"golang.org/x/text/internal/triegen"
	"golang.org/x/text/internal/ucd"
)

func main() {
	gen.Init()
	loadUnicodeData()
	compactCCC()
	loadCompositionExclusions()
	completeCharFields(FCanonical)
	completeCharFields(FCompatibility)
	computeNonStarterCounts()
	verifyComputed()
	printChars()
	testDerived()
	printTestdata()
	makeTables()
}

var (
	tablelist = flag.String("tables",
		"all",
		"comma-separated list of which tables to generate; "+
			"can be 'decomp', 'recomp', 'info' and 'all'")
	test = flag.Bool("test",
		false,
		"test existing tables against DerivedNormalizationProps and generate test data for regression testing")
	verbose = flag.Bool("verbose",
		false,
		"write data to stdout as it is parsed")
)

const MaxChar = 0x10FFFF // anything above this shouldn't exist

// Quick Check properties of runes allow us to quickly
// determine whether a rune may occur in a normal form.
// For a given normal form, a rune may be guaranteed to occur
// verbatim (QC=Yes), may or may not combine with another
// rune (QC=Maybe), or may not occur (QC=No).
type QCResult int

const (
	QCUnknown QCResult = iota
	QCYes
	QCNo
	QCMaybe
)

func (r QCResult) String() string {
	switch r {
	case QCYes:
		return "Yes"
	case QCNo:
		return "No"
	case QCMaybe:
		return "Maybe"
	}
	return "***UNKNOWN***"
}

const (
	FCanonical     = iota // NFC or NFD
	FCompatibility        // NFKC or NFKD
	FNumberOfFormTypes
)

const (
	MComposed   = iota // NFC or NFKC
	MDecomposed        // NFD or NFKD
	MNumberOfModes
)

// This contains only the properties we're interested in.
type Char struct {
	name          string
	codePoint     rune  // if zero, this index is not a valid code point.
	ccc           uint8 // canonical combining class
	origCCC       uint8
	excludeInComp bool // from CompositionExclusions.txt
	compatDecomp  bool // it has a compatibility expansion

	nTrailingNonStarters uint8
	nLeadingNonStarters  uint8 // must be equal to trailing if non-zero

	forms [FNumberOfFormTypes]FormInfo // For FCanonical and FCompatibility

	state State
}

var chars = make([]Char, MaxChar+1)
var cccMap = make(map[uint8]uint8)

func (c Char) String() string {
	buf := new(bytes.Buffer)

	fmt.Fprintf(buf, "%U [%s]:\n", c.codePoint, c.name)
	fmt.Fprintf(buf, " ccc: %v\n", c.ccc)
	fmt.Fprintf(buf, " excludeInComp: %v\n", c.excludeInComp)
	fmt.Fprintf(buf, " compatDecomp: %v\n", c.compatDecomp)
	fmt.Fprintf(buf, " state: %v\n", c.state)
	fmt.Fprintf(buf, " NFC:\n")
	fmt.Fprint(buf, c.forms[FCanonical])
	fmt.Fprintf(buf, " NFKC:\n")
	fmt.Fprint(buf, c.forms[FCompatibility])

	return buf.String()
}

// In UnicodeData.txt, some ranges are marked like this:
// 3400;<CJK Ideograph Extension A, First>;Lo;0;L;;;;;N;;;;;
// 4DB5;<CJK Ideograph Extension A, Last>;Lo;0;L;;;;;N;;;;;
// parseCharacter keeps a state variable indicating the weirdness.
type State int

const (
	SNormal State = iota // known to be zero for the type
	SFirst
	SLast
	SMissing
)

var lastChar = rune('\u0000')

func (c Char) isValid() bool {
	return c.codePoint != 0 && c.state != SMissing
}

type FormInfo struct {
	quickCheck [MNumberOfModes]QCResult // index: MComposed or MDecomposed
	verified   [MNumberOfModes]bool     // index: MComposed or MDecomposed

	combinesForward  bool // May combine with rune on the right
	combinesBackward bool // May combine with rune on the left
	isOneWay         bool // Never appears in result
	inDecomp         bool // Some decompositions result in this char.
	decomp           Decomposition
	expandedDecomp   Decomposition
}

func (f FormInfo) String() string {
	buf := bytes.NewBuffer(make([]byte, 0))

	fmt.Fprintf(buf, " quickCheck[C]: %v\n", f.quickCheck[MComposed])
	fmt.Fprintf(buf, " quickCheck[D]: %v\n", f.quickCheck[MDecomposed])
	fmt.Fprintf(buf, " cmbForward: %v\n", f.combinesForward)
	fmt.Fprintf(buf, " cmbBackward: %v\n", f.combinesBackward)
	fmt.Fprintf(buf, " isOneWay: %v\n", f.isOneWay)
	fmt.Fprintf(buf, " inDecomp: %v\n", f.inDecomp)
	fmt.Fprintf(buf, " decomposition: %X\n", f.decomp)
	fmt.Fprintf(buf, " expandedDecomp: %X\n", f.expandedDecomp)

	return buf.String()
}

type Decomposition []rune

func parseDecomposition(s string, skipfirst bool) (a []rune, err error) {
	decomp := strings.Split(s, " ")
	if len(decomp) > 0 && skipfirst {
		decomp = decomp[1:]
	}
	for _, d := range decomp {
		point, err := strconv.ParseUint(d, 16, 64)
		if err != nil {
			return a, err
		}
		a = append(a, rune(point))
	}
	return a, nil
}

func loadUnicodeData() {
	f := gen.OpenUCDFile("UnicodeData.txt")
	defer f.Close()
	p := ucd.New(f)
	for p.Next() {
		r := p.Rune(ucd.CodePoint)
		char := &chars[r]

		char.ccc = uint8(p.Uint(ucd.CanonicalCombiningClass))
		decmap := p.String(ucd.DecompMapping)

		exp, err := parseDecomposition(decmap, false)
		isCompat := false
		if err != nil {
			if len(decmap) > 0 {
				exp, err = parseDecomposition(decmap, true)
				if err != nil {
					log.Fatalf(`%U: bad decomp |%v|: "%s"`, r, decmap, err)
				}
				isCompat = true
			}
		}

		char.name = p.String(ucd.Name)
		char.codePoint = r
		char.forms[FCompatibility].decomp = exp
		if !isCompat {
			char.forms[FCanonical].decomp = exp
		} else {
			char.compatDecomp = true
		}
		if len(decmap) > 0 {
			char.forms[FCompatibility].decomp = exp
		}
	}
	if err := p.Err(); err != nil {
		log.Fatal(err)
	}
}

// compactCCC converts the sparse set of CCC values to a continguous one,
// reducing the number of bits needed from 8 to 6.
func compactCCC() {
	m := make(map[uint8]uint8)
	for i := range chars {
		c := &chars[i]
		m[c.ccc] = 0
	}
	cccs := []int{}
	for v, _ := range m {
		cccs = append(cccs, int(v))
	}
	sort.Ints(cccs)
	for i, c := range cccs {
		cccMap[uint8(i)] = uint8(c)
		m[uint8(c)] = uint8(i)
	}
	for i := range chars {
		c := &chars[i]
		c.origCCC = c.ccc
		c.ccc = m[c.ccc]
	}
	if len(m) >= 1<<6 {
		log.Fatalf("too many difference CCC values: %d >= 64", len(m))
	}
}

// CompositionExclusions.txt has form:
// 0958 # ...
// See https://unicode.org/reports/tr44/ for full explanation
func loadCompositionExclusions() {
	f := gen.OpenUCDFile("CompositionExclusions.txt")
	defer f.Close()
	p := ucd.New(f)
	for p.Next() {
		c := &chars[p.Rune(0)]
		if c.excludeInComp {
			log.Fatalf("%U: Duplicate entry in exclusions.", c.codePoint)
		}
		c.excludeInComp = true
	}
	if e := p.Err(); e != nil {
		log.Fatal(e)
	}
}

// hasCompatDecomp returns true if any of the recursive
// decompositions contains a compatibility expansion.
// In this case, the character may not occur in NFK*.
func hasCompatDecomp(r rune) bool {
	c := &chars[r]
	if c.compatDecomp {
		return true
	}
	for _, d := range c.forms[FCompatibility].decomp {
		if hasCompatDecomp(d) {
			return true
		}
	}
	return false
}

// Hangul related constants.
const (
	HangulBase = 0xAC00
	HangulEnd  = 0xD7A4 // hangulBase + Jamo combinations (19 * 21 * 28)

	JamoLBase = 0x1100
	JamoLEnd  = 0x1113
	JamoVBase = 0x1161
	JamoVEnd  = 0x1176
	JamoTBase = 0x11A8
	JamoTEnd  = 0x11C3

	JamoLVTCount = 19 * 21 * 28
	JamoTCount   = 28
)

func isHangul(r rune) bool {
	return HangulBase <= r && r < HangulEnd
}

func isHangulWithoutJamoT(r rune) bool {
	if !isHangul(r) {
		return false
	}
	r -= HangulBase
	return r < JamoLVTCount && r%JamoTCount == 0
}

func ccc(r rune) uint8 {
	return chars[r].ccc
}

// Insert a rune in a buffer, ordered by Canonical Combining Class.
func insertOrdered(b Decomposition, r rune) Decomposition {
	n := len(b)
	b = append(b, 0)
	cc := ccc(r)
	if cc > 0 {
		// Use bubble sort.
		for ; n > 0; n-- {
			if ccc(b[n-1]) <= cc {
				break
			}
			b[n] = b[n-1]
		}
	}
	b[n] = r
	return b
}

// Recursively decompose.
func decomposeRecursive(form int, r rune, d Decomposition) Decomposition {
	dcomp := chars[r].forms[form].decomp
	if len(dcomp) == 0 {
		return insertOrdered(d, r)
	}
	for _, c := range dcomp {
		d = decomposeRecursive(form, c, d)
	}
	return d
}

func completeCharFields(form int) {
	// Phase 0: pre-expand decomposition.
	for i := range chars {
		f := &chars[i].forms[form]
		if len(f.decomp) == 0 {
			continue
		}
		exp := make(Decomposition, 0)
		for _, c := range f.decomp {
			exp = decomposeRecursive(form, c, exp)
		}
		f.expandedDecomp = exp
	}

	// Phase 1: composition exclusion, mark decomposition.
	for i := range chars {
		c := &chars[i]
		f := &c.forms[form]

		// Marks script-specific exclusions and version restricted.
		f.isOneWay = c.excludeInComp

		// Singletons
		f.isOneWay = f.isOneWay || len(f.decomp) == 1

		// Non-starter decompositions
		if len(f.decomp) > 1 {
			chk := c.ccc != 0 || chars[f.decomp[0]].ccc != 0
			f.isOneWay = f.isOneWay || chk
		}

		// Runes that decompose into more than two runes.
		f.isOneWay = f.isOneWay || len(f.decomp) > 2

		if form == FCompatibility {
			f.isOneWay = f.isOneWay || hasCompatDecomp(c.codePoint)
		}

		for _, r := range f.decomp {
			chars[r].forms[form].inDecomp = true
		}
	}

	// Phase 2: forward and backward combining.
	for i := range chars {
		c := &chars[i]
		f := &c.forms[form]

		if !f.isOneWay && len(f.decomp) == 2 {
			f0 := &chars[f.decomp[0]].forms[form]
			f1 := &chars[f.decomp[1]].forms[form]
			if !f0.isOneWay {
				f0.combinesForward = true
			}
			if !f1.isOneWay {
				f1.combinesBackward = true
			}
		}
		if isHangulWithoutJamoT(rune(i)) {
			f.combinesForward = true
		}
	}

	// Phase 3: quick check values.
	for i := range chars {
		c := &chars[i]
		f := &c.forms[form]

		switch {
		case len(f.decomp) > 0:
			f.quickCheck[MDecomposed] = QCNo
		case isHangul(rune(i)):
			f.quickCheck[MDecomposed] = QCNo
		default:
			f.quickCheck[MDecomposed] = QCYes
		}
		switch {
		case f.isOneWay:
			f.quickCheck[MComposed] = QCNo
		case (i & 0xffff00) == JamoLBase:
			f.quickCheck[MComposed] = QCYes
			if JamoLBase <= i && i < JamoLEnd {
				f.combinesForward = true
			}
			if JamoVBase <= i && i < JamoVEnd {
				f.quickCheck[MComposed] = QCMaybe
				f.combinesBackward = true
				f.combinesForward = true
			}
			if JamoTBase <= i && i < JamoTEnd {
				f.quickCheck[MComposed] = QCMaybe
				f.combinesBackward = true
			}
		case !f.combinesBackward:
			f.quickCheck[MComposed] = QCYes
		default:
			f.quickCheck[MComposed] = QCMaybe
		}
	}
}

func computeNonStarterCounts() {
	// Phase 4: leading and trailing non-starter count
	for i := range chars {
		c := &chars[i]

		runes := []rune{rune(i)}
		// We always use FCompatibility so that the CGJ insertion points do not
		// change for repeated normalizations with different forms.
		if exp := c.forms[FCompatibility].expandedDecomp; len(exp) > 0 {
			runes = exp
		}
		// We consider runes that combine backwards to be non-starters for the
		// purpose of Stream-Safe Text Processing.
		for _, r := range runes {
			if cr := &chars[r]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
				break
			}
			c.nLeadingNonStarters++
		}
		for i := len(runes) - 1; i >= 0; i-- {
			if cr := &chars[runes[i]]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
				break
			}
			c.nTrailingNonStarters++
		}
		if c.nTrailingNonStarters > 3 {
			log.Fatalf("%U: Decomposition with more than 3 (%d) trailing modifiers (%U)", i, c.nTrailingNonStarters, runes)
		}

		if isHangul(rune(i)) {
			c.nTrailingNonStarters = 2
			if isHangulWithoutJamoT(rune(i)) {
				c.nTrailingNonStarters = 1
			}
		}

		if l, t := c.nLeadingNonStarters, c.nTrailingNonStarters; l > 0 && l != t {
			log.Fatalf("%U: number of leading and trailing non-starters should be equal (%d vs %d)", i, l, t)
		}
		if t := c.nTrailingNonStarters; t > 3 {
			log.Fatalf("%U: number of trailing non-starters is %d > 3", t)
		}
	}
}

func printBytes(w io.Writer, b []byte, name string) {
	fmt.Fprintf(w, "// %s: %d bytes\n", name, len(b))
	fmt.Fprintf(w, "var %s = [...]byte {", name)
	for i, c := range b {
		switch {
		case i%64 == 0:
			fmt.Fprintf(w, "\n// Bytes %x - %x\n", i, i+63)
		case i%8 == 0:
			fmt.Fprintf(w, "\n")
		}
		fmt.Fprintf(w, "0x%.2X, ", c)
	}
	fmt.Fprint(w, "\n}\n\n")
}

// See forminfo.go for format.
func makeEntry(f *FormInfo, c *Char) uint16 {
	e := uint16(0)
	if r := c.codePoint; HangulBase <= r && r < HangulEnd {
		e |= 0x40
	}
	if f.combinesForward {
		e |= 0x20
	}
	if f.quickCheck[MDecomposed] == QCNo {
		e |= 0x4
	}
	switch f.quickCheck[MComposed] {
	case QCYes:
	case QCNo:
		e |= 0x10
	case QCMaybe:
		e |= 0x18
	default:
		log.Fatalf("Illegal quickcheck value %v.", f.quickCheck[MComposed])
	}
	e |= uint16(c.nTrailingNonStarters)
	return e
}

// decompSet keeps track of unique decompositions, grouped by whether
// the decomposition is followed by a trailing and/or leading CCC.
type decompSet [7]map[string]bool

const (
	normalDecomp = iota
	firstMulti
	firstCCC
	endMulti
	firstLeadingCCC
	firstCCCZeroExcept
	firstStarterWithNLead
	lastDecomp
)

var cname = []string{"firstMulti", "firstCCC", "endMulti", "firstLeadingCCC", "firstCCCZeroExcept", "firstStarterWithNLead", "lastDecomp"}

func makeDecompSet() decompSet {
	m := decompSet{}
	for i := range m {
		m[i] = make(map[string]bool)
	}
	return m
}
func (m *decompSet) insert(key int, s string) {
	m[key][s] = true
}

func printCharInfoTables(w io.Writer) int {
	mkstr := func(r rune, f *FormInfo) (int, string) {
		d := f.expandedDecomp
		s := string([]rune(d))
		if max := 1 << 6; len(s) >= max {
			const msg = "%U: too many bytes in decomposition: %d >= %d"
			log.Fatalf(msg, r, len(s), max)
		}
		head := uint8(len(s))
		if f.quickCheck[MComposed] != QCYes {
			head |= 0x40
		}
		if f.combinesForward {
			head |= 0x80
		}
		s = string([]byte{head}) + s

		lccc := ccc(d[0])
		tccc := ccc(d[len(d)-1])
		cc := ccc(r)
		if cc != 0 && lccc == 0 && tccc == 0 {
			log.Fatalf("%U: trailing and leading ccc are 0 for non-zero ccc %d", r, cc)
		}
		if tccc < lccc && lccc != 0 {
			const msg = "%U: lccc (%d) must be <= tcc (%d)"
			log.Fatalf(msg, r, lccc, tccc)
		}
		index := normalDecomp
		nTrail := chars[r].nTrailingNonStarters
		nLead := chars[r].nLeadingNonStarters
		if tccc > 0 || lccc > 0 || nTrail > 0 {
			tccc <<= 2
			tccc |= nTrail
			s += string([]byte{tccc})
			index = endMulti
			for _, r := range d[1:] {
				if ccc(r) == 0 {
					index = firstCCC
				}
			}
			if lccc > 0 || nLead > 0 {
				s += string([]byte{lccc})
				if index == firstCCC {
					log.Fatalf("%U: multi-segment decomposition not supported for decompositions with leading CCC != 0", r)
				}
				index = firstLeadingCCC
			}
			if cc != lccc {
				if cc != 0 {
					log.Fatalf("%U: for lccc != ccc, expected ccc to be 0; was %d", r, cc)
				}
				index = firstCCCZeroExcept
			}
		} else if len(d) > 1 {
			index = firstMulti
		}
		return index, s
	}

	decompSet := makeDecompSet()
	const nLeadStr = "\x00\x01" // 0-byte length and tccc with nTrail.
	decompSet.insert(firstStarterWithNLead, nLeadStr)

	// Store the uniqued decompositions in a byte buffer,
	// preceded by their byte length.
	for _, c := range chars {
		for _, f := range c.forms {
			if len(f.expandedDecomp) == 0 {
				continue
			}
			if f.combinesBackward {
				log.Fatalf("%U: combinesBackward and decompose", c.codePoint)
			}
			index, s := mkstr(c.codePoint, &f)
			decompSet.insert(index, s)
		}
	}

	decompositions := bytes.NewBuffer(make([]byte, 0, 10000))
	size := 0
	positionMap := make(map[string]uint16)
	decompositions.WriteString("\000")
	fmt.Fprintln(w, "const (")
	for i, m := range decompSet {
		sa := []string{}
		for s := range m {
			sa = append(sa, s)
		}
		sort.Strings(sa)
		for _, s := range sa {
			p := decompositions.Len()
			decompositions.WriteString(s)
			positionMap[s] = uint16(p)
		}
		if cname[i] != "" {
			fmt.Fprintf(w, "%s = 0x%X\n", cname[i], decompositions.Len())
		}
	}
	fmt.Fprintln(w, "maxDecomp = 0x8000")
	fmt.Fprintln(w, ")")
	b := decompositions.Bytes()
	printBytes(w, b, "decomps")
	size += len(b)

	varnames := []string{"nfc", "nfkc"}
	for i := 0; i < FNumberOfFormTypes; i++ {
		trie := triegen.NewTrie(varnames[i])

		for r, c := range chars {
			f := c.forms[i]
			d := f.expandedDecomp
			if len(d) != 0 {
				_, key := mkstr(c.codePoint, &f)
				trie.Insert(rune(r), uint64(positionMap[key]))
				if c.ccc != ccc(d[0]) {
					// We assume the lead ccc of a decomposition !=0 in this case.
					if ccc(d[0]) == 0 {
						log.Fatalf("Expected leading CCC to be non-zero; ccc is %d", c.ccc)
					}
				}
			} else if c.nLeadingNonStarters > 0 && len(f.expandedDecomp) == 0 && c.ccc == 0 && !f.combinesBackward {
				// Handle cases where it can't be detected that the nLead should be equal
				// to nTrail.
				trie.Insert(c.codePoint, uint64(positionMap[nLeadStr]))
			} else if v := makeEntry(&f, &c)<<8 | uint16(c.ccc); v != 0 {
				trie.Insert(c.codePoint, uint64(0x8000|v))
			}
		}
		sz, err := trie.Gen(w, triegen.Compact(&normCompacter{name: varnames[i]}))
		if err != nil {
			log.Fatal(err)
		}
		size += sz
	}
	return size
}

func contains(sa []string, s string) bool {
	for _, a := range sa {
		if a == s {
			return true
		}
	}
	return false
}

func makeTables() {
	w := &bytes.Buffer{}

	size := 0
	if *tablelist == "" {
		return
	}
	list := strings.Split(*tablelist, ",")
	if *tablelist == "all" {
		list = []string{"recomp", "info"}
	}

	// Compute maximum decomposition size.
	max := 0
	for _, c := range chars {
		if n := len(string(c.forms[FCompatibility].expandedDecomp)); n > max {
			max = n
		}
	}
	fmt.Fprintln(w, `import "sync"`)
	fmt.Fprintln(w)

	fmt.Fprintln(w, "const (")
	fmt.Fprintln(w, "\t// Version is the Unicode edition from which the tables are derived.")
	fmt.Fprintf(w, "\tVersion = %q\n", gen.UnicodeVersion())
	fmt.Fprintln(w)
	fmt.Fprintln(w, "\t// MaxTransformChunkSize indicates the maximum number of bytes that Transform")
	fmt.Fprintln(w, "\t// may need to write atomically for any Form. Making a destination buffer at")
	fmt.Fprintln(w, "\t// least this size ensures that Transform can always make progress and that")
	fmt.Fprintln(w, "\t// the user does not need to grow the buffer on an ErrShortDst.")
	fmt.Fprintf(w, "\tMaxTransformChunkSize = %d+maxNonStarters*4\n", len(string(0x034F))+max)
	fmt.Fprintln(w, ")\n")

	// Print the CCC remap table.
	size += len(cccMap)
	fmt.Fprintf(w, "var ccc = [%d]uint8{", len(cccMap))
	for i := 0; i < len(cccMap); i++ {
		if i%8 == 0 {
			fmt.Fprintln(w)
		}
		fmt.Fprintf(w, "%3d, ", cccMap[uint8(i)])
	}
	fmt.Fprintln(w, "\n}\n")

	if contains(list, "info") {
		size += printCharInfoTables(w)
	}

	if contains(list, "recomp") {
		// Note that we use 32 bit keys, instead of 64 bit.
		// This clips the bits of three entries, but we know
		// this won't cause a collision. The compiler will catch
		// any changes made to UnicodeData.txt that introduces
		// a collision.
		// Note that the recomposition map for NFC and NFKC
		// are identical.

		// Recomposition map
		nrentries := 0
		for _, c := range chars {
			f := c.forms[FCanonical]
			if !f.isOneWay && len(f.decomp) > 0 {
				nrentries++
			}
		}
		sz := nrentries * 8
		size += sz
		fmt.Fprintf(w, "// recompMap: %d bytes (entries only)\n", sz)
		fmt.Fprintln(w, "var recompMap map[uint32]rune")
		fmt.Fprintln(w, "var recompMapOnce sync.Once\n")
		fmt.Fprintln(w, `const recompMapPacked = "" +`)
		var buf [8]byte
		for i, c := range chars {
			f := c.forms[FCanonical]
			d := f.decomp
			if !f.isOneWay && len(d) > 0 {
				key := uint32(uint16(d[0]))<<16 + uint32(uint16(d[1]))
				binary.BigEndian.PutUint32(buf[:4], key)
				binary.BigEndian.PutUint32(buf[4:], uint32(i))
				fmt.Fprintf(w, "\t\t%q + // 0x%.8X: 0x%.8X\n", string(buf[:]), key, uint32(i))
			}
		}
		// hack so we don't have to special case the trailing plus sign
		fmt.Fprintf(w, ` ""`)
		fmt.Fprintln(w)
	}

	fmt.Fprintf(w, "// Total size of tables: %dKB (%d bytes)\n", (size+512)/1024, size)
	gen.WriteVersionedGoFile("tables.go", "norm", w.Bytes())
}

func printChars() {
	if *verbose {
		for _, c := range chars {
			if !c.isValid() || c.state == SMissing {
				continue
			}
			fmt.Println(c)
		}
	}
}

// verifyComputed does various consistency tests.
func verifyComputed() {
	for i, c := range chars {
		for _, f := range c.forms {
			isNo := (f.quickCheck[MDecomposed] == QCNo)
			if (len(f.decomp) > 0) != isNo && !isHangul(rune(i)) {
				log.Fatalf("%U: NF*D QC must be No if rune decomposes", i)
			}

			isMaybe := f.quickCheck[MComposed] == QCMaybe
			if f.combinesBackward != isMaybe {
				log.Fatalf("%U: NF*C QC must be Maybe if combinesBackward", i)
			}
			if len(f.decomp) > 0 && f.combinesForward && isMaybe {
				log.Fatalf("%U: NF*C QC must be Yes or No if combinesForward and decomposes", i)
			}

			if len(f.expandedDecomp) != 0 {
				continue
			}
			if a, b := c.nLeadingNonStarters > 0, (c.ccc > 0 || f.combinesBackward); a != b {
				// We accept these runes to be treated differently (it only affects
				// segment breaking in iteration, most likely on improper use), but
				// reconsider if more characters are added.
				// U+FF9E HALFWIDTH KATAKANA VOICED SOUND MARK;Lm;0;L;<narrow> 3099;;;;N;;;;;
				// U+FF9F HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK;Lm;0;L;<narrow> 309A;;;;N;;;;;
				// U+3133 HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<compat> 11AA;;;;N;HANGUL LETTER GIYEOG SIOS;;;;
				// U+318E HANGUL LETTER ARAEAE;Lo;0;L;<compat> 11A1;;;;N;HANGUL LETTER ALAE AE;;;;
				// U+FFA3 HALFWIDTH HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<narrow> 3133;;;;N;HALFWIDTH HANGUL LETTER GIYEOG SIOS;;;;
				// U+FFDC HALFWIDTH HANGUL LETTER I;Lo;0;L;<narrow> 3163;;;;N;;;;;
				if i != 0xFF9E && i != 0xFF9F && !(0x3133 <= i && i <= 0x318E) && !(0xFFA3 <= i && i <= 0xFFDC) {
					log.Fatalf("%U: nLead was %v; want %v", i, a, b)
				}
			}
		}
		nfc := c.forms[FCanonical]
		nfkc := c.forms[FCompatibility]
		if nfc.combinesBackward != nfkc.combinesBackward {
			log.Fatalf("%U: Cannot combine combinesBackward\n", c.codePoint)
		}
	}
}

// Use values in DerivedNormalizationProps.txt to compare against the
// values we computed.
// DerivedNormalizationProps.txt has form:
// 00C0..00C5 ; NFD_QC; N # ...
// 0374 ; NFD_QC; N # ...
// See https://unicode.org/reports/tr44/ for full explanation
func testDerived() {
	f := gen.OpenUCDFile("DerivedNormalizationProps.txt")
	defer f.Close()
	p := ucd.New(f)
	for p.Next() {
		r := p.Rune(0)
		c := &chars[r]

		var ftype, mode int
		qt := p.String(1)
		switch qt {
		case "NFC_QC":
			ftype, mode = FCanonical, MComposed
		case "NFD_QC":
			ftype, mode = FCanonical, MDecomposed
		case "NFKC_QC":
			ftype, mode = FCompatibility, MComposed
		case "NFKD_QC":
			ftype, mode = FCompatibility, MDecomposed
		default:
			continue
		}
		var qr QCResult
		switch p.String(2) {
		case "Y":
			qr = QCYes
		case "N":
			qr = QCNo
		case "M":
			qr = QCMaybe
		default:
			log.Fatalf(`Unexpected quick check value "%s"`, p.String(2))
		}
		if got := c.forms[ftype].quickCheck[mode]; got != qr {
			log.Printf("%U: FAILED %s (was %v need %v)\n", r, qt, got, qr)
		}
		c.forms[ftype].verified[mode] = true
	}
	if err := p.Err(); err != nil {
		log.Fatal(err)
	}
	// Any unspecified value must be QCYes. Verify this.
	for i, c := range chars {
		for j, fd := range c.forms {
			for k, qr := range fd.quickCheck {
				if !fd.verified[k] && qr != QCYes {
					m := "%U: FAIL F:%d M:%d (was %v need Yes) %s\n"
					log.Printf(m, i, j, k, qr, c.name)
				}
			}
		}
	}
}

var testHeader = `const (
	Yes = iota
	No
	Maybe
)

type formData struct {
	qc              uint8
	combinesForward bool
	decomposition   string
}

type runeData struct {
	r      rune
	ccc    uint8
	nLead  uint8
	nTrail uint8
	f      [2]formData // 0: canonical; 1: compatibility
}

func f(qc uint8, cf bool, dec string) [2]formData {
	return [2]formData{{qc, cf, dec}, {qc, cf, dec}}
}

func g(qc, qck uint8, cf, cfk bool, d, dk string) [2]formData {
	return [2]formData{{qc, cf, d}, {qck, cfk, dk}}
}

var testData = []runeData{
`

func printTestdata() {
	type lastInfo struct {
		ccc    uint8
		nLead  uint8
		nTrail uint8
		f      string
	}

	last := lastInfo{}
	w := &bytes.Buffer{}
	fmt.Fprintf(w, testHeader)
	for r, c := range chars {
		f := c.forms[FCanonical]
		qc, cf, d := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
		f = c.forms[FCompatibility]
		qck, cfk, dk := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
		s := ""
		if d == dk && qc == qck && cf == cfk {
			s = fmt.Sprintf("f(%s, %v, %q)", qc, cf, d)
		} else {
			s = fmt.Sprintf("g(%s, %s, %v, %v, %q, %q)", qc, qck, cf, cfk, d, dk)
		}
		current := lastInfo{c.ccc, c.nLeadingNonStarters, c.nTrailingNonStarters, s}
		if last != current {
			fmt.Fprintf(w, "\t{0x%x, %d, %d, %d, %s},\n", r, c.origCCC, c.nLeadingNonStarters, c.nTrailingNonStarters, s)
			last = current
		}
	}
	fmt.Fprintln(w, "}")
	gen.WriteVersionedGoFile("data_test.go", "norm", w.Bytes())
}
117
vendor/golang.org/x/text/unicode/norm/triegen.go generated vendored Normal file
@@ -0,0 +1,117 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Trie table generator.
// Used by make*tables tools to generate a go file with trie data structures
// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
// sequence are used to lookup offsets in the index table to be used for the
// next byte. The last byte is used to index into a table with 16-bit values.

package main

import (
	"fmt"
	"io"
)

const maxSparseEntries = 16

type normCompacter struct {
	sparseBlocks [][]uint64
	sparseOffset []uint16
	sparseCount  int
	name         string
}

func mostFrequentStride(a []uint64) int {
	counts := make(map[int]int)
	var v int
	for _, x := range a {
		if stride := int(x) - v; v != 0 && stride >= 0 {
			counts[stride]++
		}
		v = int(x)
	}
	var maxs, maxc int
	for stride, cnt := range counts {
		if cnt > maxc || (cnt == maxc && stride < maxs) {
			maxs, maxc = stride, cnt
		}
	}
	return maxs
}

func countSparseEntries(a []uint64) int {
	stride := mostFrequentStride(a)
	var v, count int
	for _, tv := range a {
		if int(tv)-v != stride {
			if tv != 0 {
				count++
			}
		}
		v = int(tv)
	}
	return count
}

func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
	if n := countSparseEntries(v); n <= maxSparseEntries {
		return (n+1)*4 + 2, true
	}
	return 0, false
}

func (c *normCompacter) Store(v []uint64) uint32 {
	h := uint32(len(c.sparseOffset))
	c.sparseBlocks = append(c.sparseBlocks, v)
	c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
	c.sparseCount += countSparseEntries(v) + 1
	return h
}

func (c *normCompacter) Handler() string {
	return c.name + "Sparse.lookup"
}

func (c *normCompacter) Print(w io.Writer) (retErr error) {
	p := func(f string, x ...interface{}) {
		if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
			retErr = err
		}
	}

	ls := len(c.sparseBlocks)
	p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
	p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)

	ns := c.sparseCount
	p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
	p("var %sSparseValues = [%d]valueRange {", c.name, ns)
	for i, b := range c.sparseBlocks {
		p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
		var v int
		stride := mostFrequentStride(b)
		n := countSparseEntries(b)
		p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
		for i, nv := range b {
			if int(nv)-v != stride {
				if v != 0 {
					p(",hi:%#02x},", 0x80+i-1)
				}
				if nv != 0 {
					p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
				}
			}
			v = int(nv)
		}
		if v != 0 {
			p(",hi:%#02x},", 0x80+len(b)-1)
		}
	}
	p("\n}\n\n")
	return
}
99
vendor/golang.org/x/tools/go/gcexportdata/main.go generated vendored Normal file
@@ -0,0 +1,99 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// The gcexportdata command is a diagnostic tool that displays the
// contents of gc export data files.
package main

import (
	"flag"
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
	"golang.org/x/tools/go/types/typeutil"
)

var packageFlag = flag.String("package", "", "alternative package to print")

func main() {
	log.SetPrefix("gcexportdata: ")
	log.SetFlags(0)
	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
	}
	flag.Parse()
	if flag.NArg() != 1 {
		flag.Usage()
		os.Exit(2)
	}
	filename := flag.Args()[0]

	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}

	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatalf("%s: %s", filename, err)
	}

	// Decode the package.
	const primary = "<primary>"
	imports := make(map[string]*types.Package)
	fset := token.NewFileSet()
	pkg, err := gcexportdata.Read(r, fset, imports, primary)
	if err != nil {
		log.Fatalf("%s: %s", filename, err)
	}

	// Optionally select an indirectly mentioned package.
	if *packageFlag != "" {
		pkg = imports[*packageFlag]
		if pkg == nil {
			fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
				filename, *packageFlag)
			for p := range imports {
				if p != primary {
					fmt.Fprintf(os.Stderr, "\t%s\n", p)
				}
			}
			os.Exit(1)
		}
	}

	// Print all package-level declarations, including non-exported ones.
	fmt.Printf("package %s\n", pkg.Name())
	for _, imp := range pkg.Imports() {
		fmt.Printf("import %q\n", imp.Path())
	}
	qual := func(p *types.Package) string {
		if pkg == p {
			return ""
		}
		return p.Name()
	}
	scope := pkg.Scope()
	for _, name := range scope.Names() {
		obj := scope.Lookup(name)
		fmt.Printf("%s: %s\n",
			fset.Position(obj.Pos()),
			types.ObjectString(obj, qual))

		// For types, print each method.
		if _, ok := obj.(*types.TypeName); ok {
			for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
				fmt.Printf("%s: %s\n",
					fset.Position(method.Obj().Pos()),
					types.SelectionString(method, qual))
			}
		}
	}
}
173
vendor/golang.org/x/tools/imports/mkindex.go
generated
vendored
Normal file
173
vendor/golang.org/x/tools/imports/mkindex.go
generated
vendored
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
// +build ignore

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Command mkindex creates the file "pkgindex.go" containing an index of the Go
// standard library. The file is intended to be built as part of the imports
// package, so that the package may be used in environments where a GOROOT is
// not available (such as App Engine).
package main

import (
    "bytes"
    "fmt"
    "go/ast"
    "go/build"
    "go/format"
    "go/parser"
    "go/token"
    "io/ioutil"
    "log"
    "os"
    "path"
    "path/filepath"
    "strings"
)

var (
    pkgIndex = make(map[string][]pkg)
    exports  = make(map[string]map[string]bool)
)

func main() {
    // Don't use GOPATH.
    ctx := build.Default
    ctx.GOPATH = ""

    // Populate pkgIndex global from GOROOT.
    for _, path := range ctx.SrcDirs() {
        f, err := os.Open(path)
        if err != nil {
            log.Print(err)
            continue
        }
        children, err := f.Readdir(-1)
        f.Close()
        if err != nil {
            log.Print(err)
            continue
        }
        for _, child := range children {
            if child.IsDir() {
                loadPkg(path, child.Name())
            }
        }
    }
    // Populate exports global.
    for _, ps := range pkgIndex {
        for _, p := range ps {
            e := loadExports(p.dir)
            if e != nil {
                exports[p.dir] = e
            }
        }
    }

    // Construct source file.
    var buf bytes.Buffer
    fmt.Fprint(&buf, pkgIndexHead)
    fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex)
    fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports)
    src := buf.Bytes()

    // Replace main.pkg type name with pkg.
    src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1)
    // Replace actual GOROOT with "/go".
    src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1)
    // Add some line wrapping.
    src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1)
    src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1)

    var err error
    src, err = format.Source(src)
    if err != nil {
        log.Fatal(err)
    }

    // Write out source file.
    err = ioutil.WriteFile("pkgindex.go", src, 0644)
    if err != nil {
        log.Fatal(err)
    }
}

const pkgIndexHead = `package imports

func init() {
    pkgIndexOnce.Do(func() {
        pkgIndex.m = pkgIndexMaster
    })
    loadExports = func(dir string) map[string]bool {
        return exportsMaster[dir]
    }
}
`

type pkg struct {
    importpath string // full pkg import path, e.g. "net/http"
    dir        string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt"
}

var fset = token.NewFileSet()

func loadPkg(root, importpath string) {
    shortName := path.Base(importpath)
    if shortName == "testdata" {
        return
    }

    dir := filepath.Join(root, importpath)
    pkgIndex[shortName] = append(pkgIndex[shortName], pkg{
        importpath: importpath,
        dir:        dir,
    })

    pkgDir, err := os.Open(dir)
    if err != nil {
        return
    }
    children, err := pkgDir.Readdir(-1)
    pkgDir.Close()
    if err != nil {
        return
    }
    for _, child := range children {
        name := child.Name()
        if name == "" {
            continue
        }
        if c := name[0]; c == '.' || ('0' <= c && c <= '9') {
            continue
        }
        if child.IsDir() {
            loadPkg(root, filepath.Join(importpath, name))
        }
    }
}

func loadExports(dir string) map[string]bool {
    exports := make(map[string]bool)
    buildPkg, err := build.ImportDir(dir, 0)
    if err != nil {
        if strings.Contains(err.Error(), "no buildable Go source files in") {
            return nil
        }
        log.Printf("could not import %q: %v", dir, err)
        return nil
    }
    for _, file := range buildPkg.GoFiles {
        f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
        if err != nil {
            log.Printf("could not parse %q: %v", file, err)
            continue
        }
        for name := range f.Scope.Objects {
            if ast.IsExported(name) {
                exports[name] = true
            }
        }
    }
    return exports
}
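The pkgIndexHead constant above becomes the header of the generated pkgindex.go, followed by %#v dumps of the two maps (with "main.pkg" rewritten to "pkg" and the real GOROOT rewritten to "/go"). A hypothetical excerpt of the generated output, with paths and symbols invented purely for illustration, might look like:

// Hypothetical excerpt of a generated pkgindex.go (illustrative only);
// the real file is produced by running mkindex.go against a GOROOT.
package imports

func init() {
    pkgIndexOnce.Do(func() {
        pkgIndex.m = pkgIndexMaster
    })
    loadExports = func(dir string) map[string]bool {
        return exportsMaster[dir]
    }
}

var pkgIndexMaster = map[string][]pkg{
    "fmt": {{importpath: "fmt", dir: "/go/src/fmt"}},
}

var exportsMaster = map[string]map[string]bool{
    "/go/src/fmt": {"Println": true, "Sprintf": true},
}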
vendor/golang.org/x/tools/imports/mkstdlib.go (generated, vendored, new file, 132 lines)
@@ -0,0 +1,132 @@
// +build ignore

// mkstdlib generates the zstdlib.go file, containing the Go standard
// library API symbols. It's baked into the binary to avoid scanning
// GOPATH in the common case.
package main

import (
    "bufio"
    "bytes"
    "fmt"
    "go/format"
    "io"
    "io/ioutil"
    "log"
    "os"
    "os/exec"
    "path/filepath"
    "regexp"
    "runtime"
    "sort"
    "strings"
)

func mustOpen(name string) io.Reader {
    f, err := os.Open(name)
    if err != nil {
        log.Fatal(err)
    }
    return f
}

func api(base string) string {
    return filepath.Join(runtime.GOROOT(), "api", base)
}

var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`)

var unsafeSyms = map[string]bool{"Alignof": true, "ArbitraryType": true, "Offsetof": true, "Pointer": true, "Sizeof": true}

func main() {
    var buf bytes.Buffer
    outf := func(format string, args ...interface{}) {
        fmt.Fprintf(&buf, format, args...)
    }
    outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n")
    outf("package imports\n")
    outf("var stdlib = map[string]map[string]bool{\n")
    f := io.MultiReader(
        mustOpen(api("go1.txt")),
        mustOpen(api("go1.1.txt")),
        mustOpen(api("go1.2.txt")),
        mustOpen(api("go1.3.txt")),
        mustOpen(api("go1.4.txt")),
        mustOpen(api("go1.5.txt")),
        mustOpen(api("go1.6.txt")),
        mustOpen(api("go1.7.txt")),
        mustOpen(api("go1.8.txt")),
        mustOpen(api("go1.9.txt")),
        mustOpen(api("go1.10.txt")),
        mustOpen(api("go1.11.txt")),
        mustOpen(api("go1.12.txt")),

        // The API of the syscall/js package needs to be computed explicitly,
        // because it's not included in the GOROOT/api/go1.*.txt files at this time.
        syscallJSAPI(),
    )
    sc := bufio.NewScanner(f)

    pkgs := map[string]map[string]bool{
        "unsafe": unsafeSyms,
    }
    paths := []string{"unsafe"}

    for sc.Scan() {
        l := sc.Text()
        has := func(v string) bool { return strings.Contains(l, v) }
        if has("struct, ") || has("interface, ") || has(", method (") {
            continue
        }
        if m := sym.FindStringSubmatch(l); m != nil {
            path, sym := m[1], m[2]

            if _, ok := pkgs[path]; !ok {
                pkgs[path] = map[string]bool{}
                paths = append(paths, path)
            }
            pkgs[path][sym] = true
        }
    }
    if err := sc.Err(); err != nil {
        log.Fatal(err)
    }
    sort.Strings(paths)
    for _, path := range paths {
        outf("\t%q: map[string]bool{\n", path)
        pkg := pkgs[path]
        var syms []string
        for sym := range pkg {
            syms = append(syms, sym)
        }
        sort.Strings(syms)
        for _, sym := range syms {
            outf("\t\t%q: true,\n", sym)
        }
        outf("},\n")
    }
    outf("}\n")
    fmtbuf, err := format.Source(buf.Bytes())
    if err != nil {
        log.Fatal(err)
    }
    err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666)
    if err != nil {
        log.Fatal(err)
    }
}

// syscallJSAPI returns the API of the syscall/js package.
// It's computed from the contents of $(go env GOROOT)/src/syscall/js.
func syscallJSAPI() io.Reader {
    var exeSuffix string
    if runtime.GOOS == "windows" {
        exeSuffix = ".exe"
    }
    cmd := exec.Command("go"+exeSuffix, "run", "cmd/api", "-contexts", "js-wasm", "syscall/js")
    out, err := cmd.Output()
    if err != nil {
        log.Fatalln(err)
    }
    return bytes.NewReader(out)
}
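The sym regular expression above does the heavy lifting: each line of GOROOT/api/go1.*.txt is reduced to a (package path, exported symbol) pair. A small standalone check of that regexp against one sample line in the api-file format (the line itself is an example, not taken from this repository):

// Standalone check (not part of the vendored file) of the symbol regexp
// used by mkstdlib.go, applied to one line in the GOROOT/api format.
package main

import (
    "fmt"
    "regexp"
)

var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`)

func main() {
    line := "pkg strings, func Contains(string, string) bool"
    if m := sym.FindStringSubmatch(line); m != nil {
        // Prints: strings Contains
        fmt.Println(m[1], m[2])
    }
}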
vendor/modules.txt (vendored, 74 changed lines)
@@ -1,20 +1,20 @@
 # github.com/99designs/gqlgen v0.9.0
 github.com/99designs/gqlgen
-github.com/99designs/gqlgen/api
-github.com/99designs/gqlgen/cmd
-github.com/99designs/gqlgen/codegen
-github.com/99designs/gqlgen/codegen/config
-github.com/99designs/gqlgen/codegen/templates
-github.com/99designs/gqlgen/complexity
 github.com/99designs/gqlgen/graphql
-github.com/99designs/gqlgen/graphql/introspection
 github.com/99designs/gqlgen/handler
-github.com/99designs/gqlgen/internal/code
+github.com/99designs/gqlgen/graphql/introspection
-github.com/99designs/gqlgen/internal/imports
+github.com/99designs/gqlgen/cmd
+github.com/99designs/gqlgen/complexity
+github.com/99designs/gqlgen/api
+github.com/99designs/gqlgen/codegen/config
+github.com/99designs/gqlgen/plugin/servergen
+github.com/99designs/gqlgen/codegen
 github.com/99designs/gqlgen/plugin
 github.com/99designs/gqlgen/plugin/modelgen
 github.com/99designs/gqlgen/plugin/resolvergen
-github.com/99designs/gqlgen/plugin/servergen
+github.com/99designs/gqlgen/codegen/templates
+github.com/99designs/gqlgen/internal/code
+github.com/99designs/gqlgen/internal/imports
 # github.com/BurntSushi/toml v0.3.1
 github.com/BurntSushi/toml
 # github.com/PuerkitoBio/goquery v1.5.0
@@ -46,8 +46,8 @@ github.com/gobuffalo/envy
 # github.com/gobuffalo/events v1.2.0
 github.com/gobuffalo/events
 # github.com/gobuffalo/flect v0.1.0
-github.com/gobuffalo/flect
 github.com/gobuffalo/flect/name
+github.com/gobuffalo/flect
 # github.com/gobuffalo/genny v0.0.0-20190219203444-c95082806342
 github.com/gobuffalo/genny
 # github.com/gobuffalo/gogen v0.0.0-20190224213239-1c6076128bbc
@@ -64,28 +64,32 @@ github.com/gobuffalo/meta
 github.com/gobuffalo/packd
 # github.com/gobuffalo/packr/v2 v2.0.2
 github.com/gobuffalo/packr/v2
-github.com/gobuffalo/packr/v2/file
 github.com/gobuffalo/packr/v2/file/resolver
-github.com/gobuffalo/packr/v2/file/resolver/encoding/hex
+github.com/gobuffalo/packr/v2/file
 github.com/gobuffalo/packr/v2/jam/parser
 github.com/gobuffalo/packr/v2/plog
+github.com/gobuffalo/packr/v2/file/resolver/encoding/hex
 # github.com/gobuffalo/syncx v0.0.0-20181120194010-558ac7de985f
 github.com/gobuffalo/syncx
 # github.com/golang-migrate/migrate/v4 v4.3.1
-github.com/golang-migrate/migrate/v4
-github.com/golang-migrate/migrate/v4/database
 github.com/golang-migrate/migrate/v4/database/sqlite3
-github.com/golang-migrate/migrate/v4/source
 github.com/golang-migrate/migrate/v4/source/file
+github.com/golang-migrate/migrate/v4
+github.com/golang-migrate/migrate/v4/source
+github.com/golang-migrate/migrate/v4/database
 # github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef
 github.com/golang/groupcache/lru
+# github.com/gorilla/securecookie v1.1.1
+github.com/gorilla/securecookie
+# github.com/gorilla/sessions v1.2.0
+github.com/gorilla/sessions
 # github.com/gorilla/websocket v1.4.0
 github.com/gorilla/websocket
 # github.com/h2non/filetype v1.0.8
 github.com/h2non/filetype
+github.com/h2non/filetype/types
 github.com/h2non/filetype/matchers
 github.com/h2non/filetype/matchers/isobmff
-github.com/h2non/filetype/types
 # github.com/hashicorp/errwrap v1.0.0
 github.com/hashicorp/errwrap
 # github.com/hashicorp/go-multierror v1.0.0
@@ -95,13 +99,13 @@ github.com/hashicorp/golang-lru
 github.com/hashicorp/golang-lru/simplelru
 # github.com/hashicorp/hcl v1.0.0
 github.com/hashicorp/hcl
+github.com/hashicorp/hcl/hcl/printer
 github.com/hashicorp/hcl/hcl/ast
 github.com/hashicorp/hcl/hcl/parser
-github.com/hashicorp/hcl/hcl/printer
-github.com/hashicorp/hcl/hcl/scanner
-github.com/hashicorp/hcl/hcl/strconv
 github.com/hashicorp/hcl/hcl/token
 github.com/hashicorp/hcl/json/parser
+github.com/hashicorp/hcl/hcl/scanner
+github.com/hashicorp/hcl/hcl/strconv
 github.com/hashicorp/hcl/json/scanner
 github.com/hashicorp/hcl/json/token
 # github.com/inconshreveable/mousetrap v1.0.0
@@ -165,10 +169,10 @@ github.com/vektah/dataloaden/pkg/generator
 github.com/vektah/gqlparser
 github.com/vektah/gqlparser/ast
 github.com/vektah/gqlparser/gqlerror
-github.com/vektah/gqlparser/lexer
 github.com/vektah/gqlparser/parser
 github.com/vektah/gqlparser/validator
 github.com/vektah/gqlparser/validator/rules
+github.com/vektah/gqlparser/lexer
 # golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
 golang.org/x/crypto/bcrypt
 golang.org/x/crypto/blowfish
@@ -178,10 +182,10 @@ golang.org/x/image/bmp
 golang.org/x/image/tiff
 golang.org/x/image/tiff/lzw
 # golang.org/x/net v0.0.0-20190522155817-f3200d17e092
-golang.org/x/net/context/ctxhttp
 golang.org/x/net/html
-golang.org/x/net/html/atom
 golang.org/x/net/html/charset
+golang.org/x/net/context/ctxhttp
+golang.org/x/net/html/atom
 # golang.org/x/sys v0.0.0-20190426135247-a129542de9ae
 golang.org/x/sys/unix
 golang.org/x/sys/windows
@@ -189,31 +193,31 @@ golang.org/x/sys/windows
 golang.org/x/text/encoding
 golang.org/x/text/encoding/charmap
 golang.org/x/text/encoding/htmlindex
-golang.org/x/text/encoding/internal
+golang.org/x/text/transform
+golang.org/x/text/unicode/norm
 golang.org/x/text/encoding/internal/identifier
+golang.org/x/text/encoding/internal
 golang.org/x/text/encoding/japanese
 golang.org/x/text/encoding/korean
 golang.org/x/text/encoding/simplifiedchinese
 golang.org/x/text/encoding/traditionalchinese
 golang.org/x/text/encoding/unicode
+golang.org/x/text/language
+golang.org/x/text/internal/utf8internal
+golang.org/x/text/runes
 golang.org/x/text/internal/language
 golang.org/x/text/internal/language/compact
 golang.org/x/text/internal/tag
-golang.org/x/text/internal/utf8internal
-golang.org/x/text/language
-golang.org/x/text/runes
-golang.org/x/text/transform
-golang.org/x/text/unicode/norm
 # golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd
-golang.org/x/tools/go/ast/astutil
-golang.org/x/tools/go/gcexportdata
-golang.org/x/tools/go/internal/gcimporter
-golang.org/x/tools/go/internal/packagesdriver
 golang.org/x/tools/go/packages
 golang.org/x/tools/imports
-golang.org/x/tools/internal/fastwalk
+golang.org/x/tools/go/ast/astutil
+golang.org/x/tools/go/gcexportdata
+golang.org/x/tools/go/internal/packagesdriver
 golang.org/x/tools/internal/gopathwalk
-golang.org/x/tools/internal/module
 golang.org/x/tools/internal/semver
+golang.org/x/tools/internal/module
+golang.org/x/tools/go/internal/gcimporter
+golang.org/x/tools/internal/fastwalk
 # gopkg.in/yaml.v2 v2.2.2
 gopkg.in/yaml.v2
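The newly vendored github.com/gorilla/securecookie and github.com/gorilla/sessions entries above provide cookie-backed sessions. A minimal, self-contained sketch of the cookie-store API they expose; the secret key, cookie name, and max age below are placeholders, not this project's actual configuration:

// Minimal sketch of the gorilla/sessions cookie-store API provided by the
// newly vendored modules. Key, cookie name, and max age are placeholders.
package main

import (
    "fmt"
    "net/http"

    "github.com/gorilla/sessions"
)

var store = sessions.NewCookieStore([]byte("placeholder-secret-key"))

func init() {
    // Session cookie lifetime in seconds (placeholder value).
    store.MaxAge(3600)
}

func handler(w http.ResponseWriter, r *http.Request) {
    // Get returns the existing session for this cookie name, or a new one.
    session, _ := store.Get(r, "example-session")
    session.Values["userID"] = "demo"
    if err := session.Save(r, w); err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
    fmt.Fprintln(w, "session cookie set")
}

func main() {
    http.HandleFunc("/", handler)
    http.ListenAndServe(":8080", nil)
}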