update dependencies

dhax 2017-10-21 18:30:08 +02:00
parent fce1b99683
commit 397e9c0842
164 changed files with 5207 additions and 2213 deletions

View file

@ -1,5 +1,11 @@
## `jwt-go` Version History
#### 3.1.0
* Improvements to `jwt` command line tool
* Added `SkipClaimsValidation` option to `Parser`
* Documentation updates
#### 3.0.0
* **Compatibility Breaking Changes**: See MIGRATION_GUIDE.md for tips on updating your code

View file

@ -1,5 +1,11 @@
# Changelog
## v3.3.0 (2017-10-10)
- New chi.RegisterMethod(method) to add support for custom HTTP methods, see _examples/custom-method for usage
- Deprecated LINK and UNLINK methods from the default list, please use `chi.RegisterMethod("LINK")` and `chi.RegisterMethod("UNLINK")` in an `init()` function
## v3.2.1 (2017-08-31)
- Add new `Match(rctx *Context, method, path string) bool` method to `Routes` interface
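
Not part of this commit, but as a rough sketch of the `Match` addition noted above (it assumes `*chi.Mux` satisfies the `Routes` interface, which the vendored source should confirm):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-chi/chi"
)

func main() {
	r := chi.NewRouter()
	r.Get("/users/{id}", func(w http.ResponseWriter, r *http.Request) {})

	// Probe the routing tree without serving an actual request; Match reports
	// whether a handler is registered for the method/path pair.
	rctx := chi.NewRouteContext()
	fmt.Println("GET /users/42 matches:", r.Match(rctx, "GET", "/users/42"))
	fmt.Println("GET /missing matches:", r.Match(chi.NewRouteContext(), "GET", "/missing"))
}
```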

View file

@ -1,4 +1,4 @@
Copyright (c) 2015-present Peter Kieltyka (https://github.com/pkieltyka)
Copyright (c) 2015-present Peter Kieltyka (https://github.com/pkieltyka), Google Inc.
MIT License

View file

@ -337,16 +337,19 @@ with `net/http` can be used with chi's mux.
| WithValue | Short-hand middleware to set a key/value on the request context |
-----------------------------------------------------------------------------------------------------------
### Auxiliary middlewares
### Auxiliary middlewares & packages
-----------------------------------------------------------------------------------------------------------
| package | description |
|:-------------------------------------------------|:------------------------------------------------------
| [cors](https://github.com/go-chi/cors) | Cross-origin resource sharing (CORS) |
| [jwtauth](https://github.com/go-chi/jwtauth) | JWT authentication |
| [httpcoala](https://github.com/go-chi/httpcoala) | HTTP request coalescer |
| [chi-authz](https://github.com/casbin/chi-authz) | Request ACL via https://github.com/hsluoyz/casbin |
-----------------------------------------------------------------------------------------------------------
Please see https://github.com/go-chi for additional packages.
-------------------------------------------------------------------------------------------------------------
| package | description |
|:---------------------------------------------------|:------------------------------------------------------
| [cors](https://github.com/go-chi/cors) | Cross-origin resource sharing (CORS) |
| [jwtauth](https://github.com/go-chi/jwtauth) | JWT authentication |
| [hostrouter](https://github.com/go-chi/hostrouter) | Domain/host based request routing |
| [httpcoala](https://github.com/go-chi/httpcoala) | HTTP request coalescer |
| [chi-authz](https://github.com/casbin/chi-authz) | Request ACL via https://github.com/hsluoyz/casbin |
-------------------------------------------------------------------------------------------------------------
Please [submit a PR](./CONTRIBUTING.md) if you'd like to include a link to a chi-compatible middleware.

View file

@ -0,0 +1,33 @@
package main
import (
"net/http"
"github.com/go-chi/chi"
"github.com/go-chi/chi/middleware"
)
func init() {
chi.RegisterMethod("LINK")
chi.RegisterMethod("UNLINK")
chi.RegisterMethod("WOOHOO")
}
func main() {
r := chi.NewRouter()
r.Use(middleware.RequestID)
r.Use(middleware.Logger)
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("hello world"))
})
r.MethodFunc("LINK", "/link", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("custom link method"))
})
r.MethodFunc("WOOHOO", "/woo", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("custom woohoo method"))
})
r.HandleFunc("/everything", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("capturing all standard http methods, as well as LINK, UNLINK and WOOHOO"))
})
http.ListenAndServe(":3333", r)
}

View file

@ -1,4 +1,4 @@
// +build go1.8
// +build go1.8 appengine
package middleware

View file

@ -1,4 +1,4 @@
// +build go1.8
// +build go1.8 appengine
package middleware

View file

@ -1,4 +1,4 @@
// +build go1.8
// +build go1.8 appengine
package middleware

vendor/github.com/go-chi/chi/tree.go (generated, vendored; 42 changed lines)
View file

@ -6,44 +6,59 @@ package chi
import (
"fmt"
"math"
"net/http"
"regexp"
"sort"
"strconv"
"strings"
)
type methodTyp int
const (
mCONNECT methodTyp = 1 << iota
mSTUB methodTyp = 1 << iota
mCONNECT
mDELETE
mGET
mHEAD
mLINK
mOPTIONS
mPATCH
mPOST
mPUT
mTRACE
mUNLINK
mSTUB
mALL methodTyp = mCONNECT | mDELETE | mGET | mHEAD | mLINK |
mOPTIONS | mPATCH | mPOST | mPUT | mTRACE | mUNLINK
)
var mALL methodTyp = mCONNECT | mDELETE | mGET | mHEAD |
mOPTIONS | mPATCH | mPOST | mPUT | mTRACE
var methodMap = map[string]methodTyp{
"CONNECT": mCONNECT,
"DELETE": mDELETE,
"GET": mGET,
"HEAD": mHEAD,
"LINK": mLINK,
"OPTIONS": mOPTIONS,
"PATCH": mPATCH,
"POST": mPOST,
"PUT": mPUT,
"TRACE": mTRACE,
"UNLINK": mUNLINK,
}
func RegisterMethod(method string) {
if method == "" {
return
}
method = strings.ToUpper(method)
if _, ok := methodMap[method]; ok {
return
}
n := len(methodMap)
if n > strconv.IntSize {
panic(fmt.Sprintf("chi: max number of methods reached (%d)", strconv.IntSize))
}
mt := methodTyp(math.Exp2(float64(n)))
methodMap[method] = mt
mALL |= mt
}
type nodeTyp uint8
@ -676,6 +691,15 @@ func patNextSegment(pattern string) (nodeTyp, string, string, byte, int, int) {
key = key[:idx]
}
if len(rexpat) > 0 {
if rexpat[0] != '^' {
rexpat = "^" + rexpat
}
if rexpat[len(rexpat)-1] != '$' {
rexpat = rexpat + "$"
}
}
return nt, key, rexpat, tail, ps, pe
}

View file

@ -333,6 +333,31 @@ func TestTreeRegexp(t *testing.T) {
}
}
func TestTreeRegexMatchWholeParam(t *testing.T) {
hStub1 := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {})
rctx := NewRouteContext()
tr := &node{}
tr.InsertRoute(mGET, "/{id:[0-9]+}", hStub1)
tests := []struct {
url string
expectedHandler http.Handler
}{
{url: "/13", expectedHandler: hStub1},
{url: "/a13", expectedHandler: nil},
{url: "/13.jpg", expectedHandler: nil},
{url: "/a13.jpg", expectedHandler: nil},
}
for _, tc := range tests {
_, _, handler := tr.FindRoute(rctx, mGET, tc.url)
if fmt.Sprintf("%v", tc.expectedHandler) != fmt.Sprintf("%v", handler) {
t.Errorf("expecting handler:%v , got:%v", tc.expectedHandler, handler)
}
}
}
func TestTreeFindPattern(t *testing.T) {
hStub1 := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {})
hStub2 := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {})

View file

@ -1,2 +1,3 @@
vendor/
Gopkg.lock
.idea/

View file

@ -1,6 +1,6 @@
[[constraint]]
name = "github.com/dgrijalva/jwt-go"
version = "^3.0.0"
version = "^3.1.0"
[[constraint]]
name = "github.com/go-chi/chi"
version = "^3.0.0"

View file

@ -1,5 +1,6 @@
jwtauth - JWT authentication middleware for Go HTTP services
============================================================
[![GoDoc Widget]][GoDoc]
The `jwtauth` http middleware package provides a simple way to verify a JWT token
from a http request and send the result down the request context (`context.Context`).
@ -22,12 +23,11 @@ plain-text payload for all unverified tokens and passes the good ones through. You can
also copy the Authenticator and customize it to handle invalid tokens to better fit
your flow (i.e. with a JSON error response body).
The `Verifier` will search for a JWT token in a http request, in the order:
By default, the `Verifier` will search for a JWT token in an HTTP request, in the order:
1. 'jwt' URI query parameter
2. 'Authorization: BEARER T' request header
3. Cookie 'jwt' value
4. (optional), use `jwtauth.Verify("state")` for additional query/cookie parameter aliases
3. 'jwt' Cookie value
The first JWT string that is found as a query parameter, authorization header
or cookie header is then decoded by the `jwt-go` library and a *jwt.Token
@ -39,6 +39,11 @@ be the generic `jwtauth.Authenticator` middleware or your own custom handler
which checks the request context jwt token and error to prepare a custom
http response.
Note: jwtauth supports custom verification sequences for finding a token
from a request by using the `Verify` middleware instantiator directly. The default
`Verifier` is instantiated by calling `Verify(ja, TokenFromQuery, TokenFromHeader, TokenFromCookie)`.
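
As a minimal sketch of that note (not part of this diff), wiring a custom search order with the `Verify`, `TokenFromHeader` and `TokenFromCookie` identifiers added by this update might look like:

```go
package main

import (
	"net/http"

	"github.com/go-chi/chi"
	"github.com/go-chi/jwtauth"
)

func main() {
	tokenAuth := jwtauth.New("HS256", []byte("secretpass"), nil)

	r := chi.NewRouter()
	// Look for the token in the Authorization header first, then in the
	// "jwt" cookie, and never in the query string (which the default
	// Verifier also checks).
	r.Use(jwtauth.Verify(tokenAuth, jwtauth.TokenFromHeader, jwtauth.TokenFromCookie))
	r.Use(jwtauth.Authenticator)

	r.Get("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("verified"))
	})

	http.ListenAndServe(":3333", r)
}
```

Skipping `TokenFromQuery` here avoids accepting tokens through URL query strings, which tend to end up in access logs.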
# Usage
See the full [example](https://github.com/go-chi/jwtauth/blob/master/_example/main.go).
@ -105,3 +110,6 @@ func router() http.Handler {
# LICENSE
[MIT](/LICENSE)
[GoDoc]: https://godoc.org/github.com/go-chi/jwtauth
[GoDoc Widget]: https://godoc.org/github.com/go-chi/jwtauth?status.svg

View file

@ -21,16 +21,44 @@ var (
ErrExpired = errors.New("jwtauth: token is expired")
)
var (
// TokenFromCookie tries to retrieve the token string from a cookie named
// "jwt".
TokenFromCookie = func(r *http.Request) string {
cookie, err := r.Cookie("jwt")
if err != nil {
return ""
}
return cookie.Value
}
// TokenFromHeader tries to retrieve the token string from the
// "Authorization" request header: "Authorization: BEARER T".
TokenFromHeader = func(r *http.Request) string {
// Get token from authorization header.
bearer := r.Header.Get("Authorization")
if len(bearer) > 7 && strings.ToUpper(bearer[0:6]) == "BEARER" {
return bearer[7:]
}
return ""
}
// TokenFromQuery tries to retrieve the token string from the "jwt" URI
// query parameter.
TokenFromQuery = func(r *http.Request) string {
// Get token from query param named "jwt".
return r.URL.Query().Get("jwt")
}
)
type JwtAuth struct {
signKey []byte
verifyKey []byte
signKey interface{}
verifyKey interface{}
signer jwt.SigningMethod
parser *jwt.Parser
}
// New creates a JwtAuth authenticator instance that provides middleware handlers
// and encoding/decoding functions for JWT signing.
func New(alg string, signKey []byte, verifyKey []byte) *JwtAuth {
func New(alg string, signKey interface{}, verifyKey interface{}) *JwtAuth {
return NewWithParser(alg, &jwt.Parser{}, signKey, verifyKey)
}
@ -40,7 +68,7 @@ func New(alg string, signKey []byte, verifyKey []byte) *JwtAuth {
// We explicitly toggle `SkipClaimsValidation` in the `jwt-go` parser so that
// we can control when the claims are validated - in our case, by the Verifier
// http middleware handler.
func NewWithParser(alg string, parser *jwt.Parser, signKey []byte, verifyKey []byte) *JwtAuth {
func NewWithParser(alg string, parser *jwt.Parser, signKey interface{}, verifyKey interface{}) *JwtAuth {
parser.SkipClaimsValidation = true
return &JwtAuth{
signKey: signKey,
@ -68,15 +96,15 @@ func NewWithParser(alg string, parser *jwt.Parser, signKey []byte, verifyKey []b
// http response.
func Verifier(ja *JwtAuth) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return Verify(ja, "")(next)
return Verify(ja, TokenFromQuery, TokenFromHeader, TokenFromCookie)(next)
}
}
func Verify(ja *JwtAuth, paramAliases ...string) func(http.Handler) http.Handler {
func Verify(ja *JwtAuth, findTokenFns ...func(r *http.Request) string) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
hfn := func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
token, err := VerifyRequest(ja, r, paramAliases...)
token, err := VerifyRequest(ja, r, findTokenFns...)
ctx = NewContext(ctx, token, err)
next.ServeHTTP(w, r.WithContext(ctx))
}
@ -84,37 +112,17 @@ func Verify(ja *JwtAuth, paramAliases ...string) func(http.Handler) http.Handler
}
}
func VerifyRequest(ja *JwtAuth, r *http.Request, paramAliases ...string) (*jwt.Token, error) {
func VerifyRequest(ja *JwtAuth, r *http.Request, findTokenFns ...func(r *http.Request) string) (*jwt.Token, error) {
var tokenStr string
var err error
// Get token from query params
tokenStr = r.URL.Query().Get("jwt")
// Get token from other param aliases
if tokenStr == "" && paramAliases != nil && len(paramAliases) > 0 {
for _, p := range paramAliases {
tokenStr = r.URL.Query().Get(p)
if tokenStr != "" {
break
}
}
}
// Get token from authorization header
if tokenStr == "" {
bearer := r.Header.Get("Authorization")
if len(bearer) > 7 && strings.ToUpper(bearer[0:6]) == "BEARER" {
tokenStr = bearer[7:]
}
}
// Get token from cookie
if tokenStr == "" {
// TODO: paramAliases should apply to cookies too..
cookie, err := r.Cookie("jwt")
if err == nil {
tokenStr = cookie.Value
// Extract the token string from the request by calling the token find
// functions in the order they were provided. Extraction stops as soon as a
// function returns a non-empty string.
for _, fn := range findTokenFns {
tokenStr = fn(r)
if tokenStr != "" {
break
}
}
@ -127,24 +135,17 @@ func VerifyRequest(ja *JwtAuth, r *http.Request, paramAliases ...string) (*jwt.T
case "token is expired":
err = ErrExpired
}
// ctx = NewContext(ctx, token, err)
// next.ServeHTTP(w, r.WithContext(ctx))
return token, err
}
if token == nil || !token.Valid || token.Method != ja.signer {
err = ErrUnauthorized
// ctx = NewContext(ctx, token, err)
// next.ServeHTTP(w, r.WithContext(ctx))
return token, err
}
// Check expiry via "exp" claim
if IsExpired(token) {
err = ErrExpired
// ctx = NewContext(ctx, token, err)
// next.ServeHTTP(w, r.WithContext(ctx))
return token, err
}
@ -173,7 +174,7 @@ func (ja *JwtAuth) Decode(tokenString string) (t *jwt.Token, err error) {
}
func (ja *JwtAuth) keyFunc(t *jwt.Token) (interface{}, error) {
if ja.verifyKey != nil && len(ja.verifyKey) > 0 {
if ja.verifyKey != nil {
return ja.verifyKey, nil
} else {
return ja.signKey, nil

View file

@ -1,12 +1,15 @@
package jwtauth_test
import (
"crypto/x509"
"encoding/pem"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"net/http/httptest"
"reflect"
"testing"
"time"
@ -16,22 +19,83 @@ import (
)
var (
TokenAuth *jwtauth.JwtAuth
TokenSecret = []byte("secretpass")
TokenAuthHS256 *jwtauth.JwtAuth
TokenSecret = []byte("secretpass")
TokenAuthRS256 *jwtauth.JwtAuth
PrivateKeyRS256String = `-----BEGIN RSA PRIVATE KEY-----
MIIBOwIBAAJBALxo3PCjFw4QjgOX06QCJIJBnXXNiEYwDLxxa5/7QyH6y77nCRQy
J3x3UwF9rUD0RCsp4sNdX5kOQ9PUyHyOtCUCAwEAAQJARjFLHtuj2zmPrwcBcjja
IS0Q3LKV8pA0LoCS+CdD+4QwCxeKFq0yEMZtMvcQOfqo9x9oAywFClMSlLRyl7ng
gQIhAOyerGbcdQxxwjwGpLS61Mprf4n2HzjwISg20cEEH1tfAiEAy9dXmgQpDPir
C6Q9QdLXpNgSB+o5CDqfor7TTyTCovsCIQDNCfpu795luDYN+dvD2JoIBfrwu9v2
ZO72f/pm/YGGlQIgUdRXyW9kH13wJFNBeBwxD27iBiVj0cbe8NFUONBUBmMCIQCN
jVK4eujt1lm/m60TlEhaWBC3p+3aPT2TqFPUigJ3RQ==
-----END RSA PRIVATE KEY-----
`
PublicKeyRS256String = `-----BEGIN PUBLIC KEY-----
MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBALxo3PCjFw4QjgOX06QCJIJBnXXNiEYw
DLxxa5/7QyH6y77nCRQyJ3x3UwF9rUD0RCsp4sNdX5kOQ9PUyHyOtCUCAwEAAQ==
-----END PUBLIC KEY-----
`
)
func init() {
TokenAuth = jwtauth.New("HS256", TokenSecret, nil)
TokenAuthHS256 = jwtauth.New("HS256", TokenSecret, nil)
}
//
// Tests
//
func TestSimpleRSA(t *testing.T) {
privateKeyBlock, _ := pem.Decode([]byte(PrivateKeyRS256String))
privateKey, err := x509.ParsePKCS1PrivateKey(privateKeyBlock.Bytes)
if err != nil {
t.Fatalf(err.Error())
}
publicKeyBlock, _ := pem.Decode([]byte(PublicKeyRS256String))
publicKey, err := x509.ParsePKIXPublicKey(publicKeyBlock.Bytes)
if err != nil {
t.Fatalf(err.Error())
}
TokenAuthRS256 = jwtauth.New("RS256", privateKey, publicKey)
claims := jwtauth.Claims{
"key": "val",
"key2": "val2",
"key3": "val3",
}
_, tokenString, err := TokenAuthRS256.Encode(claims)
if err != nil {
t.Fatalf("Failed to encode claims %s\n", err.Error())
}
token, err := TokenAuthRS256.Decode(tokenString)
if err != nil {
t.Fatalf("Failed to decode token string %s\n", err.Error())
}
if !reflect.DeepEqual(claims, jwtauth.Claims(token.Claims.(jwt.MapClaims))) {
t.Fatalf("The decoded claims don't match the original ones\n")
}
}
func TestSimple(t *testing.T) {
r := chi.NewRouter()
r.Use(jwtauth.Verifier(TokenAuth), jwtauth.Authenticator)
r.Use(jwtauth.Verifier(TokenAuthHS256), jwtauth.Authenticator)
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("welcome"))
@ -76,7 +140,7 @@ func TestMore(t *testing.T) {
// Protected routes
r.Group(func(r chi.Router) {
r.Use(jwtauth.Verifier(TokenAuth))
r.Use(jwtauth.Verifier(TokenAuthHS256))
authenticator := func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {

View file

@ -10,6 +10,7 @@ addons:
go:
- 1.7
- 1.8
- 1.9
- tip
matrix:

View file

@ -1,5 +1,6 @@
# SQL migrations for Golang and PostgreSQL
[![Build Status](https://travis-ci.org/go-pg/migrations.svg)](https://travis-ci.org/go-pg/migrations)
[![GoDoc](https://godoc.org/github.com/go-pg/migrations?status.svg)](https://godoc.org/github.com/go-pg/migrations)
This package allows you to run migrations on your PostgreSQL database using [Golang Postgres client](https://github.com/go-pg/pg). See [example](example) for details.
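
For orientation (not part of this diff), a rough sketch of the register/run flow this README describes; `migrations.Register`, `migrations.Run` and `pg.Connect` follow the package's documented API of this era, so verify the exact signatures against the vendored source:

```go
package main

import (
	"flag"
	"fmt"

	"github.com/go-pg/migrations"
	"github.com/go-pg/pg"
)

func init() {
	// Each migration registers an up and a down function.
	migrations.Register(func(db migrations.DB) error {
		_, err := db.Exec(`CREATE TABLE my_table (id bigserial PRIMARY KEY)`)
		return err
	}, func(db migrations.DB) error {
		_, err := db.Exec(`DROP TABLE my_table`)
		return err
	})
}

func main() {
	flag.Parse()
	db := pg.Connect(&pg.Options{User: "postgres"})

	// e.g. "up", "down" or "version" as command-line arguments.
	oldVersion, newVersion, err := migrations.Run(db, flag.Args()...)
	if err != nil {
		panic(err)
	}
	if newVersion != oldVersion {
		fmt.Printf("migrated from version %d to %d\n", oldVersion, newVersion)
	} else {
		fmt.Printf("version is %d\n", oldVersion)
	}
}
```

Note that the `newVersion = oldVersion` line added in migrations.go below makes the no-op case (nothing to migrate) report the current version instead of zero.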

View file

@ -80,6 +80,7 @@ func RunMigrations(db DB, migrations []Migration, a ...string) (oldVersion, newV
if err != nil {
return
}
newVersion = oldVersion
switch cmd {
case "create":

vendor/github.com/go-pg/pg/db.go (generated, vendored; 16 changed lines)
View file

@ -151,11 +151,11 @@ func (db *DB) Close() error {
// Exec executes a query ignoring returned rows. The params are for any
// placeholders in the query.
func (db *DB) Exec(query interface{}, params ...interface{}) (res orm.Result, err error) {
for i := 0; i <= db.opt.MaxRetries; i++ {
for attempt := 0; attempt <= db.opt.MaxRetries; attempt++ {
var cn *pool.Conn
if i >= 1 {
time.Sleep(db.retryBackoff(i - 1))
if attempt >= 1 {
time.Sleep(db.retryBackoff(attempt - 1))
}
cn, err = db.conn()
@ -166,7 +166,7 @@ func (db *DB) Exec(query interface{}, params ...interface{}) (res orm.Result, er
start := time.Now()
res, err = db.simpleQuery(cn, query, params...)
db.freeConn(cn, err)
db.queryProcessed(db, start, query, params, res, err)
db.queryProcessed(db, start, query, params, attempt, res, err)
if !db.shouldRetry(err) {
break
@ -193,11 +193,11 @@ func (db *DB) ExecOne(query interface{}, params ...interface{}) (orm.Result, err
// Query executes a query that returns rows, typically a SELECT.
// The params are for any placeholders in the query.
func (db *DB) Query(model, query interface{}, params ...interface{}) (res orm.Result, err error) {
for i := 0; i <= db.opt.MaxRetries; i++ {
for attempt := 0; attempt <= db.opt.MaxRetries; attempt++ {
var cn *pool.Conn
if i >= 1 {
time.Sleep(db.retryBackoff(i - 1))
if attempt >= 1 {
time.Sleep(db.retryBackoff(attempt - 1))
}
cn, err = db.conn()
@ -208,7 +208,7 @@ func (db *DB) Query(model, query interface{}, params ...interface{}) (res orm.Re
start := time.Now()
res, err = db.simpleQueryData(cn, model, query, params...)
db.freeConn(cn, err)
db.queryProcessed(db, start, query, params, res, err)
db.queryProcessed(db, start, query, params, attempt, res, err)
if !db.shouldRetry(err) {
break

View file

@ -735,16 +735,41 @@ func ExampleDB_Model_manyToMany() {
func ExampleDB_Update() {
db := modelDB()
err := db.Update(&Book{
Id: 1,
Title: "updated book 1",
})
book := &Book{Id: 1}
err := db.Select(book)
if err != nil {
panic(err)
}
var book Book
err = db.Model(&book).Where("id = ?", 1).Select()
book.Title = "updated book 1"
err = db.Update(book)
if err != nil {
panic(err)
}
err = db.Select(book)
if err != nil {
panic(err)
}
fmt.Println(book)
// Output: Book<Id=1 Title="updated book 1">
}
func ExampleDB_Update_notNull() {
db := modelDB()
book := &Book{
Id: 1,
Title: "updated book 1",
}
_, err := db.Model(book).UpdateNotNull()
if err != nil {
panic(err)
}
book = new(Book)
err = db.Model(book).Where("id = ?", 1).Select()
if err != nil {
panic(err)
}

vendor/github.com/go-pg/pg/hook.go (generated, vendored; 23 changed lines)
View file

@ -25,11 +25,12 @@ type QueryProcessedEvent struct {
File string
Line int
DB orm.DB
Query interface{}
Params []interface{}
Result orm.Result
Error error
DB orm.DB
Query interface{}
Params []interface{}
Attempt int
Result orm.Result
Error error
}
func (ev *QueryProcessedEvent) UnformattedQuery() (string, error) {
@ -74,6 +75,7 @@ func (db *DB) queryProcessed(
start time.Time,
query interface{},
params []interface{},
attempt int,
res orm.Result,
err error,
) {
@ -88,11 +90,12 @@ func (db *DB) queryProcessed(
File: file,
Line: line,
DB: ormDB,
Query: query,
Params: params,
Result: res,
Error: err,
DB: ormDB,
Query: query,
Params: params,
Attempt: attempt,
Result: res,
Error: err,
}
for _, hook := range db.queryProcessedHooks {
hook(event)
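
Not part of this diff: a sketch of how a query hook might consume the new `Attempt` field. `OnQueryProcessed` is assumed from go-pg's public hook API of this vintage; `UnformattedQuery` appears in the hunk above.

```go
package main

import (
	"log"

	"github.com/go-pg/pg"
)

func main() {
	db := pg.Connect(&pg.Options{User: "postgres"})
	defer db.Close()

	// Log queries that needed at least one retry, together with the attempt
	// counter that the hunks above now thread through queryProcessed.
	db.OnQueryProcessed(func(event *pg.QueryProcessedEvent) {
		if event.Attempt > 0 {
			q, _ := event.UnformattedQuery()
			log.Printf("retried query (attempt %d): %s err=%v", event.Attempt, q, event.Error)
		}
	})

	_, _ = db.Exec("SELECT 1")
}
```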

View file

@ -61,8 +61,10 @@ type Options struct {
type ConnPool struct {
opt *Options
dialErrorsNum uint32 // atomic
_lastDialError atomic.Value
dialErrorsNum uint32 // atomic
lastDialErrorMu sync.RWMutex
lastDialError error
queue chan struct{}
@ -101,7 +103,7 @@ func (p *ConnPool) NewConn() (*Conn, error) {
}
if atomic.LoadUint32(&p.dialErrorsNum) >= uint32(p.opt.PoolSize) {
return nil, p.lastDialError()
return nil, p.getLastDialError()
}
netConn, err := p.opt.Dialer()
@ -142,11 +144,16 @@ func (p *ConnPool) tryDial() {
}
func (p *ConnPool) setLastDialError(err error) {
p._lastDialError.Store(err)
p.lastDialErrorMu.Lock()
p.lastDialError = err
p.lastDialErrorMu.Unlock()
}
func (p *ConnPool) lastDialError() error {
return p._lastDialError.Load().(error)
func (p *ConnPool) getLastDialError() error {
p.lastDialErrorMu.RLock()
err := p.lastDialError
p.lastDialErrorMu.RUnlock()
return err
}
func (p *ConnPool) isStaleConn(cn *Conn) bool {

View file

@ -75,12 +75,6 @@ type Options struct {
// Frequency of idle checks.
// Default is 1 minute.
IdleCheckFrequency time.Duration
// When true Tx does not issue BEGIN, COMMIT, or ROLLBACK.
// Also underlying database connection is immediately returned to the pool.
// This is primarily useful for running your database tests in one big
// transaction, because PostgreSQL does not support nested transactions.
DisableTransaction bool
}
func (opt *Options) init() {

View file

@ -20,7 +20,7 @@ type Field struct {
GoName string // struct field name, e.g. Id
SQLName string // SQL name, .e.g. id
Column types.Q // escaped SQL name
Column types.Q // escaped SQL name, e.g. "id"
SQLType string
Index []int
Default types.Q

View file

@ -52,36 +52,41 @@ func (q insertQuery) AppendQuery(b []byte) ([]byte, error) {
} else {
b = q.q.appendFirstTable(b)
}
b = append(b, " ("...)
if q.q.hasModel() {
b = appendColumns(b, table.Fields)
} else if q.q.columns != nil {
b = q.q.appendColumns(b)
}
b = append(b, ')')
if q.q.hasModel() {
b = append(b, " VALUES ("...)
if q.q.hasOtherTables() && q.q.columns != nil {
b = append(b, " ("...)
b = q.q.appendColumns(b)
b = append(b, ")"...)
b = append(b, " SELECT * FROM "...)
b = q.q.appendOtherTables(b)
} else {
fields, err := q.q.getFields()
if err != nil {
return nil, err
}
if len(fields) == 0 {
fields = table.Fields
}
b = append(b, " ("...)
b = appendColumns(b, fields)
b = append(b, ") VALUES ("...)
if value.Kind() == reflect.Struct {
b = q.appendValues(b, table.Fields, value)
b = q.appendValues(b, fields, value)
} else {
for i := 0; i < value.Len(); i++ {
el := value.Index(i)
if el.Kind() == reflect.Interface {
el = el.Elem()
}
b = q.appendValues(b, table.Fields, reflect.Indirect(el))
b = q.appendValues(b, fields, reflect.Indirect(el))
if i != value.Len()-1 {
b = append(b, "), ("...)
}
}
}
b = append(b, ')')
}
if q.q.hasOtherTables() {
b = append(b, " SELECT * FROM "...)
b = q.q.appendOtherTables(b)
b = append(b, ")"...)
}
if q.q.onConflict != nil {

View file

@ -43,6 +43,18 @@ type InsertQTest struct {
}
var _ = Describe("Insert", func() {
It("supports Column", func() {
model := &InsertTest{
Id: 1,
Value: "hello",
}
q := NewQuery(nil, model).Column("id")
b, err := insertQuery{q: q}.AppendQuery(nil)
Expect(err).NotTo(HaveOccurred())
Expect(string(b)).To(Equal(`INSERT INTO "insert_tests" ("id") VALUES (1)`))
})
It("multi inserts", func() {
q := NewQuery(nil, &InsertTest{
Id: 1,

View file

@ -188,7 +188,7 @@ func (q *Query) getDataFields() ([]*Field, error) {
return q._getFields(true)
}
func (q *Query) _getFields(filterPKs bool) ([]*Field, error) {
func (q *Query) _getFields(omitPKs bool) ([]*Field, error) {
table := q.model.Table()
var columns []*Field
@ -203,7 +203,7 @@ func (q *Query) _getFields(filterPKs bool) ([]*Field, error) {
return nil, err
}
if filterPKs && field.HasFlag(PrimaryKeyFlag) {
if omitPKs && field.HasFlag(PrimaryKeyFlag) {
continue
}
@ -595,10 +595,16 @@ func (q *Query) SelectOrInsert(values ...interface{}) (inserted bool, err error)
return false, q.stickyErr
}
insertq := q
if len(insertq.columns) > 0 {
insertq = insertq.Copy()
insertq.columns = nil
}
var insertErr error
for i := 0; i < 5; i++ {
if i >= 2 {
time.Sleep(internal.RetryBackoff(i-2, 250*time.Millisecond, 4*time.Second))
time.Sleep(internal.RetryBackoff(i-2, 250*time.Millisecond, 5*time.Second))
}
err := q.Select(values...)
@ -609,7 +615,7 @@ func (q *Query) SelectOrInsert(values ...interface{}) (inserted bool, err error)
return false, err
}
res, err := q.Insert(values...)
res, err := insertq.Insert(values...)
if err != nil {
insertErr = err
if pgErr, ok := err.(internal.PGError); ok {
@ -636,12 +642,21 @@ func (q *Query) SelectOrInsert(values ...interface{}) (inserted bool, err error)
}
// Update updates the model.
func (q *Query) Update(values ...interface{}) (Result, error) {
func (q *Query) Update(scan ...interface{}) (Result, error) {
return q.update(scan, false)
}
// UpdateNotNull updates the model, omitting columns with zero (empty) values.
func (q *Query) UpdateNotNull(scan ...interface{}) (Result, error) {
return q.update(scan, true)
}
func (q *Query) update(scan []interface{}, omitZero bool) (Result, error) {
if q.stickyErr != nil {
return nil, q.stickyErr
}
model, err := q.newModel(values...)
model, err := q.newModel(scan...)
if err != nil {
return nil, err
}
@ -652,7 +667,7 @@ func (q *Query) Update(values ...interface{}) (Result, error) {
}
}
res, err := q.db.Query(model, updateQuery{q}, q.model)
res, err := q.db.Query(model, updateQuery{q: q, omitZero: omitZero}, q.model)
if err != nil {
return nil, err
}

View file

@ -32,10 +32,10 @@ type Table struct {
Alias types.Q
ModelName string
Fields []*Field
PKs []*Field
Columns []*Field
FieldsMap map[string]*Field
Fields []*Field // PKs + DataFields
PKs []*Field
DataFields []*Field
FieldsMap map[string]*Field
Methods map[string]*Method
Relations map[string]*Relation
@ -71,7 +71,7 @@ func (t *Table) AddField(field *Field) {
if field.HasFlag(PrimaryKeyFlag) {
t.PKs = append(t.PKs, field)
} else {
t.Columns = append(t.Columns, field)
t.DataFields = append(t.DataFields, field)
}
t.FieldsMap[field.SQLName] = field
}

View file

@ -17,6 +17,8 @@ func Update(db DB, model ...interface{}) error {
type updateQuery struct {
q *Query
omitZero bool
}
var _ QueryAppender = (*updateQuery)(nil)
@ -107,12 +109,18 @@ func (q updateQuery) appendSetStruct(b []byte, strct reflect.Value) ([]byte, err
}
if len(fields) == 0 {
fields = q.q.model.Table().Columns
fields = q.q.model.Table().DataFields
}
for i, f := range fields {
if i > 0 {
pos := len(b)
for _, f := range fields {
if q.omitZero && f.OmitZero(strct) {
continue
}
if len(b) != pos {
b = append(b, ", "...)
pos = len(b)
}
b = append(b, f.Column...)
@ -129,7 +137,7 @@ func (q updateQuery) appendSetSlice(b []byte, slice reflect.Value) ([]byte, erro
}
if len(fields) == 0 {
fields = q.q.model.Table().Columns
fields = q.q.model.Table().DataFields
}
for i, f := range fields {

View file

@ -11,6 +11,22 @@ type UpdateTest struct {
}
var _ = Describe("Update", func() {
It("updates model", func() {
q := NewQuery(nil, &UpdateTest{})
b, err := updateQuery{q: q}.AppendQuery(nil)
Expect(err).NotTo(HaveOccurred())
Expect(string(b)).To(Equal(`UPDATE "update_tests" AS "update_test" SET "value" = NULL WHERE "update_test"."id" = NULL`))
})
It("omits zero", func() {
q := NewQuery(nil, &UpdateTest{})
b, err := updateQuery{q: q, omitZero: true}.AppendQuery(nil)
Expect(err).NotTo(HaveOccurred())
Expect(string(b)).To(Equal(`UPDATE "update_tests" AS "update_test" SET WHERE "update_test"."id" = NULL`))
})
It("bulk updates", func() {
q := NewQuery(nil, &UpdateTest{}).
Model(&UpdateTest{
@ -20,7 +36,7 @@ var _ = Describe("Update", func() {
Id: 2,
})
b, err := updateQuery{q}.AppendQuery(nil)
b, err := updateQuery{q: q}.AppendQuery(nil)
Expect(err).NotTo(HaveOccurred())
Expect(string(b)).To(Equal(`UPDATE "update_tests" AS "update_test" SET "value" = _data."value" FROM (VALUES (1, 'hello'::mytype), (2, NULL::mytype)) AS _data("id", "value") WHERE "update_test"."id" = _data."id"`))
})
@ -32,7 +48,7 @@ var _ = Describe("Update", func() {
Table("wrapper").
Where("update_test.id = wrapper.id")
b, err := updateQuery{q}.AppendQuery(nil)
b, err := updateQuery{q: q}.AppendQuery(nil)
Expect(err).NotTo(HaveOccurred())
Expect(string(b)).To(Equal(`WITH "wrapper" AS (SELECT "update_test"."id", "update_test"."value" FROM "update_tests" AS "update_test") UPDATE "update_tests" AS "update_test" SET "value" = NULL FROM "wrapper" WHERE (update_test.id = wrapper.id)`))
})

vendor/github.com/go-pg/pg/stmt.go (generated, vendored; 16 changed lines)
View file

@ -64,14 +64,14 @@ func (stmt *Stmt) exec(params ...interface{}) (orm.Result, error) {
// Exec executes a prepared statement with the given parameters.
func (stmt *Stmt) Exec(params ...interface{}) (res orm.Result, err error) {
for i := 0; i <= stmt.db.opt.MaxRetries; i++ {
if i >= 1 {
time.Sleep(stmt.db.retryBackoff(i - 1))
for attempt := 0; attempt <= stmt.db.opt.MaxRetries; attempt++ {
if attempt >= 1 {
time.Sleep(stmt.db.retryBackoff(attempt - 1))
}
start := time.Now()
res, err = stmt.exec(params...)
stmt.db.queryProcessed(stmt.db, start, stmt.q, params, res, err)
stmt.db.queryProcessed(stmt.db, start, stmt.q, params, attempt, res, err)
if !stmt.db.shouldRetry(err) {
break
@ -123,14 +123,14 @@ func (stmt *Stmt) query(model interface{}, params ...interface{}) (orm.Result, e
// Query executes a prepared query statement with the given parameters.
func (stmt *Stmt) Query(model interface{}, params ...interface{}) (res orm.Result, err error) {
for i := 0; i <= stmt.db.opt.MaxRetries; i++ {
if i >= 1 {
time.Sleep(stmt.db.retryBackoff(i - 1))
for attempt := 0; attempt <= stmt.db.opt.MaxRetries; attempt++ {
if attempt >= 1 {
time.Sleep(stmt.db.retryBackoff(attempt - 1))
}
start := time.Now()
res, err = stmt.query(model, params...)
stmt.db.queryProcessed(stmt.db, start, stmt.q, params, res, err)
stmt.db.queryProcessed(stmt.db, start, stmt.q, params, attempt, res, err)
if !stmt.db.shouldRetry(err) {
break

vendor/github.com/go-pg/pg/tx.go (generated, vendored; 50 changed lines)
View file

@ -36,13 +36,11 @@ func (db *DB) Begin() (*Tx, error) {
db: db,
}
if !db.opt.DisableTransaction {
cn, err := db.conn()
if err != nil {
return nil, err
}
tx.cn = cn
cn, err := db.conn()
if err != nil {
return nil, err
}
tx.cn = cn
if err := tx.begin(); err != nil {
return nil, err
@ -85,29 +83,15 @@ func (tx *Tx) RunInTransaction(fn func(*Tx) error) error {
}
func (tx *Tx) conn() (*pool.Conn, error) {
var cn *pool.Conn
if tx.db.opt.DisableTransaction {
var err error
cn, err = tx.db.conn()
if err != nil {
return nil, err
}
} else {
cn = tx.cn
if cn == nil {
return nil, errTxDone
}
if tx.cn == nil {
return nil, errTxDone
}
cn.SetTimeout(tx.db.opt.ReadTimeout, tx.db.opt.WriteTimeout)
return cn, nil
tx.cn.SetTimeout(tx.db.opt.ReadTimeout, tx.db.opt.WriteTimeout)
return tx.cn, nil
}
func (tx *Tx) freeConn(cn *pool.Conn, err error) {
if tx.db.opt.DisableTransaction {
_ = tx.db.freeConn(cn, err)
}
}
func (tx *Tx) freeConn(cn *pool.Conn, err error) {}
// Stmt returns a transaction-specific prepared statement from an existing statement.
func (tx *Tx) Stmt(stmt *Stmt) *Stmt {
@ -152,7 +136,7 @@ func (tx *Tx) Exec(query interface{}, params ...interface{}) (orm.Result, error)
start := time.Now()
res, err := tx.db.simpleQuery(cn, query, params...)
tx.freeConn(cn, err)
tx.db.queryProcessed(tx, start, query, params, res, err)
tx.db.queryProcessed(tx, start, query, params, 0, res, err)
return res, err
}
@ -180,7 +164,7 @@ func (tx *Tx) Query(model interface{}, query interface{}, params ...interface{})
start := time.Now()
res, err := tx.db.simpleQueryData(cn, model, query, params...)
tx.freeConn(cn, err)
tx.db.queryProcessed(tx, start, query, params, res, err)
tx.db.queryProcessed(tx, start, query, params, 0, res, err)
if err != nil {
return nil, err
@ -279,10 +263,6 @@ func (tx *Tx) FormatQuery(dst []byte, query string, params ...interface{}) []byt
}
func (tx *Tx) begin() error {
if tx.db.opt.DisableTransaction {
return nil
}
_, err := tx.Exec("BEGIN")
if err != nil {
tx.close(err)
@ -292,10 +272,6 @@ func (tx *Tx) begin() error {
// Commit commits the transaction.
func (tx *Tx) Commit() error {
if tx.db.opt.DisableTransaction {
return nil
}
_, err := tx.Exec("COMMIT")
tx.close(err)
return err
@ -303,10 +279,6 @@ func (tx *Tx) Commit() error {
// Rollback aborts the transaction.
func (tx *Tx) Rollback() error {
if tx.db.opt.DisableTransaction {
return nil
}
_, err := tx.Exec("ROLLBACK")
tx.close(err)
return err

View file

@ -3,7 +3,8 @@ sudo: false
language: go
go:
- 1.8
- 1.x
- tip
branches:
only:

View file

@ -573,7 +573,11 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
// Compile the list of all the fields that we're going to be decoding
// from all the structs.
fields := make(map[*reflect.StructField]reflect.Value)
type field struct {
field reflect.StructField
val reflect.Value
}
fields := []field{}
for len(structs) > 0 {
structVal := structs[0]
structs = structs[1:]
@ -616,7 +620,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
}
// Normal struct field, store it away
fields[&fieldType] = structVal.Field(i)
fields = append(fields, field{fieldType, structVal.Field(i)})
}
}
@ -624,26 +628,27 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
decodedFields := make([]string, 0, len(fields))
decodedFieldsVal := make([]reflect.Value, 0)
unusedKeysVal := make([]reflect.Value, 0)
for fieldType, field := range fields {
if !field.IsValid() {
for _, f := range fields {
field, fieldValue := f.field, f.val
if !fieldValue.IsValid() {
// This should never happen
panic("field is not valid")
}
// If we can't set the field, then it is unexported or something,
// and we just continue onwards.
if !field.CanSet() {
if !fieldValue.CanSet() {
continue
}
fieldName := fieldType.Name
fieldName := field.Name
tagValue := fieldType.Tag.Get(tagName)
tagValue := field.Tag.Get(tagName)
tagParts := strings.SplitN(tagValue, ",", 2)
if len(tagParts) >= 2 {
switch tagParts[1] {
case "decodedFields":
decodedFieldsVal = append(decodedFieldsVal, field)
decodedFieldsVal = append(decodedFieldsVal, fieldValue)
continue
case "key":
if item == nil {
@ -654,10 +659,10 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
}
}
field.SetString(item.Keys[0].Token.Value().(string))
fieldValue.SetString(item.Keys[0].Token.Value().(string))
continue
case "unusedKeys":
unusedKeysVal = append(unusedKeysVal, field)
unusedKeysVal = append(unusedKeysVal, fieldValue)
continue
}
}
@ -684,7 +689,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
// because we actually want the value.
fieldName = fmt.Sprintf("%s.%s", name, fieldName)
if len(prefixMatches.Items) > 0 {
if err := d.decode(fieldName, prefixMatches, field); err != nil {
if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil {
return err
}
}
@ -694,12 +699,12 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
decodeNode = &ast.ObjectList{Items: ot.List.Items}
}
if err := d.decode(fieldName, decodeNode, field); err != nil {
if err := d.decode(fieldName, decodeNode, fieldValue); err != nil {
return err
}
}
decodedFields = append(decodedFields, fieldType.Name)
decodedFields = append(decodedFields, field.Name)
}
if len(decodedFieldsVal) > 0 {

View file

@ -351,7 +351,7 @@ func (s *Scanner) scanNumber(ch rune) token.Type {
return token.NUMBER
}
// scanMantissa scans the mantissa begining from the rune. It returns the next
// scanMantissa scans the mantissa beginning from the rune. It returns the next
// non-decimal rune. It's used to determine whether it's a fraction or exponent.
func (s *Scanner) scanMantissa(ch rune) rune {
scanned := false

View file

@ -246,7 +246,7 @@ func (s *Scanner) scanNumber(ch rune) token.Type {
return token.NUMBER
}
// scanMantissa scans the mantissa begining from the rune. It returns the next
// scanMantissa scans the mantissa beginning from the rune. It returns the next
// non-decimal rune. It's used to determine whether it's a fraction or exponent.
func (s *Scanner) scanMantissa(ch rune) rune {
scanned := false

View file

@ -686,7 +686,11 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
// Compile the list of all the fields that we're going to be decoding
// from all the structs.
fields := make(map[*reflect.StructField]reflect.Value)
type field struct {
field reflect.StructField
val reflect.Value
}
fields := []field{}
for len(structs) > 0 {
structVal := structs[0]
structs = structs[1:]
@ -718,14 +722,16 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
}
// Normal struct field, store it away
fields[&fieldType] = structVal.Field(i)
fields = append(fields, field{fieldType, structVal.Field(i)})
}
}
for fieldType, field := range fields {
fieldName := fieldType.Name
// for fieldType, field := range fields {
for _, f := range fields {
field, fieldValue := f.field, f.val
fieldName := field.Name
tagValue := fieldType.Tag.Get(d.config.TagName)
tagValue := field.Tag.Get(d.config.TagName)
tagValue = strings.SplitN(tagValue, ",", 2)[0]
if tagValue != "" {
fieldName = tagValue
@ -760,14 +766,14 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
// Delete the key we're using from the unused map so we stop tracking
delete(dataValKeysUnused, rawMapKey.Interface())
if !field.IsValid() {
if !fieldValue.IsValid() {
// This should never happen
panic("field is not valid")
}
// If we can't set the field, then it is unexported or something,
// and we just continue onwards.
if !field.CanSet() {
if !fieldValue.CanSet() {
continue
}
@ -777,7 +783,7 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
fieldName = fmt.Sprintf("%s.%s", name, fieldName)
}
if err := d.decode(fieldName, rawMapVal.Interface(), field); err != nil {
if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil {
errors = appendErrors(errors, err)
}
}

View file

@ -77,6 +77,7 @@ type Table struct {
headerParams []string
columnsParams []string
footerParams []string
columnsAlign []int
}
// Start New Table
@ -109,7 +110,9 @@ func NewWriter(writer io.Writer) *Table {
borders: Border{Left: true, Right: true, Bottom: true, Top: true},
colSize: -1,
headerParams: []string{},
columnsParams: []string{}}
columnsParams: []string{},
footerParams: []string{},
columnsAlign: []int{}}
return t
}
@ -210,6 +213,22 @@ func (t *Table) SetAlignment(align int) {
t.align = align
}
func (t *Table) SetColumnAlignment(keys []int) {
for _, v := range keys {
switch v {
case ALIGN_CENTER:
break
case ALIGN_LEFT:
break
case ALIGN_RIGHT:
break
default:
v = ALIGN_DEFAULT
}
t.columnsAlign = append(t.columnsAlign, v)
}
}
// Set New Line
func (t *Table) SetNewLine(nl string) {
t.newLine = nl
@ -528,6 +547,15 @@ func (t Table) printRows() {
}
}
func (t *Table) fillAlignment(num int) {
if len(t.columnsAlign) < num {
t.columnsAlign = make([]int, num)
for i := range t.columnsAlign {
t.columnsAlign[i] = t.align
}
}
}
// Print Row Information
// Adjust column alignment based on type
@ -553,6 +581,7 @@ func (t *Table) printRow(columns [][]string, colKey int) {
if len(t.columnsParams) > 0 {
is_esc_seq = true
}
t.fillAlignment(total)
for i, line := range columns {
length := len(line)
@ -579,7 +608,7 @@ func (t *Table) printRow(columns [][]string, colKey int) {
// This would print alignment
// Default alignment would use multiple configuration
switch t.align {
switch t.columnsAlign[y] {
case ALIGN_CENTER: //
fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y]))
case ALIGN_RIGHT:
@ -656,6 +685,7 @@ func (t *Table) printRowMergeCells(writer io.Writer, columns [][]string, colKey
}
var displayCellBorder []bool
t.fillAlignment(total)
for x := 0; x < max; x++ {
for y := 0; y < total; y++ {
@ -681,7 +711,7 @@ func (t *Table) printRowMergeCells(writer io.Writer, columns [][]string, colKey
// This would print alignment
// Default alignment would use multiple configuration
switch t.align {
switch t.columnsAlign[y] {
case ALIGN_CENTER: //
fmt.Fprintf(writer, "%s", Pad(str, SPACE, t.cs[y]))
case ALIGN_RIGHT:

View file

@ -833,3 +833,37 @@ func TestWrapString(t *testing.T) {
t.Errorf("\ngot:\n%v\nwant:\n%v\n", got, want)
}
}
func TestCustomAlign(t *testing.T) {
var (
buf = &bytes.Buffer{}
table = NewWriter(buf)
header = []string{"AAA", "BBB", "CCC"}
data = [][]string{
[]string{"a", "b", "c"},
[]string{"1", "2", "3"},
}
footer = []string{"a", "b", "cccc"}
want = `+-----+-----+-------+
| AAA | BBB | CCC |
+-----+-----+-------+
| a | b | c |
| 1 | 2 | 3 |
+-----+-----+-------+
| A | B | CCCC |
+-----+-----+-------+
`
)
table.SetHeader(header)
table.SetFooter(footer)
table.AppendBulk(data)
table.SetColMinWidth(2, 5)
table.SetColumnAlignment([]int{ALIGN_LEFT, ALIGN_CENTER, ALIGN_RIGHT})
table.Render()
got := buf.String()
if got != want {
t.Errorf("\ngot:\n%s\nwant:\n%s\n", got, want)
}
}

View file

@ -1 +0,0 @@
*.test

View file

@ -1,7 +0,0 @@
language: go
sudo: false
go:
- 1.3.3
- 1.4.3
- 1.5.3
- tip

View file

@ -1,62 +0,0 @@
# buffruneio
[![Tests Status](https://travis-ci.org/pelletier/go-buffruneio.svg?branch=master)](https://travis-ci.org/pelletier/go-buffruneio)
[![GoDoc](https://godoc.org/github.com/pelletier/go-buffruneio?status.svg)](https://godoc.org/github.com/pelletier/go-buffruneio)
Buffruneio is a wrapper around bufio to provide buffered runes access with
unlimited unreads.
```go
import "github.com/pelletier/go-buffruneio"
```
## Examples
```go
import (
"fmt"
"github.com/pelletier/go-buffruneio"
"strings"
)
reader := buffruneio.NewReader(strings.NewReader("abcd"))
fmt.Println(reader.ReadRune()) // 'a'
fmt.Println(reader.ReadRune()) // 'b'
fmt.Println(reader.ReadRune()) // 'c'
reader.UnreadRune()
reader.UnreadRune()
fmt.Println(reader.ReadRune()) // 'b'
fmt.Println(reader.ReadRune()) // 'c'
```
## Documentation
The documentation and additional examples are available at
[godoc.org](http://godoc.org/github.com/pelletier/go-buffruneio).
## Contribute
Feel free to report bugs and patches using GitHub's pull requests system on
[pelletier/go-toml](https://github.com/pelletier/go-buffruneio). Any feedback is
much appreciated!
## LICENSE
Copyright (c) 2016 Thomas Pelletier
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -1,117 +0,0 @@
// Package buffruneio is a wrapper around bufio to provide buffered runes access with unlimited unreads.
package buffruneio
import (
"bufio"
"container/list"
"errors"
"io"
)
// Rune to indicate end of file.
const (
EOF = -(iota + 1)
)
// ErrNoRuneToUnread is returned by UnreadRune() when the read index is already at the beginning of the buffer.
var ErrNoRuneToUnread = errors.New("no rune to unwind")
// Reader implements runes buffering for an io.Reader object.
type Reader struct {
buffer *list.List
current *list.Element
input *bufio.Reader
}
// NewReader returns a new Reader.
func NewReader(rd io.Reader) *Reader {
return &Reader{
buffer: list.New(),
input: bufio.NewReader(rd),
}
}
type runeWithSize struct {
r rune
size int
}
func (rd *Reader) feedBuffer() error {
r, size, err := rd.input.ReadRune()
if err != nil {
if err != io.EOF {
return err
}
r = EOF
}
newRuneWithSize := runeWithSize{r, size}
rd.buffer.PushBack(newRuneWithSize)
if rd.current == nil {
rd.current = rd.buffer.Back()
}
return nil
}
// ReadRune reads the next rune from buffer, or from the underlying reader if needed.
func (rd *Reader) ReadRune() (rune, int, error) {
if rd.current == rd.buffer.Back() || rd.current == nil {
err := rd.feedBuffer()
if err != nil {
return EOF, 0, err
}
}
runeWithSize := rd.current.Value.(runeWithSize)
rd.current = rd.current.Next()
return runeWithSize.r, runeWithSize.size, nil
}
// UnreadRune pushes back the previously read rune in the buffer, extending it if needed.
func (rd *Reader) UnreadRune() error {
if rd.current == rd.buffer.Front() {
return ErrNoRuneToUnread
}
if rd.current == nil {
rd.current = rd.buffer.Back()
} else {
rd.current = rd.current.Prev()
}
return nil
}
// Forget removes runes stored before the current stream position index.
func (rd *Reader) Forget() {
if rd.current == nil {
rd.current = rd.buffer.Back()
}
for ; rd.current != rd.buffer.Front(); rd.buffer.Remove(rd.current.Prev()) {
}
}
// PeekRune returns at most the next n runes, reading from the uderlying source if
// needed. Does not move the current index. It includes EOF if reached.
func (rd *Reader) PeekRunes(n int) []rune {
res := make([]rune, 0, n)
cursor := rd.current
for i := 0; i < n; i++ {
if cursor == nil {
err := rd.feedBuffer()
if err != nil {
return res
}
cursor = rd.buffer.Back()
}
if cursor != nil {
r := cursor.Value.(runeWithSize).r
res = append(res, r)
if r == EOF {
return res
}
cursor = cursor.Next()
}
}
return res
}

View file

@ -1,145 +0,0 @@
package buffruneio
import (
"runtime/debug"
"strings"
"testing"
)
func assertNoError(t *testing.T, err error) {
if err != nil {
t.Log("unexpected error", err)
debug.PrintStack()
t.FailNow()
}
}
func assumeRunesArray(t *testing.T, expected []rune, got []rune) {
if len(expected) != len(got) {
t.Fatal("expected", len(expected), "runes, but got", len(got))
}
for i := 0; i < len(got); i++ {
if expected[i] != got[i] {
t.Fatal("expected rune", expected[i], "at index", i, "but got", got[i])
}
}
}
func assumeRune(t *testing.T, rd *Reader, r rune) {
gotRune, size, err := rd.ReadRune()
assertNoError(t, err)
if gotRune != r {
t.Fatal("got", string(gotRune),
"(", []byte(string(gotRune)), ")",
"expected", string(r),
"(", []byte(string(r)), ")")
t.Fatal("got size", size,
"expected", len([]byte(string(r))))
}
}
func TestReadString(t *testing.T) {
s := "hello"
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, 'h')
assumeRune(t, rd, 'e')
assumeRune(t, rd, 'l')
assumeRune(t, rd, 'l')
assumeRune(t, rd, 'o')
assumeRune(t, rd, EOF)
}
func TestMultipleEOF(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, EOF)
assumeRune(t, rd, EOF)
}
func TestUnread(t *testing.T) {
s := "ab"
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, 'a')
assumeRune(t, rd, 'b')
assertNoError(t, rd.UnreadRune())
assumeRune(t, rd, 'b')
assumeRune(t, rd, EOF)
}
func TestUnreadEOF(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
_ = rd.UnreadRune()
assumeRune(t, rd, EOF)
assumeRune(t, rd, EOF)
assertNoError(t, rd.UnreadRune())
assumeRune(t, rd, EOF)
}
func TestForget(t *testing.T) {
s := "hello"
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, 'h')
assumeRune(t, rd, 'e')
assumeRune(t, rd, 'l')
assumeRune(t, rd, 'l')
rd.Forget()
if rd.UnreadRune() != ErrNoRuneToUnread {
t.Fatal("no rune should be available")
}
}
func TestForgetEmpty(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
rd.Forget()
assumeRune(t, rd, EOF)
rd.Forget()
}
func TestPeekEmpty(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
runes := rd.PeekRunes(1)
if len(runes) != 1 {
t.Fatal("incorrect number of runes", len(runes))
}
if runes[0] != EOF {
t.Fatal("incorrect rune", runes[0])
}
}
func TestPeek(t *testing.T) {
s := "a"
rd := NewReader(strings.NewReader(s))
runes := rd.PeekRunes(1)
assumeRunesArray(t, []rune{'a'}, runes)
runes = rd.PeekRunes(1)
assumeRunesArray(t, []rune{'a'}, runes)
assumeRune(t, rd, 'a')
runes = rd.PeekRunes(1)
assumeRunesArray(t, []rune{EOF}, runes)
assumeRune(t, rd, EOF)
}
func TestPeekLarge(t *testing.T) {
s := "abcdefg"
rd := NewReader(strings.NewReader(s))
runes := rd.PeekRunes(100)
if len(runes) != len(s)+1 {
t.Fatal("incorrect number of runes", len(runes))
}
assumeRunesArray(t, []rune{'a', 'b', 'c', 'd', 'e', 'f', 'g', EOF}, runes)
}

View file

@ -1,16 +1,18 @@
sudo: false
language: go
go:
- 1.6.4
- 1.7.5
- 1.8
- 1.7.6
- 1.8.3
- 1.9
- tip
matrix:
allow_failures:
- go: tip
fast_finish: true
script:
- if [ -n "$(go fmt ./...)" ]; then exit 1; fi
- ./test.sh
- ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls

View file

@ -33,7 +33,7 @@ import "github.com/pelletier/go-toml"
Read a TOML document:
```go
config, _ := toml.LoadString(`
config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)
@ -42,7 +42,7 @@ user := config.Get("postgres.user").(string)
// or using an intermediate object
postgresConfig := config.Get("postgres").(*toml.Tree)
password = postgresConfig.Get("password").(string)
password := postgresConfig.Get("password").(string)
```
Or use Unmarshal:
@ -62,7 +62,7 @@ user = "pelletier"
password = "mypassword"`)
config := Config{}
Unmarshal(doc, &config)
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
```
@ -70,7 +70,8 @@ Or use a query:
```go
// use a query to gather elements without walking the tree
results, _ := config.Query("$..[user,password]")
q, _ := query.Compile("$..[user,password]")
results := q.Execute(config)
for ii, item := range results.Values() {
fmt.Println("Query result %d: %v", ii, item)
}
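
The query snippet above now goes through a compile step; a self-contained version might look like the following. The `github.com/pelletier/go-toml/query` import path is an assumption about where `query.Compile` lives after the split, so check the vendored tree.

```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
	"github.com/pelletier/go-toml/query" // assumed subpackage for the query API
)

func main() {
	config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)

	// Compile the JSONPath-like expression once, then run it against the tree.
	q, _ := query.Compile("$..[user,password]")
	results := q.Execute(config)
	for ii, item := range results.Values() {
		fmt.Printf("query result %d: %v\n", ii, item)
	}
}
```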

vendor/github.com/pelletier/go-toml/benchmark.json (generated, vendored; new file, 164 lines)
View file

@ -0,0 +1,164 @@
{
"array": {
"key1": [
1,
2,
3
],
"key2": [
"red",
"yellow",
"green"
],
"key3": [
[
1,
2
],
[
3,
4,
5
]
],
"key4": [
[
1,
2
],
[
"a",
"b",
"c"
]
],
"key5": [
1,
2,
3
],
"key6": [
1,
2
]
},
"boolean": {
"False": false,
"True": true
},
"datetime": {
"key1": "1979-05-27T07:32:00Z",
"key2": "1979-05-27T00:32:00-07:00",
"key3": "1979-05-27T00:32:00.999999-07:00"
},
"float": {
"both": {
"key": 6.626e-34
},
"exponent": {
"key1": 5e+22,
"key2": 1000000,
"key3": -0.02
},
"fractional": {
"key1": 1,
"key2": 3.1415,
"key3": -0.01
},
"underscores": {
"key1": 9224617.445991227,
"key2": 1e+100
}
},
"fruit": [{
"name": "apple",
"physical": {
"color": "red",
"shape": "round"
},
"variety": [{
"name": "red delicious"
},
{
"name": "granny smith"
}
]
},
{
"name": "banana",
"variety": [{
"name": "plantain"
}]
}
],
"integer": {
"key1": 99,
"key2": 42,
"key3": 0,
"key4": -17,
"underscores": {
"key1": 1000,
"key2": 5349221,
"key3": 12345
}
},
"products": [{
"name": "Hammer",
"sku": 738594937
},
{},
{
"color": "gray",
"name": "Nail",
"sku": 284758393
}
],
"string": {
"basic": {
"basic": "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
},
"literal": {
"multiline": {
"lines": "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n",
"regex2": "I [dw]on't need \\d{2} apples"
},
"quoted": "Tom \"Dubs\" Preston-Werner",
"regex": "\u003c\\i\\c*\\s*\u003e",
"winpath": "C:\\Users\\nodejs\\templates",
"winpath2": "\\\\ServerX\\admin$\\system32\\"
},
"multiline": {
"continued": {
"key1": "The quick brown fox jumps over the lazy dog.",
"key2": "The quick brown fox jumps over the lazy dog.",
"key3": "The quick brown fox jumps over the lazy dog."
},
"key1": "One\nTwo",
"key2": "One\nTwo",
"key3": "One\nTwo"
}
},
"table": {
"inline": {
"name": {
"first": "Tom",
"last": "Preston-Werner"
},
"point": {
"x": 1,
"y": 2
}
},
"key": "value",
"subtable": {
"key": "another value"
}
},
"x": {
"y": {
"z": {
"w": {}
}
}
}
}

vendor/github.com/pelletier/go-toml/benchmark.sh (generated, vendored; new executable file, 32 lines)
View file

@ -0,0 +1,32 @@
#!/bin/bash
set -e
reference_ref=${1:-master}
reference_git=${2:-.}
if ! `hash benchstat 2>/dev/null`; then
echo "Installing benchstat"
go get golang.org/x/perf/cmd/benchstat
go install golang.org/x/perf/cmd/benchstat
fi
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
ref_tempdir="${tempdir}/ref"
ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt"
local_benchmark="`pwd`/benchmark-local.txt"
echo "=== ${reference_ref} (${ref_tempdir})"
git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
pushd ${ref_tempdir} >/dev/null
git checkout ${reference_ref} >/dev/null 2>/dev/null
go test -bench=. -benchmem | tee ${ref_benchmark}
popd >/dev/null
echo ""
echo "=== local"
go test -bench=. -benchmem | tee ${local_benchmark}
echo ""
echo "=== diff"
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}

vendor/github.com/pelletier/go-toml/benchmark.toml (generated, vendored; new file, 244 lines)
View file

@ -0,0 +1,244 @@
################################################################################
## Comment
# Speak your mind with the hash symbol. They go from the symbol to the end of
# the line.
################################################################################
## Table
# Tables (also known as hash tables or dictionaries) are collections of
# key/value pairs. They appear in square brackets on a line by themselves.
[table]
key = "value" # Yeah, you can do this.
# Nested tables are denoted by table names with dots in them. Name your tables
# whatever crap you please, just don't use #, ., [ or ].
[table.subtable]
key = "another value"
# You don't need to specify all the super-tables if you don't want to. TOML
# knows how to do it for you.
# [x] you
# [x.y] don't
# [x.y.z] need these
[x.y.z.w] # for this to work
################################################################################
## Inline Table
# Inline tables provide a more compact syntax for expressing tables. They are
# especially useful for grouped data that can otherwise quickly become verbose.
# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
# allowed between the curly braces unless they are valid within a value.
[table.inline]
name = { first = "Tom", last = "Preston-Werner" }
point = { x = 1, y = 2 }
################################################################################
## String
# There are four ways to express strings: basic, multi-line basic, literal, and
# multi-line literal. All strings must contain only valid UTF-8 characters.
[string.basic]
basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
[string.multiline]
# The following strings are byte-for-byte equivalent:
key1 = "One\nTwo"
key2 = """One\nTwo"""
key3 = """
One
Two"""
[string.multiline.continued]
# The following strings are byte-for-byte equivalent:
key1 = "The quick brown fox jumps over the lazy dog."
key2 = """
The quick brown \
fox jumps over \
the lazy dog."""
key3 = """\
The quick brown \
fox jumps over \
the lazy dog.\
"""
[string.literal]
# What you see is what you get.
winpath = 'C:\Users\nodejs\templates'
winpath2 = '\\ServerX\admin$\system32\'
quoted = 'Tom "Dubs" Preston-Werner'
regex = '<\i\c*\s*>'
[string.literal.multiline]
regex2 = '''I [dw]on't need \d{2} apples'''
lines = '''
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
'''
################################################################################
## Integer
# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
# Negative numbers are prefixed with a minus sign.
[integer]
key1 = +99
key2 = 42
key3 = 0
key4 = -17
[integer.underscores]
# For large numbers, you may use underscores to enhance readability. Each
# underscore must be surrounded by at least one digit.
key1 = 1_000
key2 = 5_349_221
key3 = 1_2_3_4_5 # valid but inadvisable
################################################################################
## Float
# A float consists of an integer part (which may be prefixed with a plus or
# minus sign) followed by a fractional part and/or an exponent part.
[float.fractional]
key1 = +1.0
key2 = 3.1415
key3 = -0.01
[float.exponent]
key1 = 5e+22
key2 = 1e6
key3 = -2E-2
[float.both]
key = 6.626e-34
[float.underscores]
key1 = 9_224_617.445_991_228_313
key2 = 1e1_00
################################################################################
## Boolean
# Booleans are just the tokens you're used to. Always lowercase.
[boolean]
True = true
False = false
################################################################################
## Datetime
# Datetimes are RFC 3339 dates.
[datetime]
key1 = 1979-05-27T07:32:00Z
key2 = 1979-05-27T00:32:00-07:00
key3 = 1979-05-27T00:32:00.999999-07:00
################################################################################
## Array
# Arrays are square brackets with other primitives inside. Whitespace is
# ignored. Elements are separated by commas. Data types may not be mixed.
[array]
key1 = [ 1, 2, 3 ]
key2 = [ "red", "yellow", "green" ]
key3 = [ [ 1, 2 ], [3, 4, 5] ]
#key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
# also ignore newlines between the brackets. Terminating commas are ok before
# the closing bracket.
key5 = [
1, 2, 3
]
key6 = [
1,
2, # this is ok
]
################################################################################
## Array of Tables
# These can be expressed by using a table name in double brackets. Each table
# with the same double bracketed name will be an element in the array. The
# tables are inserted in the order encountered.
[[products]]
name = "Hammer"
sku = 738594937
[[products]]
[[products]]
name = "Nail"
sku = 284758393
color = "gray"
# You can create nested arrays of tables as well.
[[fruit]]
name = "apple"
[fruit.physical]
color = "red"
shape = "round"
[[fruit.variety]]
name = "red delicious"
[[fruit.variety]]
name = "granny smith"
[[fruit]]
name = "banana"
[[fruit.variety]]
name = "plantain"

vendor/github.com/pelletier/go-toml/benchmark.yml generated vendored Normal file
View file

@ -0,0 +1,121 @@
---
array:
key1:
- 1
- 2
- 3
key2:
- red
- yellow
- green
key3:
- - 1
- 2
- - 3
- 4
- 5
key4:
- - 1
- 2
- - a
- b
- c
key5:
- 1
- 2
- 3
key6:
- 1
- 2
boolean:
'False': false
'True': true
datetime:
key1: '1979-05-27T07:32:00Z'
key2: '1979-05-27T00:32:00-07:00'
key3: '1979-05-27T00:32:00.999999-07:00'
float:
both:
key: 6.626e-34
exponent:
key1: 5.0e+22
key2: 1000000
key3: -0.02
fractional:
key1: 1
key2: 3.1415
key3: -0.01
underscores:
key1: 9224617.445991227
key2: 1.0e+100
fruit:
- name: apple
physical:
color: red
shape: round
variety:
- name: red delicious
- name: granny smith
- name: banana
variety:
- name: plantain
integer:
key1: 99
key2: 42
key3: 0
key4: -17
underscores:
key1: 1000
key2: 5349221
key3: 12345
products:
- name: Hammer
sku: 738594937
- {}
- color: gray
name: Nail
sku: 284758393
string:
basic:
basic: "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
literal:
multiline:
lines: |
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
regex2: I [dw]on't need \d{2} apples
quoted: Tom "Dubs" Preston-Werner
regex: "<\\i\\c*\\s*>"
winpath: C:\Users\nodejs\templates
winpath2: "\\\\ServerX\\admin$\\system32\\"
multiline:
continued:
key1: The quick brown fox jumps over the lazy dog.
key2: The quick brown fox jumps over the lazy dog.
key3: The quick brown fox jumps over the lazy dog.
key1: |-
One
Two
key2: |-
One
Two
key3: |-
One
Two
table:
inline:
name:
first: Tom
last: Preston-Werner
point:
x: 1
y: 2
key: value
subtable:
key: another value
x:
y:
z:
w: {}

vendor/github.com/pelletier/go-toml/benchmark_test.go generated vendored Normal file
View file

@ -0,0 +1,192 @@
package toml
import (
"bytes"
"encoding/json"
"io/ioutil"
"testing"
"time"
burntsushi "github.com/BurntSushi/toml"
yaml "gopkg.in/yaml.v2"
)
type benchmarkDoc struct {
Table struct {
Key string
Subtable struct {
Key string
}
Inline struct {
Name struct {
First string
Last string
}
Point struct {
X int64
U int64
}
}
}
String struct {
Basic struct {
Basic string
}
Multiline struct {
Key1 string
Key2 string
Key3 string
Continued struct {
Key1 string
Key2 string
Key3 string
}
}
Literal struct {
Winpath string
Winpath2 string
Quoted string
Regex string
Multiline struct {
Regex2 string
Lines string
}
}
}
Integer struct {
Key1 int64
Key2 int64
Key3 int64
Key4 int64
Underscores struct {
Key1 int64
Key2 int64
Key3 int64
}
}
Float struct {
Fractional struct {
Key1 float64
Key2 float64
Key3 float64
}
Exponent struct {
Key1 float64
Key2 float64
Key3 float64
}
Both struct {
Key float64
}
Underscores struct {
Key1 float64
Key2 float64
}
}
Boolean struct {
True bool
False bool
}
Datetime struct {
Key1 time.Time
Key2 time.Time
Key3 time.Time
}
Array struct {
Key1 []int64
Key2 []string
Key3 [][]int64
// TODO: Key4 not supported by go-toml's Unmarshal
Key5 []int64
Key6 []int64
}
Products []struct {
Name string
Sku int64
Color string
}
Fruit []struct {
Name string
Physical struct {
Color string
Shape string
Variety []struct {
Name string
}
}
}
}
func BenchmarkParseToml(b *testing.B) {
fileBytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
_, err := LoadReader(bytes.NewReader(fileBytes))
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalToml(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalBurntSushiToml(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := burntsushi.Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalJson(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.json")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := json.Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalYaml(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.yml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := yaml.Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}

View file

@ -1,13 +1,16 @@
// code examples for godoc
package toml
package toml_test
import (
"fmt"
"log"
toml "github.com/pelletier/go-toml"
)
func Example_tree() {
config, err := LoadFile("config.toml")
config, err := toml.LoadFile("config.toml")
if err != nil {
fmt.Println("Error ", err.Error())
@ -17,7 +20,7 @@ func Example_tree() {
password := config.Get("postgres.password").(string)
// or using an intermediate object
configTree := config.Get("postgres").(*Tree)
configTree := config.Get("postgres").(*toml.Tree)
user = configTree.Get("user").(string)
password = configTree.Get("password").(string)
fmt.Println("User is", user, " and password is", password)
@ -48,6 +51,50 @@ func Example_unmarshal() {
`)
person := Person{}
Unmarshal(document, &person)
toml.Unmarshal(document, &person)
fmt.Println(person.Name, "is", person.Age, "and works at", person.Employer.Name)
// Output:
// John is 30 and works at Company Inc.
}
func ExampleMarshal() {
type Postgres struct {
User string `toml:"user"`
Password string `toml:"password"`
}
type Config struct {
Postgres Postgres `toml:"postgres"`
}
config := Config{Postgres{User: "pelletier", Password: "mypassword"}}
b, err := toml.Marshal(config)
if err != nil {
log.Fatal(err)
}
fmt.Println(string(b))
// Output:
// [postgres]
// password = "mypassword"
// user = "pelletier"
}
func ExampleUnmarshal() {
type Postgres struct {
User string
Password string
}
type Config struct {
Postgres Postgres
}
doc := []byte(`
[postgres]
user = "pelletier"
password = "mypassword"`)
config := Config{}
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
// Output:
// user= pelletier
}

View file

@ -9,12 +9,9 @@ import (
"bytes"
"errors"
"fmt"
"io"
"regexp"
"strconv"
"strings"
"github.com/pelletier/go-buffruneio"
)
var dateRegexp *regexp.Regexp
@ -24,29 +21,29 @@ type tomlLexStateFn func() tomlLexStateFn
// Define lexer
type tomlLexer struct {
input *buffruneio.Reader // Textual source
buffer bytes.Buffer // Runes composing the current token
tokens chan token
depth int
line int
col int
endbufferLine int
endbufferCol int
inputIdx int
input []rune // Textual source
currentTokenStart int
currentTokenStop int
tokens []token
depth int
line int
col int
endbufferLine int
endbufferCol int
}
// Basic read operations on input
func (l *tomlLexer) read() rune {
r, _, err := l.input.ReadRune()
if err != nil {
panic(err)
}
r := l.peek()
if r == '\n' {
l.endbufferLine++
l.endbufferCol = 1
} else {
l.endbufferCol++
}
l.inputIdx++
return r
}
@ -54,13 +51,13 @@ func (l *tomlLexer) next() rune {
r := l.read()
if r != eof {
l.buffer.WriteRune(r)
l.currentTokenStop++
}
return r
}
func (l *tomlLexer) ignore() {
l.buffer.Reset()
l.currentTokenStart = l.currentTokenStop
l.line = l.endbufferLine
l.col = l.endbufferCol
}
@ -77,49 +74,46 @@ func (l *tomlLexer) fastForward(n int) {
}
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{
l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: t,
val: value,
}
})
l.ignore()
}
func (l *tomlLexer) emit(t tokenType) {
l.emitWithValue(t, l.buffer.String())
l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop]))
}
func (l *tomlLexer) peek() rune {
r, _, err := l.input.ReadRune()
if err != nil {
panic(err)
if l.inputIdx >= len(l.input) {
return eof
}
l.input.UnreadRune()
return r
return l.input[l.inputIdx]
}
func (l *tomlLexer) peekString(size int) string {
maxIdx := len(l.input)
upperIdx := l.inputIdx + size // FIXME: potential overflow
if upperIdx > maxIdx {
upperIdx = maxIdx
}
return string(l.input[l.inputIdx:upperIdx])
}
func (l *tomlLexer) follow(next string) bool {
for _, expectedRune := range next {
r, _, err := l.input.ReadRune()
defer l.input.UnreadRune()
if err != nil {
panic(err)
}
if expectedRune != r {
return false
}
}
return true
return next == l.peekString(len(next))
}
// Error management
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
l.tokens <- token{
l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
}
})
return nil
}
@ -220,7 +214,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
break
}
possibleDate := string(l.input.PeekRunes(35))
possibleDate := l.peekString(35)
dateMatch := dateRegexp.FindString(possibleDate)
if dateMatch != "" {
l.fastForward(len(dateMatch))
@ -537,7 +531,7 @@ func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
if l.buffer.Len() > 0 {
if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroupArray)
}
l.next()
@ -560,7 +554,7 @@ func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
if l.buffer.Len() > 0 {
if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroup)
}
l.next()
@ -635,7 +629,6 @@ func (l *tomlLexer) run() {
for state := l.lexVoid; state != nil; {
state = state()
}
close(l.tokens)
}
func init() {
@ -643,16 +636,16 @@ func init() {
}
// Entry point
func lexToml(input io.Reader) chan token {
bufferedInput := buffruneio.NewReader(input)
func lexToml(inputBytes []byte) []token {
runes := bytes.Runes(inputBytes)
l := &tomlLexer{
input: bufferedInput,
tokens: make(chan token),
input: runes,
tokens: make([]token, 0, 256),
line: 1,
col: 1,
endbufferLine: 1,
endbufferCol: 1,
}
go l.run()
l.run()
return l.tokens
}

View file

@ -1,38 +1,14 @@
package toml
import (
"os"
"strings"
"reflect"
"testing"
)
func testFlow(t *testing.T, input string, expectedFlow []token) {
ch := lexToml(strings.NewReader(input))
for _, expected := range expectedFlow {
token := <-ch
if token != expected {
t.Log("While testing: ", input)
t.Log("compared (got)", token, "to (expected)", expected)
t.Log("\tvalue:", token.val, "<->", expected.val)
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
t.Log("\tline:", token.Line, "<->", expected.Line)
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
t.Log("compared", token, "to", expected)
t.FailNow()
}
}
tok, ok := <-ch
if ok {
t.Log("channel is not closed!")
t.Log(len(ch)+1, "tokens remaining:")
t.Log("token ->", tok)
for token := range ch {
t.Log("token ->", token)
}
t.FailNow()
tokens := lexToml([]byte(input))
if !reflect.DeepEqual(tokens, expectedFlow) {
t.Fatal("Different flows. Expected\n", expectedFlow, "\nGot:\n", tokens)
}
}
@ -767,13 +743,8 @@ pluralizeListTitles = false
url = "https://github.com/spf13/hugo/releases"
weight = -200
`
rd := strings.NewReader(sample)
b.ResetTimer()
for i := 0; i < b.N; i++ {
rd.Seek(0, os.SEEK_SET)
ch := lexToml(rd)
for _ = range ch {
}
lexToml([]byte(sample))
}
}

View file

@ -268,15 +268,20 @@ func valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) {
mtypef := mtype.Field(i)
opts := tomlOptions(mtypef)
if opts.include {
key := opts.name
exists := tval.Has(key)
if exists {
baseKey := opts.name
keysToTry := []string{baseKey, strings.ToLower(baseKey), strings.ToTitle(baseKey)}
for _, key := range keysToTry {
exists := tval.Has(key)
if !exists {
continue
}
val := tval.Get(key)
mvalf, err := valueFromToml(mtypef.Type, val)
if err != nil {
return mval, formatError(err, tval.GetPosition(key))
}
mval.Field(i).Set(mvalf)
break
}
}
}
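As a rough illustration of the effect of this lookup change (a sketch, not code from the vendored package): an exported struct field without a `toml` tag can now match a document key in a different case, because the field name, its lower-case form, and its title-case form are all tried.

```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

// Server has no `toml` struct tags: with this change the unmarshaler tries
// the field name as-is ("Host"), lower-cased ("host") and title-cased ("HOST").
type Server struct {
	Host string
	Port int64
}

func main() {
	doc := []byte(`
host = "localhost"
port = 8080
`)
	var s Server
	if err := toml.Unmarshal(doc, &s); err != nil {
		panic(err)
	}
	fmt.Println(s.Host, s.Port) // localhost 8080
}
```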

View file

@ -177,25 +177,6 @@ func TestDocUnmarshal(t *testing.T) {
}
}
func ExampleUnmarshal() {
type Postgres struct {
User string
Password string
}
type Config struct {
Postgres Postgres
}
doc := []byte(`
[postgres]
user = "pelletier"
password = "mypassword"`)
config := Config{}
Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
}
func TestDocPartialUnmarshal(t *testing.T) {
result := testDocSubs{}

View file

@ -13,9 +13,9 @@ import (
)
type tomlParser struct {
flow chan token
flowIdx int
flow []token
tree *Tree
tokensBuffer []token
currentTable []string
seenTableKeys []string
}
@ -34,16 +34,10 @@ func (p *tomlParser) run() {
}
func (p *tomlParser) peek() *token {
if len(p.tokensBuffer) != 0 {
return &(p.tokensBuffer[0])
}
tok, ok := <-p.flow
if !ok {
if p.flowIdx >= len(p.flow) {
return nil
}
p.tokensBuffer = append(p.tokensBuffer, tok)
return &tok
return &p.flow[p.flowIdx]
}
func (p *tomlParser) assume(typ tokenType) {
@ -57,16 +51,12 @@ func (p *tomlParser) assume(typ tokenType) {
}
func (p *tomlParser) getToken() *token {
if len(p.tokensBuffer) != 0 {
tok := p.tokensBuffer[0]
p.tokensBuffer = p.tokensBuffer[1:]
return &tok
}
tok, ok := <-p.flow
if !ok {
tok := p.peek()
if tok == nil {
return nil
}
return &tok
p.flowIdx++
return tok
}
func (p *tomlParser) parseStart() tomlParserStateFn {
@ -374,13 +364,13 @@ func (p *tomlParser) parseArray() interface{} {
return array
}
func parseToml(flow chan token) *Tree {
func parseToml(flow []token) *Tree {
result := newTree()
result.position = Position{1, 1}
parser := &tomlParser{
flowIdx: 0,
flow: flow,
tree: result,
tokensBuffer: make([]token, 0),
currentTable: make([]string, 0),
seenTableKeys: make([]string, 0),
}

View file

@ -7,10 +7,10 @@ package query
import (
"fmt"
"github.com/pelletier/go-toml"
"strconv"
"strings"
"unicode/utf8"
"github.com/pelletier/go-toml"
)
// Lexer state function
@ -55,7 +55,7 @@ func (l *queryLexer) nextStart() {
func (l *queryLexer) emit(t tokenType) {
l.tokens <- token{
Position: toml.Position{Line:l.line, Col:l.col},
Position: toml.Position{Line: l.line, Col: l.col},
typ: t,
val: l.input[l.start:l.pos],
}
@ -64,7 +64,7 @@ func (l *queryLexer) emit(t tokenType) {
func (l *queryLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{
Position: toml.Position{Line:l.line, Col:l.col},
Position: toml.Position{Line: l.line, Col: l.col},
typ: t,
val: value,
}
@ -92,7 +92,7 @@ func (l *queryLexer) backup() {
func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
l.tokens <- token{
Position: toml.Position{Line:l.line, Col:l.col},
Position: toml.Position{Line: l.line, Col: l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
}

View file

@ -1,8 +1,8 @@
package query
import (
"testing"
"github.com/pelletier/go-toml"
"testing"
)
func testQLFlow(t *testing.T, input string, expectedFlow []token) {

View file

@ -2,8 +2,8 @@ package query
import (
"fmt"
"testing"
"github.com/pelletier/go-toml"
"testing"
)
// dump path tree to a string

View file

@ -253,7 +253,7 @@ func (p *queryParser) parseFilterExpr() queryParserStateFn {
}
tok = p.getToken()
if tok.typ != tokenKey && tok.typ != tokenString {
return p.parseError(tok, "expected key or string for filter funciton name")
return p.parseError(tok, "expected key or string for filter function name")
}
name := tok.val
tok = p.getToken()

View file

@ -2,12 +2,12 @@ package query
import (
"fmt"
"github.com/pelletier/go-toml"
"io/ioutil"
"sort"
"strings"
"testing"
"time"
"github.com/pelletier/go-toml"
)
type queryTestNode struct {

View file

@ -1,10 +1,10 @@
package query
import (
"fmt"
"strconv"
"unicode"
"fmt"
"github.com/pelletier/go-toml"
"strconv"
"unicode"
)
// Define tokens
@ -104,4 +104,3 @@ func isHexDigit(r rune) bool {
(r >= 'a' && r <= 'f') ||
(r >= 'A' && r <= 'F')
}

View file

@ -27,6 +27,8 @@ go vet ./...
go get github.com/pelletier/go-buffruneio
go get github.com/davecgh/go-spew/spew
go get gopkg.in/yaml.v2
go get github.com/BurntSushi/toml
# get code for BurntSushi TOML validation
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)

View file

@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"runtime"
"strings"
@ -251,8 +252,8 @@ func (t *Tree) createSubTree(keys []string, pos Position) error {
return nil
}
// LoadReader creates a Tree from any io.Reader.
func LoadReader(reader io.Reader) (tree *Tree, err error) {
// LoadBytes creates a Tree from a []byte.
func LoadBytes(b []byte) (tree *Tree, err error) {
defer func() {
if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok {
@ -261,13 +262,23 @@ func LoadReader(reader io.Reader) (tree *Tree, err error) {
err = errors.New(r.(string))
}
}()
tree = parseToml(lexToml(reader))
tree = parseToml(lexToml(b))
return
}
// LoadReader creates a Tree from any io.Reader.
func LoadReader(reader io.Reader) (tree *Tree, err error) {
inputBytes, err := ioutil.ReadAll(reader)
if err != nil {
return
}
tree, err = LoadBytes(inputBytes)
return
}
// Load creates a Tree from a string.
func Load(content string) (tree *Tree, err error) {
return LoadReader(strings.NewReader(content))
return LoadBytes([]byte(content))
}
// LoadFile creates a Tree from a file.
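A minimal sketch of how the reworked entry points fit together, using only the signatures visible in this hunk (`LoadBytes([]byte)`, `LoadReader(io.Reader)` and `Load(string)` all return `(*Tree, error)`):

```go
package main

import (
	"fmt"
	"strings"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// LoadBytes is the new primitive; Load and LoadReader are thin wrappers.
	tree, err := toml.LoadBytes([]byte(`name = "example"`))
	if err != nil {
		panic(err)
	}
	fmt.Println(tree.Get("name")) // example

	// LoadReader still works for streaming sources; it now buffers the whole
	// input with ioutil.ReadAll before handing it to LoadBytes.
	tree, err = toml.LoadReader(strings.NewReader(`port = 8080`))
	if err != nil {
		panic(err)
	}
	fmt.Println(tree.Get("port")) // 8080
}
```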

View file

@ -4,6 +4,7 @@ import (
"bytes"
"fmt"
"io"
"math"
"reflect"
"sort"
"strconv"
@ -13,33 +14,34 @@ import (
// encodes a string to a TOML-compliant string value
func encodeTomlString(value string) string {
result := ""
var b bytes.Buffer
for _, rr := range value {
switch rr {
case '\b':
result += "\\b"
b.WriteString(`\b`)
case '\t':
result += "\\t"
b.WriteString(`\t`)
case '\n':
result += "\\n"
b.WriteString(`\n`)
case '\f':
result += "\\f"
b.WriteString(`\f`)
case '\r':
result += "\\r"
b.WriteString(`\r`)
case '"':
result += "\\\""
b.WriteString(`\"`)
case '\\':
result += "\\\\"
b.WriteString(`\\`)
default:
intRr := uint16(rr)
if intRr < 0x001F {
result += fmt.Sprintf("\\u%0.4X", intRr)
b.WriteString(fmt.Sprintf("\\u%0.4X", intRr))
} else {
result += string(rr)
b.WriteRune(rr)
}
}
}
return result
return b.String()
}
func tomlValueStringRepresentation(v interface{}) (string, error) {
@ -49,6 +51,11 @@ func tomlValueStringRepresentation(v interface{}) (string, error) {
case int64:
return strconv.FormatInt(value, 10), nil
case float64:
// Ensure a round float does contain a decimal point. Otherwise feeding
// the output back to the parser would convert to an integer.
if math.Trunc(value) == value {
return strconv.FormatFloat(value, 'f', 1, 32), nil
}
return strconv.FormatFloat(value, 'f', -1, 32), nil
case string:
return "\"" + encodeTomlString(value) + "\"", nil
@ -111,8 +118,7 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
return bytesCount, err
}
kvRepr := fmt.Sprintf("%s%s = %s\n", indent, k, repr)
writtenBytesCount, err := w.Write([]byte(kvRepr))
writtenBytesCount, err := writeStrings(w, indent, k, " = ", repr, "\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
@ -130,8 +136,7 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
switch node := v.(type) {
// node has to be of those two types given how keys are sorted above
case *Tree:
tableName := fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
writtenBytesCount, err := w.Write([]byte(tableName))
writtenBytesCount, err := writeStrings(w, "\n", indent, "[", combinedKey, "]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
@ -142,8 +147,7 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
}
case []*Tree:
for _, subTree := range node {
tableArrayName := fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
writtenBytesCount, err := w.Write([]byte(tableArrayName))
writtenBytesCount, err := writeStrings(w, "\n", indent, "[[", combinedKey, "]]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
@ -160,6 +164,18 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
return bytesCount, nil
}
func writeStrings(w io.Writer, s ...string) (int, error) {
var n int
for i := range s {
b, err := io.WriteString(w, s[i])
n += b
if err != nil {
return n, err
}
}
return n, nil
}
// WriteTo encode the Tree as Toml and writes it to the writer w.
// Returns the number of bytes written in case of success, or an error if anything happened.
func (t *Tree) WriteTo(w io.Writer) (int64, error) {

View file

@ -16,26 +16,26 @@ type failingWriter struct {
buffer bytes.Buffer
}
func (f failingWriter) Write(p []byte) (n int, err error) {
func (f *failingWriter) Write(p []byte) (n int, err error) {
count := len(p)
toWrite := f.failAt - count + f.written
toWrite := f.failAt - (count + f.written)
if toWrite < 0 {
toWrite = 0
}
if toWrite > count {
f.written += count
f.buffer.WriteString(string(p))
f.buffer.Write(p)
return count, nil
}
f.buffer.WriteString(string(p[:toWrite]))
f.buffer.Write(p[:toWrite])
f.written = f.failAt
return f.written, fmt.Errorf("failingWriter failed after writting %d bytes", f.written)
return toWrite, fmt.Errorf("failingWriter failed after writting %d bytes", f.written)
}
func assertErrorString(t *testing.T, expected string, err error) {
expectedErr := errors.New(expected)
if err.Error() != expectedErr.Error() {
if err == nil || err.Error() != expectedErr.Error() {
t.Errorf("expecting error %s, but got %s instead", expected, err)
}
}
@ -175,7 +175,7 @@ func TestTreeWriteToInvalidTreeTomlValueArray(t *testing.T) {
func TestTreeWriteToFailingWriterInSimpleValue(t *testing.T) {
toml, _ := Load(`a = 2`)
writer := failingWriter{failAt: 0, written: 0}
_, err := toml.WriteTo(writer)
_, err := toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 0 bytes", err)
}
@ -184,11 +184,11 @@ func TestTreeWriteToFailingWriterInTable(t *testing.T) {
[b]
a = 2`)
writer := failingWriter{failAt: 2, written: 0}
_, err := toml.WriteTo(writer)
_, err := toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
writer = failingWriter{failAt: 13, written: 0}
_, err = toml.WriteTo(writer)
_, err = toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 13 bytes", err)
}
@ -197,11 +197,11 @@ func TestTreeWriteToFailingWriterInArray(t *testing.T) {
[[b]]
a = 2`)
writer := failingWriter{failAt: 2, written: 0}
_, err := toml.WriteTo(writer)
_, err := toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
writer = failingWriter{failAt: 15, written: 0}
_, err = toml.WriteTo(writer)
_, err = toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 15 bytes", err)
}
@ -293,3 +293,66 @@ func TestTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
treeMap := tree.ToMap()
testMaps(t, treeMap, expected)
}
func TestTreeWriteToFloat(t *testing.T) {
tree, err := Load(`a = 3.0`)
if err != nil {
t.Fatal(err)
}
str, err := tree.ToTomlString()
if err != nil {
t.Fatal(err)
}
expected := `a = 3.0`
if strings.TrimSpace(str) != strings.TrimSpace(expected) {
t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str)
}
}
func BenchmarkTreeToTomlString(b *testing.B) {
toml, err := Load(sampleHard)
if err != nil {
b.Fatal("Unexpected error:", err)
}
for i := 0; i < b.N; i++ {
_, err := toml.ToTomlString()
if err != nil {
b.Fatal(err)
}
}
}
var sampleHard = `# Test file for TOML
# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate
# This part you'll really hate
[the]
test_string = "You'll hate me after this - #" # " Annoying, isn't it?
[the.hard]
test_array = [ "] ", " # "] # ] There you go, parse this!
test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ]
# You didn't think it'd as easy as chucking out the last #, did you?
another_test_string = " Same thing, but with a string #"
harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too"
# Things will get harder
[the.hard."bit#"]
"what?" = "You don't think some user won't do that?"
multi_line_array = [
"]",
# ] Oh yes I did
]
# Each of the following keygroups/key value pairs should produce an error. Uncomment to them to test
#[error] if you didn't catch this, your parser is broken
#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this
#array = [
# "This might most likely happen in multiline arrays",
# Like here,
# "or here,
# and here"
# ] End of array comment, forgot the #
#number = 3.14 pi <--again forgot the # `

View file

@ -16,5 +16,6 @@ matrix:
fast_finish: true
script:
- go test -v ./...
- go build
- go test -race -v ./...

View file

@ -61,11 +61,11 @@ import "github.com/spf13/afero"
First define a package variable and set it to a pointer to a filesystem.
```go
var AppFs afero.Fs = afero.NewMemMapFs()
var AppFs = afero.NewMemMapFs()
or
var AppFs afero.Fs = afero.NewOsFs()
var AppFs = afero.NewOsFs()
```
It is important to note that if you repeat the composite literal you
will be using a completely new and isolated filesystem. In the case of
@ -81,7 +81,10 @@ So if my application before had:
```go
os.Open('/tmp/foo')
```
We would replace it with a call to `AppFs.Open('/tmp/foo')`.
We would replace it with:
```go
AppFs.Open('/tmp/foo')
```
`AppFs` being the variable we defined above.
@ -166,8 +169,8 @@ f, err := afero.TempFile(fs,"", "ioutil-test")
### Calling via Afero
```go
fs := afero.NewMemMapFs
afs := &Afero{Fs: fs}
fs := afero.NewMemMapFs()
afs := &afero.Afero{Fs: fs}
f, err := afs.TempFile("", "ioutil-test")
```

View file

@ -12,4 +12,4 @@ build_script:
go build github.com/spf13/afero
test_script:
- cmd: go test -v github.com/spf13/afero
- cmd: go test -race -v github.com/spf13/afero/...

View file

@ -64,15 +64,10 @@ func (u *CacheOnReadFs) cacheStatus(name string) (state cacheState, fi os.FileIn
return cacheHit, lfi, nil
}
if err == syscall.ENOENT {
if err == syscall.ENOENT || os.IsNotExist(err) {
return cacheMiss, nil, nil
}
var ok bool
if err, ok = err.(*os.PathError); ok {
if err == os.ErrNotExist {
return cacheMiss, nil, nil
}
}
return cacheMiss, nil, err
}

View file

@ -1,6 +1,7 @@
package afero
import (
"bytes"
"fmt"
"io/ioutil"
"os"
@ -366,3 +367,38 @@ func TestUnionCacheExpire(t *testing.T) {
t.Errorf("cache time failed: <%s>", data)
}
}
func TestCacheOnReadFsNotInLayer(t *testing.T) {
base := NewMemMapFs()
layer := NewMemMapFs()
fs := NewCacheOnReadFs(base, layer, 0)
fh, err := base.Create("/file.txt")
if err != nil {
t.Fatal("unable to create file: ", err)
}
txt := []byte("This is a test")
fh.Write(txt)
fh.Close()
fh, err = fs.Open("/file.txt")
if err != nil {
t.Fatal("could not open file: ", err)
}
b, err := ReadAll(fh)
fh.Close()
if err != nil {
t.Fatal("could not read file: ", err)
} else if !bytes.Equal(txt, b) {
t.Fatalf("wanted file text %q, got %q", txt, b)
}
fh, err = layer.Open("/file.txt")
if err != nil {
t.Fatal("could not open file from layer: ", err)
}
fh.Close()
}

View file

@ -74,14 +74,24 @@ func CreateDir(name string) *FileData {
}
func ChangeFileName(f *FileData, newname string) {
f.Lock()
f.name = newname
f.Unlock()
}
func SetMode(f *FileData, mode os.FileMode) {
f.Lock()
f.mode = mode
f.Unlock()
}
func SetModTime(f *FileData, mtime time.Time) {
f.Lock()
setModTime(f, mtime)
f.Unlock()
}
func setModTime(f *FileData, mtime time.Time) {
f.modtime = mtime
}
@ -102,7 +112,7 @@ func (f *File) Close() error {
f.fileData.Lock()
f.closed = true
if !f.readOnly {
SetModTime(f.fileData, time.Now())
setModTime(f.fileData, time.Now())
}
f.fileData.Unlock()
return nil
@ -197,7 +207,7 @@ func (f *File) Truncate(size int64) error {
} else {
f.fileData.data = f.fileData.data[0:size]
}
SetModTime(f.fileData, time.Now())
setModTime(f.fileData, time.Now())
return nil
}
@ -236,7 +246,7 @@ func (f *File) Write(b []byte) (n int, err error) {
f.fileData.data = append(f.fileData.data[:cur], b...)
f.fileData.data = append(f.fileData.data, tail...)
}
SetModTime(f.fileData, time.Now())
setModTime(f.fileData, time.Now())
atomic.StoreInt64(&f.at, int64(len(f.fileData.data)))
return
@ -261,17 +271,33 @@ type FileInfo struct {
// Implements os.FileInfo
func (s *FileInfo) Name() string {
s.Lock()
_, name := filepath.Split(s.name)
s.Unlock()
return name
}
func (s *FileInfo) Mode() os.FileMode { return s.mode }
func (s *FileInfo) ModTime() time.Time { return s.modtime }
func (s *FileInfo) IsDir() bool { return s.dir }
func (s *FileInfo) Sys() interface{} { return nil }
func (s *FileInfo) Mode() os.FileMode {
s.Lock()
defer s.Unlock()
return s.mode
}
func (s *FileInfo) ModTime() time.Time {
s.Lock()
defer s.Unlock()
return s.modtime
}
func (s *FileInfo) IsDir() bool {
s.Lock()
defer s.Unlock()
return s.dir
}
func (s *FileInfo) Sys() interface{} { return nil }
func (s *FileInfo) Size() int64 {
if s.IsDir() {
return int64(42)
}
s.Lock()
defer s.Unlock()
return int64(len(s.data))
}

vendor/github.com/spf13/afero/mem/file_test.go generated vendored Normal file
View file

@ -0,0 +1,154 @@
package mem
import (
"testing"
"time"
)
func TestFileDataNameRace(t *testing.T) {
t.Parallel()
const someName = "someName"
const someOtherName = "someOtherName"
d := FileData{
name: someName,
}
if d.Name() != someName {
t.Errorf("Failed to read correct Name, was %v", d.Name())
}
ChangeFileName(&d, someOtherName)
if d.Name() != someOtherName {
t.Errorf("Failed to set Name, was %v", d.Name())
}
go func() {
ChangeFileName(&d, someName)
}()
if d.Name() != someName && d.Name() != someOtherName {
t.Errorf("Failed to read either Name, was %v", d.Name())
}
}
func TestFileDataModTimeRace(t *testing.T) {
t.Parallel()
someTime := time.Now()
someOtherTime := someTime.Add(1 * time.Minute)
d := FileData{
modtime: someTime,
}
s := FileInfo{
FileData: &d,
}
if s.ModTime() != someTime {
t.Errorf("Failed to read correct value, was %v", s.ModTime())
}
SetModTime(&d, someOtherTime)
if s.ModTime() != someOtherTime {
t.Errorf("Failed to set ModTime, was %v", s.ModTime())
}
go func() {
SetModTime(&d, someTime)
}()
if s.ModTime() != someTime && s.ModTime() != someOtherTime {
t.Errorf("Failed to read either modtime, was %v", s.ModTime())
}
}
func TestFileDataModeRace(t *testing.T) {
t.Parallel()
const someMode = 0777
const someOtherMode = 0660
d := FileData{
mode: someMode,
}
s := FileInfo{
FileData: &d,
}
if s.Mode() != someMode {
t.Errorf("Failed to read correct value, was %v", s.Mode())
}
SetMode(&d, someOtherMode)
if s.Mode() != someOtherMode {
t.Errorf("Failed to set Mode, was %v", s.Mode())
}
go func() {
SetMode(&d, someMode)
}()
if s.Mode() != someMode && s.Mode() != someOtherMode {
t.Errorf("Failed to read either mode, was %v", s.Mode())
}
}
func TestFileDataIsDirRace(t *testing.T) {
t.Parallel()
d := FileData{
dir: true,
}
s := FileInfo{
FileData: &d,
}
if s.IsDir() != true {
t.Errorf("Failed to read correct value, was %v", s.IsDir())
}
go func() {
s.Lock()
d.dir = false
s.Unlock()
}()
//just logging the value to trigger a read:
t.Logf("Value is %v", s.IsDir())
}
func TestFileDataSizeRace(t *testing.T) {
t.Parallel()
const someData = "Hello"
const someOtherDataSize = "Hello World"
d := FileData{
data: []byte(someData),
dir: false,
}
s := FileInfo{
FileData: &d,
}
if s.Size() != int64(len(someData)) {
t.Errorf("Failed to read correct value, was %v", s.Size())
}
go func() {
s.Lock()
d.data = []byte(someOtherDataSize)
s.Unlock()
}()
//just logging the value to trigger a read:
t.Logf("Value is %v", s.Size())
//Testing the Dir size case
d.dir = true
if s.Size() != int64(42) {
t.Errorf("Failed to read correct value for dir, was %v", s.Size())
}
}

View file

@ -141,7 +141,7 @@ func (m *MemMapFs) Mkdir(name string, perm os.FileMode) error {
m.registerWithParent(item)
m.mu.Unlock()
m.Chmod(name, perm)
m.Chmod(name, perm|os.ModeDir)
return nil
}
@ -151,9 +151,8 @@ func (m *MemMapFs) MkdirAll(path string, perm os.FileMode) error {
if err != nil {
if err.(*os.PathError).Err == ErrFileExists {
return nil
} else {
return err
}
return err
}
return nil
}

View file

@ -110,6 +110,8 @@ func TestPermSet(t *testing.T) {
const dirPathAll = "/my/path/to/dir"
const fileMode = os.FileMode(0765)
// directories will also have the directory bit set
const dirMode = fileMode | os.ModeDir
fs := NewMemMapFs()
@ -132,7 +134,7 @@ func TestPermSet(t *testing.T) {
}
// Test Mkdir
err = fs.Mkdir(dirPath, fileMode)
err = fs.Mkdir(dirPath, dirMode)
if err != nil {
t.Errorf("MkDir Create failed: %s", err)
return
@ -142,13 +144,14 @@ func TestPermSet(t *testing.T) {
t.Errorf("Stat failed: %s", err)
return
}
if s.Mode().String() != fileMode.String() {
t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), fileMode.String())
// sets File
if s.Mode().String() != dirMode.String() {
t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), dirMode.String())
return
}
// Test MkdirAll
err = fs.MkdirAll(dirPathAll, fileMode)
err = fs.MkdirAll(dirPathAll, dirMode)
if err != nil {
t.Errorf("MkDir Create failed: %s", err)
return
@ -158,8 +161,8 @@ func TestPermSet(t *testing.T) {
t.Errorf("Stat failed: %s", err)
return
}
if s.Mode().String() != fileMode.String() {
t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), fileMode.String())
if s.Mode().String() != dirMode.String() {
t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), dirMode.String())
return
}
}
@ -384,3 +387,35 @@ loop:
}
}
}
func TestMemFsDirMode(t *testing.T) {
fs := NewMemMapFs()
err := fs.Mkdir("/testDir1", 0644)
if err != nil {
t.Error(err)
}
err = fs.MkdirAll("/sub/testDir2", 0644)
if err != nil {
t.Error(err)
}
info, err := fs.Stat("/testDir1")
if err != nil {
t.Error(err)
}
if !info.IsDir() {
t.Error("should be a directory")
}
if !info.Mode().IsDir() {
t.Error("FileMode is not directory")
}
info, err = fs.Stat("/sub/testDir2")
if err != nil {
t.Error(err)
}
if !info.IsDir() {
t.Error("should be a directory")
}
if !info.Mode().IsDir() {
t.Error("FileMode is not directory")
}
}

View file

@ -1,14 +0,0 @@
// Copyright © 2014 Steve Francia <spf@spf13.com>.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package afero

View file

@ -157,7 +157,7 @@ func UnicodeSanitize(s string) string {
return string(target)
}
// Transform characters with accents into plan forms
// Transform characters with accents into plain forms.
func NeuterAccents(s string) string {
t := transform.Chain(norm.NFD, transform.RemoveFunc(isMn), norm.NFC)
result, _, _ := transform.String(t, string(s))

View file

@ -19,13 +19,33 @@ Many of the most widely used Go projects are built using Cobra including:
* [GiantSwarm's swarm](https://github.com/giantswarm/cli)
* [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack)
* [rclone](http://rclone.org/)
* [nehm](https://github.com/bogem/nehm)
[![Build Status](https://travis-ci.org/spf13/cobra.svg "Travis CI status")](https://travis-ci.org/spf13/cobra)
[![CircleCI status](https://circleci.com/gh/spf13/cobra.png?circle-token=:circle-token "CircleCI status")](https://circleci.com/gh/spf13/cobra)
[![GoDoc](https://godoc.org/github.com/spf13/cobra?status.svg)](https://godoc.org/github.com/spf13/cobra)
![cobra](https://cloud.githubusercontent.com/assets/173412/10911369/84832a8e-8212-11e5-9f82-cc96660a4794.gif)
# Table of Contents
- [Overview](#overview)
- [Concepts](#concepts)
* [Commands](#commands)
* [Flags](#flags)
- [Installing](#installing)
- [Getting Started](#getting-started)
* [Using the Cobra Generator](#using-the-cobra-generator)
* [Using the Cobra Library](#using-the-cobra-library)
* [Working with Flags](#working-with-flags)
* [Positional and Custom Arguments](#positional-and-custom-arguments)
* [Example](#example)
* [Help Command](#help-command)
* [Usage Message](#usage-message)
* [PreRun and PostRun Hooks](#prerun-and-postrun-hooks)
* [Suggestions when "unknown command" happens](#suggestions-when-unknown-command-happens)
* [Generating documentation for your command](#generating-documentation-for-your-command)
* [Generating bash completions](#generating-bash-completions)
- [Contributing](#contributing)
- [License](#license)
# Overview
@ -43,7 +63,6 @@ Cobra provides:
* Easy generation of applications & commands with `cobra init appname` & `cobra add cmdname`
* Intelligent suggestions (`app srver`... did you mean `app server`?)
* Automatic help generation for commands and flags
* Automatic detailed help for `app help [command]`
* Automatic help flag recognition of `-h`, `--help`, etc.
* Automatically generated bash autocomplete for your application
* Automatically generated man pages for your application
@ -51,16 +70,6 @@ Cobra provides:
* The flexibility to define your own help, usage, etc.
* Optional tight integration with [viper](http://github.com/spf13/viper) for 12-factor apps
Cobra has an exceptionally clean interface and simple design without needless
constructors or initialization methods.
Applications built with Cobra commands are designed to be as user-friendly as
possible. Flags can be placed before or after the command (as long as a
confusing space isnt provided). Both short and long flags can be used. A
command need not even be fully typed. Help is automatically generated and
available for the application or for a specific command using either the help
command or the `--help` flag.
# Concepts
Cobra is built on a structure of commands, arguments & flags.
@ -93,20 +102,11 @@ have children commands and optionally run an action.
In the example above, 'server' is the command.
A Command has the following structure:
```go
type Command struct {
Use string // The one-line usage message.
Short string // The short description shown in the 'help' output.
Long string // The long message shown in the 'help <this-command>' output.
Run func(cmd *Command, args []string) // Run runs the command.
}
```
[More about cobra.Command](https://godoc.org/github.com/spf13/cobra#Command)
## Flags
A Flag is a way to modify the behavior of a command. Cobra supports
A flag is a way to modify the behavior of a command. Cobra supports
fully POSIX-compliant flags as well as the Go [flag package](https://golang.org/pkg/flag/).
A Cobra command can define flags that persist through to children commands
and flags that are only available to that command.
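A small sketch of the two flag scopes described above (flag names are illustrative; the accessors are the standard `PersistentFlags()` and `Flags()`):

```go
package main

import "github.com/spf13/cobra"

func main() {
	var verbose bool
	var source string

	rootCmd := &cobra.Command{Use: "app"}
	subCmd := &cobra.Command{
		Use: "build",
		Run: func(cmd *cobra.Command, args []string) {},
	}
	rootCmd.AddCommand(subCmd)

	// Persistent flag: available to rootCmd and every child command.
	rootCmd.PersistentFlags().BoolVarP(&verbose, "verbose", "v", false, "verbose output")

	// Local flag: only valid on the command it is defined on.
	subCmd.Flags().StringVarP(&source, "source", "s", "", "source directory to read from")

	rootCmd.Execute()
}
```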
@ -170,106 +170,14 @@ func main() {
Cobra provides its own program that will create your application and add any
commands you want. It's the easiest way to incorporate Cobra into your application.
In order to use the cobra command, compile it using the following command:
[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it.
go get github.com/spf13/cobra/cobra
This will create the cobra executable under your `$GOPATH/bin` directory.
### cobra init
The `cobra init [yourApp]` command will create your initial application code
for you. It is a very powerful application that will populate your program with
the right structure so you can immediately enjoy all the benefits of Cobra. It
will also automatically apply the license you specify to your application.
Cobra init is pretty smart. You can provide it a full path, or simply a path
similar to what is expected in the import.
```
cobra init github.com/spf13/newAppName
```
### cobra add
Once an application is initialized Cobra can create additional commands for you.
Let's say you created an app and you wanted the following commands for it:
* app serve
* app config
* app config create
In your project directory (where your main.go file is) you would run the following:
```
cobra add serve
cobra add config
cobra add create -p 'configCmd'
```
*Note: Use camelCase (not snake_case/snake-case) for command names.
Otherwise, you will encounter errors.
For example, `cobra add add-user` is incorrect, but `cobra add addUser` is valid.*
Once you have run these three commands you would have an app structure similar to
the following:
```
▾ app/
▾ cmd/
serve.go
config.go
create.go
main.go
```
At this point you can run `go run main.go` and it would run your app. `go run
main.go serve`, `go run main.go config`, `go run main.go config create` along
with `go run main.go help serve`, etc. would all work.
Obviously you haven't added your own code to these yet. The commands are ready
for you to give them their tasks. Have fun!
### Configuring the cobra generator
The Cobra generator will be easier to use if you provide a simple configuration
file which will help you eliminate providing a bunch of repeated information in
flags over and over.
An example ~/.cobra.yaml file:
```yaml
author: Steve Francia <spf@spf13.com>
license: MIT
```
You can specify no license by setting `license` to `none` or you can specify
a custom license:
```yaml
license:
header: This file is part of {{ .appName }}.
text: |
{{ .copyright }}
This is my license. There are many like it, but this one is mine.
My license is my best friend. It is my life. I must master it as I must
master my life.
```
You can also use built-in licenses. For example, **GPLv2**, **GPLv3**, **LGPL**,
**AGPL**, **MIT**, **2-Clause BSD** or **3-Clause BSD**.
## Manually implementing Cobra
## Using the Cobra Library
To manually implement Cobra you need to create a bare main.go file and a RootCmd file.
You will optionally provide additional commands as you see fit.
### Create the root command
The root command represents your binary itself.
#### Manually create rootCmd
### Create rootCmd
Cobra doesn't require any special constructors. Simply create your commands.
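For orientation, a minimal hand-written command along the lines the README describes (a sketch with illustrative names; the README's own example uses its project-specific layout):

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

var rootCmd = &cobra.Command{
	Use:   "app",
	Short: "app is a tiny example binary",
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Println("hello from app")
	},
}

func main() {
	// Execute parses os.Args, resolves the target command and runs it.
	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```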
@ -400,17 +308,6 @@ var versionCmd = &cobra.Command{
}
```
### Attach command to its parent
If you notice in the above example we attach the command to its parent. In
this case the parent is the rootCmd. In this example we are attaching it to the
root, but commands can be attached at any level.
```go
RootCmd.AddCommand(versionCmd)
```
## Working with Flags
Flags provide modifiers to control how the action command operates.
@ -446,6 +343,19 @@ A flag can also be assigned locally which will only apply to that specific comma
RootCmd.Flags().StringVarP(&Source, "source", "s", "", "Source directory to read from")
```
### Local Flag on Parent Commands
By default Cobra only parses local flags on the target command, any local flags on
parent commands are ignored. By enabling `Command.TraverseChildren` Cobra will
parse local flags on each command before executing the target command.
```go
command := cobra.Command{
Use: "print [OPTIONS] [COMMANDS]",
TraverseChildren: true,
}
```
### Bind Flags with Config
You can also bind your flags with [viper](https://github.com/spf13/viper):
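For example (a sketch assuming the usual `viper.BindPFlag` helper; the flag name is illustrative):

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

func main() {
	rootCmd := &cobra.Command{
		Use: "app",
		Run: func(cmd *cobra.Command, args []string) {
			// The value is reachable through viper, whether it came from the
			// flag, a config file, or an environment variable.
			fmt.Println("author:", viper.GetString("author"))
		},
	}

	rootCmd.PersistentFlags().String("author", "anonymous", "author name for attribution")
	viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author"))

	rootCmd.Execute()
}
```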
@ -569,7 +479,7 @@ a count and a string.`,
For a more complete example of a larger application, please checkout [Hugo](http://gohugo.io/).
## The Help Command
## Help Command
Cobra automatically adds a help command to your application when you have subcommands.
This will be called when a user runs 'app help'. Additionally, help will also
@ -582,60 +492,28 @@ create' is called. Every command will automatically have the '--help' flag adde
The following output is automatically generated by Cobra. Nothing beyond the
command and flag definitions are needed.
> hugo help
$ cobra help
hugo is the main command, used to build your Hugo site.
Hugo is a Fast and Flexible Static Site Generator
built with love by spf13 and friends in Go.
Complete documentation is available at http://gohugo.io/.
Cobra is a CLI library for Go that empowers applications.
This application is a tool to generate the needed files
to quickly create a Cobra application.
Usage:
hugo [flags]
hugo [command]
cobra [command]
Available Commands:
server Hugo runs its own webserver to render the files
version Print the version number of Hugo
config Print the site configuration
check Check content in the source directory
benchmark Benchmark hugo by building a site a number of times.
convert Convert your content to different formats
new Create new content for your site
list Listing out various types of content
undraft Undraft changes the content's draft status from 'True' to 'False'
genautocomplete Generate shell autocompletion script for Hugo
gendoc Generate Markdown documentation for the Hugo CLI.
genman Generate man page for Hugo
import Import your site from others.
add Add a command to a Cobra Application
help Help about any command
init Initialize a Cobra Application
Flags:
-b, --baseURL="": hostname (and path) to the root, e.g. http://spf13.com/
-D, --buildDrafts[=false]: include content marked as draft
-F, --buildFuture[=false]: include content with publishdate in the future
--cacheDir="": filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/
--canonifyURLs[=false]: if true, all relative URLs will be canonicalized using baseURL
--config="": config file (default is path/config.yaml|json|toml)
-d, --destination="": filesystem path to write files to
--disableRSS[=false]: Do not build RSS files
--disableSitemap[=false]: Do not build Sitemap file
--editor="": edit new content with this editor, if provided
--ignoreCache[=false]: Ignores the cache directory for reading but still writes to it
--log[=false]: Enable Logging
--logFile="": Log File path (if set, logging enabled automatically)
--noTimes[=false]: Don't sync modification time of files
--pluralizeListTitles[=true]: Pluralize titles in lists using inflect
--preserveTaxonomyNames[=false]: Preserve taxonomy names as written ("Gérard Depardieu" vs "gerard-depardieu")
-s, --source="": filesystem path to read files relative from
--stepAnalysis[=false]: display memory and timing of different steps of the program
-t, --theme="": theme to use (located in /themes/THEMENAME/)
--uglyURLs[=false]: if true, use /filename.html instead of /filename/
-v, --verbose[=false]: verbose output
--verboseLog[=false]: verbose logging
-w, --watch[=false]: watch filesystem for changes and recreate as needed
-a, --author string author name for copyright attribution (default "YOUR NAME")
--config string config file (default is $HOME/.cobra.yaml)
-h, --help help for cobra
-l, --license string name of license for the project
--viper use Viper for configuration (default true)
Use "hugo [command] --help" for more information about a command.
Use "cobra [command] --help" for more information about a command.
Help is just a command like any other. There is no special logic or behavior
@ -643,36 +521,18 @@ around it. In fact, you can provide your own if you want.
### Defining your own help
You can provide your own Help command or your own template for the default command to use.
The default help command is
You can provide your own Help command or your own template for the default command to use
with the following functions:
```go
func (c *Command) initHelp() {
if c.helpCommand == nil {
c.helpCommand = &Command{
Use: "help [command]",
Short: "Help about any command",
Long: `Help provides help for any command in the application.
Simply type ` + c.Name() + ` help [path to command] for full details.`,
Run: c.HelpFunc(),
}
}
c.AddCommand(c.helpCommand)
}
```
You can provide your own command, function or template through the following methods:
```go
command.SetHelpCommand(cmd *Command)
command.SetHelpFunc(f func(*Command, []string))
command.SetHelpTemplate(s string)
cmd.SetHelpCommand(cmd *Command)
cmd.SetHelpFunc(f func(*Command, []string))
cmd.SetHelpTemplate(s string)
```
The latter two will also apply to any children commands.
## Usage
## Usage Message
When the user provides an invalid flag or invalid command, Cobra responds by
showing the user the 'usage'.
@ -681,71 +541,35 @@ showing the user the 'usage'.
You may recognize this from the help above. That's because the default help
embeds the usage as part of its output.
$ cobra --invalid
Error: unknown flag: --invalid
Usage:
hugo [flags]
hugo [command]
cobra [command]
Available Commands:
server Hugo runs its own webserver to render the files
version Print the version number of Hugo
config Print the site configuration
check Check content in the source directory
benchmark Benchmark hugo by building a site a number of times.
convert Convert your content to different formats
new Create new content for your site
list Listing out various types of content
undraft Undraft changes the content's draft status from 'True' to 'False'
genautocomplete Generate shell autocompletion script for Hugo
gendoc Generate Markdown documentation for the Hugo CLI.
genman Generate man page for Hugo
import Import your site from others.
add Add a command to a Cobra Application
help Help about any command
init Initialize a Cobra Application
Flags:
-b, --baseURL="": hostname (and path) to the root, e.g. http://spf13.com/
-D, --buildDrafts[=false]: include content marked as draft
-F, --buildFuture[=false]: include content with publishdate in the future
--cacheDir="": filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/
--canonifyURLs[=false]: if true, all relative URLs will be canonicalized using baseURL
--config="": config file (default is path/config.yaml|json|toml)
-d, --destination="": filesystem path to write files to
--disableRSS[=false]: Do not build RSS files
--disableSitemap[=false]: Do not build Sitemap file
--editor="": edit new content with this editor, if provided
--ignoreCache[=false]: Ignores the cache directory for reading but still writes to it
--log[=false]: Enable Logging
--logFile="": Log File path (if set, logging enabled automatically)
--noTimes[=false]: Don't sync modification time of files
--pluralizeListTitles[=true]: Pluralize titles in lists using inflect
--preserveTaxonomyNames[=false]: Preserve taxonomy names as written ("Gérard Depardieu" vs "gerard-depardieu")
-s, --source="": filesystem path to read files relative from
--stepAnalysis[=false]: display memory and timing of different steps of the program
-t, --theme="": theme to use (located in /themes/THEMENAME/)
--uglyURLs[=false]: if true, use /filename.html instead of /filename/
-v, --verbose[=false]: verbose output
--verboseLog[=false]: verbose logging
-w, --watch[=false]: watch filesystem for changes and recreate as needed
-a, --author string author name for copyright attribution (default "YOUR NAME")
--config string config file (default is $HOME/.cobra.yaml)
-h, --help help for cobra
-l, --license string name of license for the project
--viper use Viper for configuration (default true)
Use "cobra [command] --help" for more information about a command.
### Defining your own usage
You can provide your own usage function or template for Cobra to use.
The default usage function is:
```go
return func(c *Command) error {
err := tmpl(c.Out(), c.UsageTemplate(), c)
return err
}
```
Like help, the function and template are overridable through public methods:
```go
command.SetUsageFunc(f func(*Command) error)
command.SetUsageTemplate(s string)
cmd.SetUsageFunc(f func(*Command) error)
cmd.SetUsageTemplate(s string)
```
## PreRun or PostRun Hooks
## PreRun and PostRun Hooks
It is possible to run functions before or after the main `Run` function of your command. The `PersistentPreRun` and `PreRun` functions will be executed before `Run`. `PersistentPostRun` and `PostRun` will be executed after `Run`. The `Persistent*Run` functions will be inherited by children if they do not declare their own. These functions are run in the following order:
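A compact sketch of the hook fields involved (the README's full example adds a child command; the names printed here are illustrative):

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{
		Use:               "root",
		PersistentPreRun:  func(cmd *cobra.Command, args []string) { fmt.Println("PersistentPreRun") },
		PreRun:            func(cmd *cobra.Command, args []string) { fmt.Println("PreRun") },
		Run:               func(cmd *cobra.Command, args []string) { fmt.Println("Run") },
		PostRun:           func(cmd *cobra.Command, args []string) { fmt.Println("PostRun") },
		PersistentPostRun: func(cmd *cobra.Command, args []string) { fmt.Println("PersistentPostRun") },
	}
	// Running the bare root command prints the five lines in the order above;
	// a child command inherits the Persistent* hooks unless it defines its own.
	rootCmd.Execute()
}
```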
@ -815,51 +639,19 @@ func main() {
}
```
Output:
```
Inside rootCmd PersistentPreRun with args: []
Inside rootCmd PreRun with args: []
Inside rootCmd Run with args: []
Inside rootCmd PostRun with args: []
Inside rootCmd PersistentPostRun with args: []
## Alternative Error Handling
Cobra also has functions where the return signature is an error. This allows for errors to bubble up to the top,
providing a way to handle the errors in one location. The current list of functions that return an error is:
* PersistentPreRunE
* PreRunE
* RunE
* PostRunE
* PersistentPostRunE
If you would like to silence the default `error` and `usage` output in favor of your own, you can set `SilenceUsage`
and `SilenceErrors` to `true` on the command. A child command respects these flags if they are set on the parent
command.
**Example Usage using RunE:**
```go
package main
import (
"errors"
"log"
"github.com/spf13/cobra"
)
func main() {
var rootCmd = &cobra.Command{
Use: "hugo",
Short: "Hugo is a very fast static site generator",
Long: `A Fast and Flexible Static Site Generator built with
love by spf13 and friends in Go.
Complete documentation is available at http://hugo.spf13.com`,
RunE: func(cmd *cobra.Command, args []string) error {
// Do Stuff Here
return errors.New("some random error")
},
}
if err := rootCmd.Execute(); err != nil {
log.Fatal(err)
}
}
Inside rootCmd PersistentPreRun with args: [arg1 arg2]
Inside subCmd PreRun with args: [arg1 arg2]
Inside subCmd Run with args: [arg1 arg2]
Inside subCmd PostRun with args: [arg1 arg2]
Inside subCmd PersistentPostRun with args: [arg1 arg2]
```
## Suggestions when "unknown command" happens
@ -902,41 +694,28 @@ Did you mean this?
Run 'kubectl help' for usage.
```
## Generating Markdown-formatted documentation for your command
## Generating documentation for your command
Cobra can generate a Markdown-formatted document based on the subcommands, flags, etc. A simple example of how to do this for your command can be found in [Markdown Docs](doc/md_docs.md).
Cobra can generate documentation based on subcommands, flags, etc. in the following formats:
## Generating man pages for your command
- [Markdown](doc/md_docs.md)
- [ReStructured Text](doc/rest_docs.md)
- [Man Page](doc/man_docs.md)
Cobra can generate a man page based on the subcommands, flags, etc. A simple example of how to do this for your command can be found in [Man Docs](doc/man_docs.md).
## Generating bash completions for your command
## Generating bash completions
Cobra can generate a bash-completion file. If you add more information to your command, these completions can be amazingly powerful and flexible. Read more about it in [Bash Completions](bash_completions.md).
## Extensions
Libraries for extending Cobra:
* [cmdns](https://github.com/gosuri/cmdns): Enables name spacing a command's immediate children. It provides an alternative way to structure subcommands, similar to `heroku apps:create` and `ovrclk clusters:launch`.
## Contributing
# Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
2. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`)
3. Create your feature branch (`git checkout -b my-new-feature`)
4. Make changes and add them (`git add .`)
5. Commit your changes (`git commit -m 'Add some feature'`)
6. Push to the branch (`git push origin my-new-feature`)
7. Create new pull request
## Contributors
Names in no particular order:
* [spf13](https://github.com/spf13),
[eparis](https://github.com/eparis),
[bep](https://github.com/bep), and many more!
## License
# License
Cobra is released under the Apache 2.0 license. See [LICENSE.txt](https://github.com/spf13/cobra/blob/master/LICENSE.txt)

View file

@ -92,7 +92,7 @@ __handle_reply()
cur="${cur#*=}"
${flags_completion[${index}]}
if [ -n "${ZSH_VERSION}" ]; then
# zfs completion needs --flag= prefix
# zsh completion needs --flag= prefix
eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )"
fi
fi

94
vendor/github.com/spf13/cobra/cobra/README.md generated vendored Normal file
View file

@ -0,0 +1,94 @@
# Cobra Generator
Cobra provides its own program that will create your application and add any
commands you want. It's the easiest way to incorporate Cobra into your application.
In order to use the cobra command, compile it using the following command:
go get github.com/spf13/cobra/cobra
This will create the cobra executable under your `$GOPATH/bin` directory.
### cobra init
The `cobra init [app]` command will create your initial application code
for you. It is a very powerful application that will populate your program with
the right structure so you can immediately enjoy all the benefits of Cobra. It
will also automatically apply the license you specify to your application.
Cobra init is pretty smart. You can provide it a full path, or simply a path
like the one you would use in an import statement.
```
cobra init github.com/spf13/newApp
```
### cobra add
Once an application is initialized, Cobra can create additional commands for you.
Let's say you created an app and you wanted the following commands for it:
* app serve
* app config
* app config create
In your project directory (where your main.go file is) you would run the following:
```
cobra add serve
cobra add config
cobra add create -p 'configCmd'
```
*Note: Use camelCase (not snake_case/kebab-case) for command names.
Otherwise, you will encounter errors.
For example, `cobra add add-user` is incorrect, but `cobra add addUser` is valid.*
Once you have run these three commands you would have an app structure similar to
the following:
```
▾ app/
▾ cmd/
serve.go
config.go
create.go
main.go
```
At this point you can run `go run main.go` and it will run your app. `go run
main.go serve`, `go run main.go config`, `go run main.go config create`, along
with `go run main.go help serve`, etc., will all work.
Obviously you haven't added your own code to these yet. The commands are ready
for you to give them their tasks. Have fun!
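For orientation, here is a rough, abridged sketch of what a generated `cmd/serve.go` typically looks like; the exact boilerplate and the name of the root command variable vary between generator versions:

```go
package cmd

import (
	"fmt"

	"github.com/spf13/cobra"
)

// RootCmd would normally be defined in cmd/root.go by `cobra init`;
// it is declared here only to keep the sketch self-contained.
var RootCmd = &cobra.Command{Use: "app"}

// serveCmd is the command added by `cobra add serve`.
var serveCmd = &cobra.Command{
	Use:   "serve",
	Short: "A brief description of your command",
	Run: func(cmd *cobra.Command, args []string) {
		// Replace this with the real work of `app serve`.
		fmt.Println("serve called")
	},
}

func init() {
	RootCmd.AddCommand(serveCmd)
}
```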
### Configuring the cobra generator
The Cobra generator is easier to use if you provide a simple configuration
file, which saves you from repeating the same information in flags over and
over.
An example ~/.cobra.yaml file:
```yaml
author: Steve Francia <spf@spf13.com>
license: MIT
```
You can specify no license by setting `license` to `none` or you can specify
a custom license:
```yaml
license:
header: This file is part of {{ .appName }}.
text: |
{{ .copyright }}
This is my license. There are many like it, but this one is mine.
My license is my best friend. It is my life. I must master it as I must
master my life.
```
You can also use built-in licenses. For example, **GPLv2**, **GPLv3**, **LGPL**,
**AGPL**, **MIT**, **2-Clause BSD** or **3-Clause BSD**.

View file

@ -18,6 +18,7 @@ import (
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"text/template"
@ -31,7 +32,27 @@ func init() {
envGoPath := os.Getenv("GOPATH")
goPaths := filepath.SplitList(envGoPath)
if len(goPaths) == 0 {
er("$GOPATH is not set")
// Adapted from https://github.com/Masterminds/glide/pull/798/files.
// As of Go 1.8 the GOPATH is no longer required to be set. Instead there
// is a default value. If there is no GOPATH check for the default value.
// Note, checking the GOPATH first to avoid invoking the go toolchain if
// possible.
goExecutable := os.Getenv("COBRA_GO_EXECUTABLE")
if len(goExecutable) <= 0 {
goExecutable = "go"
}
out, err := exec.Command(goExecutable, "env", "GOPATH").Output()
if err != nil {
er(err)
}
toolchainGoPath := strings.TrimSpace(string(out))
goPaths = filepath.SplitList(toolchainGoPath)
if len(goPaths) == 0 {
er("$GOPATH is not set")
}
}
srcPaths = make([]string, 0, len(goPaths))
for _, goPath := range goPaths {

View file

@ -63,7 +63,7 @@ func getLicense() License {
// If user wants to have custom license, use that.
if viper.IsSet("license.header") || viper.IsSet("license.text") {
return License{Header: viper.GetString("license.header"),
Text: "license.text"}
Text: viper.GetString("license.text")}
}
// If user wants to have built-in license, use that.

View file

@ -190,6 +190,7 @@ func flagInit() {
cmdTimes.Flags().IntVarP(&flagi2, "inttwo", "j", 234, "help message for flag inttwo")
cmdTimes.Flags().StringVarP(&flags2b, "strtwo", "t", "2", strtwoChildHelp)
cmdTimes.PersistentFlags().StringVarP(&flags2b, "strtwo", "t", "2", strtwoChildHelp)
cmdTimes.LocalFlags() // populate lflags before parent is set
cmdPrint.Flags().BoolVarP(&flagb3, "boolthree", "b", true, "help message for flag boolthree")
cmdPrint.PersistentFlags().StringVarP(&flags3, "strthree", "s", "three", "help message for flag strthree")
}
@ -210,8 +211,8 @@ func initialize() *Command {
rootPersPre, echoPre, echoPersPre, timesPersPre = nil, nil, nil, nil
var c = cmdRootNoRun
flagInit()
commandInit()
flagInit()
return c
}
@ -219,8 +220,8 @@ func initializeWithSameName() *Command {
tt, tp, te = nil, nil, nil
rootPersPre, echoPre, echoPersPre, timesPersPre = nil, nil, nil, nil
var c = cmdRootSameName
flagInit()
commandInit()
flagInit()
return c
}
@ -910,6 +911,7 @@ func TestRootHelp(t *testing.T) {
func TestFlagAccess(t *testing.T) {
initialize()
cmdEcho.AddCommand(cmdTimes)
local := cmdTimes.LocalFlags()
inherited := cmdTimes.InheritedFlags()
@ -1165,11 +1167,18 @@ func TestGlobalNormFuncPropagation(t *testing.T) {
}
rootCmd := initialize()
rootCmd.AddCommand(cmdEcho)
rootCmd.SetGlobalNormalizationFunc(normFunc)
if reflect.ValueOf(normFunc).Pointer() != reflect.ValueOf(rootCmd.GlobalNormalizationFunc()).Pointer() {
t.Error("rootCmd seems to have a wrong normalization function")
}
// Also check it propagates retroactively
if reflect.ValueOf(normFunc).Pointer() != reflect.ValueOf(cmdEcho.GlobalNormalizationFunc()).Pointer() {
t.Error("cmdEcho should have had the normalization function of rootCmd")
}
// First add the cmdEchoSub to cmdPrint
cmdPrint.AddCommand(cmdEchoSub)
if cmdPrint.GlobalNormalizationFunc() != nil && cmdEchoSub.GlobalNormalizationFunc() != nil {
@ -1184,6 +1193,67 @@ func TestGlobalNormFuncPropagation(t *testing.T) {
}
}
func TestNormPassedOnLocal(t *testing.T) {
n := func(f *pflag.FlagSet, name string) pflag.NormalizedName {
return pflag.NormalizedName(strings.ToUpper(name))
}
cmd := &Command{}
flagVal := false
cmd.Flags().BoolVar(&flagVal, "flagname", true, "this is a dummy flag")
cmd.SetGlobalNormalizationFunc(n)
if cmd.LocalFlags().Lookup("flagname") != cmd.LocalFlags().Lookup("FLAGNAME") {
t.Error("Normalization function should be passed on to Local flag set")
}
}
func TestNormPassedOnInherited(t *testing.T) {
n := func(f *pflag.FlagSet, name string) pflag.NormalizedName {
return pflag.NormalizedName(strings.ToUpper(name))
}
cmd, childBefore, childAfter := &Command{}, &Command{}, &Command{}
flagVal := false
cmd.AddCommand(childBefore)
cmd.PersistentFlags().BoolVar(&flagVal, "flagname", true, "this is a dummy flag")
cmd.SetGlobalNormalizationFunc(n)
cmd.AddCommand(childAfter)
if f := childBefore.InheritedFlags(); f.Lookup("flagname") == nil || f.Lookup("flagname") != f.Lookup("FLAGNAME") {
t.Error("Normalization function should be passed on to inherited flag set in command added before flag")
}
if f := childAfter.InheritedFlags(); f.Lookup("flagname") == nil || f.Lookup("flagname") != f.Lookup("FLAGNAME") {
t.Error("Normalization function should be passed on to inherited flag set in command added after flag")
}
}
// Related to https://github.com/spf13/cobra/issues/521.
func TestNormConsistent(t *testing.T) {
n := func(f *pflag.FlagSet, name string) pflag.NormalizedName {
return pflag.NormalizedName(strings.ToUpper(name))
}
id := func(f *pflag.FlagSet, name string) pflag.NormalizedName {
return pflag.NormalizedName(name)
}
cmd := &Command{}
flagVal := false
cmd.Flags().BoolVar(&flagVal, "flagname", true, "this is a dummy flag")
// Build local flag set
cmd.LocalFlags()
cmd.SetGlobalNormalizationFunc(n)
cmd.SetGlobalNormalizationFunc(id)
if cmd.LocalFlags().Lookup("flagname") == cmd.LocalFlags().Lookup("FLAGNAME") {
t.Error("Normalizing flag names should not result in duplicate flags")
}
}
func TestFlagOnPflagCommandLine(t *testing.T) {
flagName := "flagOnCommandLine"
pflag.String(flagName, "", "about my flag")

View file

@ -125,8 +125,9 @@ type Command struct {
// Must be > 0.
SuggestionsMinimumDistance int
// name is the command name, usually the executable's name.
name string
// TraverseChildren parses flags on all parents before executing child command.
TraverseChildren bool
// commands is the list of commands supported by this program.
commands []*Command
// parent is a parent command for this command.
@ -475,13 +476,14 @@ func argsMinusFirstX(args []string, x string) []string {
return args
}
func isFlagArg(arg string) bool {
return ((len(arg) >= 3 && arg[1] == '-') ||
(len(arg) >= 2 && arg[0] == '-' && arg[1] != '-'))
}
// Find the target command given the args and command tree
// Meant to be run on the highest node. Only searches down.
func (c *Command) Find(args []string) (*Command, []string, error) {
if c == nil {
return nil, nil, fmt.Errorf("Called find() on a nil Command")
}
var innerfind func(*Command, []string) (*Command, []string)
innerfind = func(c *Command, innerArgs []string) (*Command, []string) {
@ -490,28 +492,11 @@ func (c *Command) Find(args []string) (*Command, []string, error) {
return c, innerArgs
}
nextSubCmd := argsWOflags[0]
matches := make([]*Command, 0)
for _, cmd := range c.commands {
if cmd.Name() == nextSubCmd || cmd.HasAlias(nextSubCmd) { // exact name or alias match
return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd))
}
if EnablePrefixMatching {
if strings.HasPrefix(cmd.Name(), nextSubCmd) { // prefix match
matches = append(matches, cmd)
}
for _, x := range cmd.Aliases {
if strings.HasPrefix(x, nextSubCmd) {
matches = append(matches, cmd)
}
}
}
}
// only accept a single prefix match - multiple matches would be ambiguous
if len(matches) == 1 {
return innerfind(matches[0], argsMinusFirstX(innerArgs, argsWOflags[0]))
cmd := c.findNext(nextSubCmd)
if cmd != nil {
return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd))
}
return c, innerArgs
}
@ -539,6 +524,66 @@ func (c *Command) findSuggestions(arg string) string {
return suggestionsString
}
func (c *Command) findNext(next string) *Command {
matches := make([]*Command, 0)
for _, cmd := range c.commands {
if cmd.Name() == next || cmd.HasAlias(next) {
return cmd
}
if EnablePrefixMatching && cmd.hasNameOrAliasPrefix(next) {
matches = append(matches, cmd)
}
}
if len(matches) == 1 {
return matches[0]
}
return nil
}
// Traverse the command tree to find the command, and parse args for
// each parent.
func (c *Command) Traverse(args []string) (*Command, []string, error) {
flags := []string{}
inFlag := false
for i, arg := range args {
switch {
// A long flag with a space separated value
case strings.HasPrefix(arg, "--") && !strings.Contains(arg, "="):
// TODO: this isn't quite right, we should really check ahead for 'true' or 'false'
inFlag = !hasNoOptDefVal(arg[2:], c.Flags())
flags = append(flags, arg)
continue
// A short flag with a space separated value
case strings.HasPrefix(arg, "-") && !strings.Contains(arg, "=") && len(arg) == 2 && !shortHasNoOptDefVal(arg[1:], c.Flags()):
inFlag = true
flags = append(flags, arg)
continue
// The value for a flag
case inFlag:
inFlag = false
flags = append(flags, arg)
continue
// A flag without a value, or with an `=` separated value
case isFlagArg(arg):
flags = append(flags, arg)
continue
}
cmd := c.findNext(arg)
if cmd == nil {
return c, args, nil
}
if err := c.ParseFlags(flags); err != nil {
return nil, args, err
}
return cmd.Traverse(args[i+1:])
}
return c, args, nil
}
// SuggestionsFor provides suggestions for the typedName.
func (c *Command) SuggestionsFor(typedName string) []string {
suggestions := []string{}
@ -646,6 +691,9 @@ func (c *Command) execute(a []string) (err error) {
c.PreRun(c, argWoFlags)
}
if err := c.validateRequiredFlags(); err != nil {
return err
}
if c.RunE != nil {
if err := c.RunE(c, argWoFlags); err != nil {
return err
@ -714,7 +762,12 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
args = c.args
}
cmd, flags, err := c.Find(args)
var flags []string
if c.TraverseChildren {
cmd, flags, err = c.Traverse(args)
} else {
cmd, flags, err = c.Find(args)
}
if err != nil {
// If found parse to a subcommand and then failed, talk about the subcommand
if cmd != nil {
@ -726,6 +779,7 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
}
return c, err
}
err = cmd.execute(flags)
if err != nil {
// Always show help if requested, even if SilenceErrors is in
@ -757,6 +811,25 @@ func (c *Command) ValidateArgs(args []string) error {
return c.Args(c, args)
}
func (c *Command) validateRequiredFlags() error {
flags := c.Flags()
missingFlagNames := []string{}
flags.VisitAll(func(pflag *flag.Flag) {
requiredAnnotation, found := pflag.Annotations[BashCompOneRequiredFlag]
if !found {
return
}
if (requiredAnnotation[0] == "true") && !pflag.Changed {
missingFlagNames = append(missingFlagNames, pflag.Name)
}
})
if len(missingFlagNames) > 0 {
return fmt.Errorf(`Required flag(s) "%s" have/has not been set`, strings.Join(missingFlagNames, `", "`))
}
return nil
}
// InitDefaultHelpFlag adds default help flag to c.
// It is called automatically by executing the c or by calling help and usage.
// If c already has help flag, it will do nothing.
@ -806,6 +879,7 @@ Simply type ` + c.Name() + ` help [path to command] for full details.`,
// ResetCommands used for testing.
func (c *Command) ResetCommands() {
c.parent = nil
c.commands = nil
c.helpCommand = nil
c.parentsPflags = nil
@ -971,15 +1045,12 @@ func (c *Command) DebugFlags() {
// Name returns the command's name: the first word in the use line.
func (c *Command) Name() string {
if c.name == "" {
name := c.Use
i := strings.Index(name, " ")
if i >= 0 {
name = name[:i]
}
c.name = name
name := c.Use
i := strings.Index(name, " ")
if i >= 0 {
name = name[:i]
}
return c.name
return name
}
// HasAlias determines if a given string is an alias of the command.
@ -992,7 +1063,21 @@ func (c *Command) HasAlias(s string) bool {
return false
}
// NameAndAliases returns string containing name and all aliases
// hasNameOrAliasPrefix returns true if the Name or any of aliases start
// with prefix
func (c *Command) hasNameOrAliasPrefix(prefix string) bool {
if strings.HasPrefix(c.Name(), prefix) {
return true
}
for _, alias := range c.Aliases {
if strings.HasPrefix(alias, prefix) {
return true
}
}
return false
}
// NameAndAliases returns a list of the command name and all aliases
func (c *Command) NameAndAliases() string {
return strings.Join(append([]string{c.Name()}, c.Aliases...), ", ")
}
@ -1132,6 +1217,9 @@ func (c *Command) LocalFlags() *flag.FlagSet {
c.lflags.SetOutput(c.flagErrorBuf)
}
c.lflags.SortFlags = c.Flags().SortFlags
if c.globNormFunc != nil {
c.lflags.SetNormalizeFunc(c.globNormFunc)
}
addToLocal := func(f *flag.Flag) {
if c.lflags.Lookup(f.Name) == nil && c.parentsPflags.Lookup(f.Name) == nil {
@ -1156,6 +1244,10 @@ func (c *Command) InheritedFlags() *flag.FlagSet {
}
local := c.LocalFlags()
if c.globNormFunc != nil {
c.iflags.SetNormalizeFunc(c.globNormFunc)
}
c.parentsPflags.VisitAll(func(f *flag.Flag) {
if c.iflags.Lookup(f.Name) == nil && local.Lookup(f.Name) == nil {
c.iflags.AddFlag(f)
@ -1189,6 +1281,10 @@ func (c *Command) ResetFlags() {
c.flags.SetOutput(c.flagErrorBuf)
c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
c.pflags.SetOutput(c.flagErrorBuf)
c.lflags = nil
c.iflags = nil
c.parentsPflags = nil
}
// HasFlags checks if the command contains any flags (local plus persistent from the entire structure).
@ -1264,6 +1360,9 @@ func (c *Command) ParseFlags(args []string) error {
return nil
}
if c.flagErrorBuf == nil {
c.flagErrorBuf = new(bytes.Buffer)
}
beforeErrorBufLen := c.flagErrorBuf.Len()
c.mergePersistentFlags()
err := c.Flags().Parse(args)
@ -1298,6 +1397,10 @@ func (c *Command) updateParentsPflags() {
c.parentsPflags.SortFlags = false
}
if c.globNormFunc != nil {
c.parentsPflags.SetNormalizeFunc(c.globNormFunc)
}
c.Root().PersistentFlags().AddFlagSet(flag.CommandLine)
c.VisitParents(func(parent *Command) {

View file

@ -347,3 +347,180 @@ func TestSetHelpCommand(t *testing.T) {
t.Errorf("Expected to contain %q message, but got %q", correctMessage, output.String())
}
}
func TestTraverseWithParentFlags(t *testing.T) {
cmd := &Command{
Use: "do",
TraverseChildren: true,
}
cmd.Flags().String("foo", "", "foo things")
cmd.Flags().BoolP("goo", "g", false, "foo things")
sub := &Command{Use: "next"}
sub.Flags().String("add", "", "add things")
cmd.AddCommand(sub)
c, args, err := cmd.Traverse([]string{"-g", "--foo", "ok", "next", "--add"})
if err != nil {
t.Fatalf("Expected no error: %s", err)
}
	if len(args) != 1 || args[0] != "--add" {
t.Fatalf("wrong args %s", args)
}
if c.Name() != sub.Name() {
t.Fatalf("wrong command %q expected %q", c.Name(), sub.Name())
}
}
func TestTraverseNoParentFlags(t *testing.T) {
cmd := &Command{
Use: "do",
TraverseChildren: true,
}
cmd.Flags().String("foo", "", "foo things")
sub := &Command{Use: "next"}
sub.Flags().String("add", "", "add things")
cmd.AddCommand(sub)
c, args, err := cmd.Traverse([]string{"next"})
if err != nil {
t.Fatalf("Expected no error: %s", err)
}
if len(args) != 0 {
t.Fatalf("wrong args %s", args)
}
if c.Name() != sub.Name() {
t.Fatalf("wrong command %q expected %q", c.Name(), sub.Name())
}
}
func TestTraverseWithBadParentFlags(t *testing.T) {
cmd := &Command{
Use: "do",
TraverseChildren: true,
}
sub := &Command{Use: "next"}
sub.Flags().String("add", "", "add things")
cmd.AddCommand(sub)
	expected := "unknown flag: --add"
	c, _, err := cmd.Traverse([]string{"--add", "ok", "next"})
	if err == nil || !strings.Contains(err.Error(), expected) {
t.Fatalf("Expected error %s got %s", expected, err)
}
if c != nil {
t.Fatalf("Expected nil command")
}
}
func TestTraverseWithBadChildFlag(t *testing.T) {
cmd := &Command{
Use: "do",
TraverseChildren: true,
}
cmd.Flags().String("foo", "", "foo things")
sub := &Command{Use: "next"}
cmd.AddCommand(sub)
	// Expect no error because the last command's args shouldn't be parsed in
// Traverse
c, args, err := cmd.Traverse([]string{"next", "--add"})
if err != nil {
t.Fatalf("Expected no error: %s", err)
}
	if len(args) != 1 || args[0] != "--add" {
t.Fatalf("wrong args %s", args)
}
if c.Name() != sub.Name() {
t.Fatalf("wrong command %q expected %q", c.Name(), sub.Name())
}
}
func TestTraverseWithTwoSubcommands(t *testing.T) {
cmd := &Command{
Use: "do",
TraverseChildren: true,
}
sub := &Command{
Use: "sub",
TraverseChildren: true,
}
cmd.AddCommand(sub)
subsub := &Command{
Use: "subsub",
}
sub.AddCommand(subsub)
c, _, err := cmd.Traverse([]string{"sub", "subsub"})
if err != nil {
t.Fatalf("Expected no error: %s", err)
}
if c.Name() != subsub.Name() {
t.Fatalf("wrong command %q expected %q", c.Name(), subsub.Name())
}
}
func TestRequiredFlags(t *testing.T) {
c := &Command{Use: "c", Run: func(*Command, []string) {}}
output := new(bytes.Buffer)
c.SetOutput(output)
c.Flags().String("foo1", "", "required foo1")
c.MarkFlagRequired("foo1")
c.Flags().String("foo2", "", "required foo2")
c.MarkFlagRequired("foo2")
c.Flags().String("bar", "", "optional bar")
expected := fmt.Sprintf("Required flag(s) %q, %q have/has not been set", "foo1", "foo2")
if err := c.Execute(); err != nil {
if err.Error() != expected {
t.Errorf("expected %v, got %v", expected, err.Error())
}
}
}
func TestPersistentRequiredFlags(t *testing.T) {
parent := &Command{Use: "parent", Run: func(*Command, []string) {}}
output := new(bytes.Buffer)
parent.SetOutput(output)
parent.PersistentFlags().String("foo1", "", "required foo1")
parent.MarkPersistentFlagRequired("foo1")
parent.PersistentFlags().String("foo2", "", "required foo2")
parent.MarkPersistentFlagRequired("foo2")
parent.Flags().String("foo3", "", "optional foo3")
child := &Command{Use: "child", Run: func(*Command, []string) {}}
child.Flags().String("bar1", "", "required bar1")
child.MarkFlagRequired("bar1")
child.Flags().String("bar2", "", "required bar2")
child.MarkFlagRequired("bar2")
child.Flags().String("bar3", "", "optional bar3")
parent.AddCommand(child)
parent.SetArgs([]string{"child"})
expected := fmt.Sprintf("Required flag(s) %q, %q, %q, %q have/has not been set", "bar1", "bar2", "foo1", "foo2")
if err := parent.Execute(); err != nil {
if err.Error() != expected {
t.Errorf("expected %v, got %v", expected, err.Error())
}
}
}
// TestUpdateName checks if c.Name() updates on changed c.Use.
// Related to https://github.com/spf13/cobra/pull/422#discussion_r143918343.
func TestUpdateName(t *testing.T) {
c := &Command{Use: "name xyz"}
originalName := c.Name()
c.Use = "changedName abc"
if originalName == c.Name() || c.Name() != "changedName" {
t.Error("c.Name() should be updated on changed c.Use")
}
}

185
vendor/github.com/spf13/cobra/doc/rest_docs.go generated vendored Normal file
View file

@ -0,0 +1,185 @@
//Copyright 2015 Red Hat Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package doc
import (
"bytes"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
"time"
"github.com/spf13/cobra"
)
func printOptionsReST(buf *bytes.Buffer, cmd *cobra.Command, name string) error {
flags := cmd.NonInheritedFlags()
flags.SetOutput(buf)
if flags.HasFlags() {
buf.WriteString("Options\n")
buf.WriteString("~~~~~~~\n\n::\n\n")
flags.PrintDefaults()
buf.WriteString("\n")
}
parentFlags := cmd.InheritedFlags()
parentFlags.SetOutput(buf)
if parentFlags.HasFlags() {
buf.WriteString("Options inherited from parent commands\n")
buf.WriteString("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n::\n\n")
parentFlags.PrintDefaults()
buf.WriteString("\n")
}
return nil
}
// linkHandler for default ReST hyperlink markup
func defaultLinkHandler(name, ref string) string {
return fmt.Sprintf("`%s <%s.rst>`_", name, ref)
}
// GenReST creates reStructured Text output.
func GenReST(cmd *cobra.Command, w io.Writer) error {
return GenReSTCustom(cmd, w, defaultLinkHandler)
}
// GenReSTCustom creates custom reStructured Text output.
func GenReSTCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string, string) string) error {
cmd.InitDefaultHelpCmd()
cmd.InitDefaultHelpFlag()
buf := new(bytes.Buffer)
name := cmd.CommandPath()
short := cmd.Short
long := cmd.Long
if len(long) == 0 {
long = short
}
ref := strings.Replace(name, " ", "_", -1)
buf.WriteString(".. _" + ref + ":\n\n")
buf.WriteString(name + "\n")
buf.WriteString(strings.Repeat("-", len(name)) + "\n\n")
buf.WriteString(short + "\n\n")
buf.WriteString("Synopsis\n")
buf.WriteString("~~~~~~~~\n\n")
buf.WriteString("\n" + long + "\n\n")
if cmd.Runnable() {
buf.WriteString(fmt.Sprintf("::\n\n %s\n\n", cmd.UseLine()))
}
if len(cmd.Example) > 0 {
buf.WriteString("Examples\n")
buf.WriteString("~~~~~~~~\n\n")
buf.WriteString(fmt.Sprintf("::\n\n%s\n\n", indentString(cmd.Example, " ")))
}
if err := printOptionsReST(buf, cmd, name); err != nil {
return err
}
if hasSeeAlso(cmd) {
buf.WriteString("SEE ALSO\n")
buf.WriteString("~~~~~~~~\n\n")
if cmd.HasParent() {
parent := cmd.Parent()
pname := parent.CommandPath()
ref = strings.Replace(pname, " ", "_", -1)
buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(pname, ref), parent.Short))
cmd.VisitParents(func(c *cobra.Command) {
if c.DisableAutoGenTag {
cmd.DisableAutoGenTag = c.DisableAutoGenTag
}
})
}
children := cmd.Commands()
sort.Sort(byName(children))
for _, child := range children {
if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() {
continue
}
cname := name + " " + child.Name()
ref = strings.Replace(cname, " ", "_", -1)
buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(cname, ref), child.Short))
}
buf.WriteString("\n")
}
if !cmd.DisableAutoGenTag {
buf.WriteString("*Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "*\n")
}
_, err := buf.WriteTo(w)
return err
}
// GenReSTTree will generate a ReST page for this command and all
// descendants in the directory given.
// This function may not work correctly if your command names have `_` in them.
// If you have `cmd` with two subcmds, `sub` and `sub_third`,
// and `sub` has a subcommand called `third`, it is undefined which
// help output will be in the file `cmd_sub_third.rst`.
func GenReSTTree(cmd *cobra.Command, dir string) error {
emptyStr := func(s string) string { return "" }
return GenReSTTreeCustom(cmd, dir, emptyStr, defaultLinkHandler)
}
// GenReSTTreeCustom is the same as GenReSTTree, but
// with custom filePrepender and linkHandler.
func GenReSTTreeCustom(cmd *cobra.Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error {
for _, c := range cmd.Commands() {
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
continue
}
if err := GenReSTTreeCustom(c, dir, filePrepender, linkHandler); err != nil {
return err
}
}
basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".rst"
filename := filepath.Join(dir, basename)
f, err := os.Create(filename)
if err != nil {
return err
}
defer f.Close()
if _, err := io.WriteString(f, filePrepender(filename)); err != nil {
return err
}
if err := GenReSTCustom(cmd, f, linkHandler); err != nil {
return err
}
return nil
}
// adapted from: https://github.com/kr/text/blob/main/indent.go
func indentString(s, p string) string {
var res []byte
b := []byte(s)
prefix := []byte(p)
bol := true
for _, c := range b {
if bol && c != '\n' {
res = append(res, prefix...)
}
res = append(res, c)
bol = c == '\n'
}
return string(res)
}

114
vendor/github.com/spf13/cobra/doc/rest_docs.md generated vendored Normal file
View file

@ -0,0 +1,114 @@
# Generating ReStructured Text Docs For Your Own cobra.Command
Generating ReST pages from a cobra command is incredibly easy. An example is as follows:
```go
package main
import (
"log"
"github.com/spf13/cobra"
"github.com/spf13/cobra/doc"
)
func main() {
cmd := &cobra.Command{
Use: "test",
Short: "my test program",
}
err := doc.GenReSTTree(cmd, "/tmp")
if err != nil {
log.Fatal(err)
}
}
```
That will get you a ReST document `/tmp/test.rst`
## Generate ReST docs for the entire command tree
This program can actually generate docs for the kubectl command in the kubernetes project
```go
package main
import (
"log"
"io/ioutil"
"os"
"k8s.io/kubernetes/pkg/kubectl/cmd"
cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"github.com/spf13/cobra/doc"
)
func main() {
kubectl := cmd.NewKubectlCommand(cmdutil.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard)
err := doc.GenReSTTree(kubectl, "./")
if err != nil {
log.Fatal(err)
}
}
```
This will generate a whole series of files, one for each command in the tree, in the directory specified (in this case "./")
## Generate ReST docs for a single command
You may wish to have more control over the output, or to generate documentation for only a single command instead of the entire command tree. If this is the case, you may prefer to use `GenReST` instead of `GenReSTTree`
```go
out := new(bytes.Buffer)
err := doc.GenReST(cmd, out)
if err != nil {
log.Fatal(err)
}
```
This will write the ReST doc for ONLY "cmd" into the `out` buffer.
## Customize the output
Both `GenReST` and `GenReSTTree` have alternate versions with callbacks to get some control of the output:
```go
func GenReSTTreeCustom(cmd *Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error {
//...
}
```
```go
func GenReSTCustom(cmd *Command, w io.Writer, linkHandler func(string, string) string) error {
//...
}
```
The `filePrepender` callback receives the full filepath of the ReST file being generated, and its return value is prepended to that file. A common use case is to add front matter so the generated documentation can be used with [Hugo](http://gohugo.io/):
```go
const fmTemplate = `---
date: %s
title: "%s"
slug: %s
url: %s
---
`
filePrepender := func(filename string) string {
now := time.Now().Format(time.RFC3339)
name := filepath.Base(filename)
base := strings.TrimSuffix(name, path.Ext(name))
url := "/commands/" + strings.ToLower(base) + "/"
return fmt.Sprintf(fmTemplate, now, strings.Replace(base, "_", " ", -1), base, url)
}
```
The `linkHandler` can be used to customize the rendered links to the commands, given a command name and reference. This is useful when converting ReST to HTML or when generating documentation with tools like Sphinx, where `:ref:` is used:
```go
// Sphinx cross-referencing format
linkHandler := func(name, ref string) string {
return fmt.Sprintf(":ref:`%s <%s>`", name, ref)
}
```

124
vendor/github.com/spf13/cobra/doc/rest_docs_test.go generated vendored Normal file
View file

@ -0,0 +1,124 @@
package doc
import (
"bytes"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"github.com/spf13/cobra"
)
func TestGenRSTDoc(t *testing.T) {
c := initializeWithRootCmd()
// Need two commands to run the command alphabetical sort
cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated)
c.AddCommand(cmdPrint, cmdEcho)
cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp)
out := new(bytes.Buffer)
	// We generate on a subcommand so we have both subcommands and parents
if err := GenReST(cmdEcho, out); err != nil {
t.Fatal(err)
}
found := out.String()
// Our description
expected := cmdEcho.Long
if !strings.Contains(found, expected) {
t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found)
}
// Better have our example
expected = cmdEcho.Example
if !strings.Contains(found, expected) {
t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found)
}
// A local flag
expected = "boolone"
if !strings.Contains(found, expected) {
t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found)
}
// persistent flag on parent
expected = "rootflag"
if !strings.Contains(found, expected) {
t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found)
}
// We better output info about our parent
expected = cmdRootWithRun.Short
if !strings.Contains(found, expected) {
t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found)
}
// And about subcommands
expected = cmdEchoSub.Short
if !strings.Contains(found, expected) {
t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found)
}
unexpected := cmdDeprecated.Short
if strings.Contains(found, unexpected) {
t.Errorf("Unexpected response.\nFound: %v\nBut should not have!!\n", unexpected)
}
}
func TestGenRSTNoTag(t *testing.T) {
c := initializeWithRootCmd()
// Need two commands to run the command alphabetical sort
cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated)
c.AddCommand(cmdPrint, cmdEcho)
c.DisableAutoGenTag = true
cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp)
out := new(bytes.Buffer)
if err := GenReST(c, out); err != nil {
t.Fatal(err)
}
found := out.String()
unexpected := "Auto generated"
checkStringOmits(t, found, unexpected)
}
func TestGenRSTTree(t *testing.T) {
cmd := &cobra.Command{
Use: "do [OPTIONS] arg1 arg2",
}
tmpdir, err := ioutil.TempDir("", "test-gen-rst-tree")
if err != nil {
t.Fatalf("Failed to create tmpdir: %s", err.Error())
}
defer os.RemoveAll(tmpdir)
if err := GenReSTTree(cmd, tmpdir); err != nil {
t.Fatalf("GenReSTTree failed: %s", err.Error())
}
if _, err := os.Stat(filepath.Join(tmpdir, "do.rst")); err != nil {
t.Fatalf("Expected file 'do.rst' to exist")
}
}
func BenchmarkGenReSTToFile(b *testing.B) {
c := initializeWithRootCmd()
file, err := ioutil.TempFile("", "")
if err != nil {
b.Fatal(err)
}
defer os.Remove(file.Name())
defer file.Close()
b.ResetTimer()
for i := 0; i < b.N; i++ {
if err := GenReST(c, file); err != nil {
b.Fatal(err)
}
}
}