update dependencies
parent fce1b99683
commit 397e9c0842
164 changed files with 5207 additions and 2213 deletions
39  vendor/github.com/pelletier/go-toml/lexer_test.go (generated) (vendored)
@@ -1,38 +1,14 @@
 package toml
 
 import (
-	"os"
-	"strings"
+	"reflect"
 	"testing"
 )
 
 func testFlow(t *testing.T, input string, expectedFlow []token) {
-	ch := lexToml(strings.NewReader(input))
-	for _, expected := range expectedFlow {
-		token := <-ch
-		if token != expected {
-			t.Log("While testing: ", input)
-			t.Log("compared (got)", token, "to (expected)", expected)
-			t.Log("\tvalue:", token.val, "<->", expected.val)
-			t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
-			t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
-			t.Log("\tline:", token.Line, "<->", expected.Line)
-			t.Log("\tcolumn:", token.Col, "<->", expected.Col)
-			t.Log("compared", token, "to", expected)
-			t.FailNow()
-		}
-	}
-
-	tok, ok := <-ch
-	if ok {
-		t.Log("channel is not closed!")
-		t.Log(len(ch)+1, "tokens remaining:")
-
-		t.Log("token ->", tok)
-		for token := range ch {
-			t.Log("token ->", token)
-		}
-		t.FailNow()
+	tokens := lexToml([]byte(input))
+	if !reflect.DeepEqual(tokens, expectedFlow) {
+		t.Fatal("Different flows. Expected\n", expectedFlow, "\nGot:\n", tokens)
 	}
 }
 
@@ -767,13 +743,8 @@ pluralizeListTitles = false
 url = "https://github.com/spf13/hugo/releases"
 weight = -200
 `
-	rd := strings.NewReader(sample)
-
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		rd.Seek(0, os.SEEK_SET)
-		ch := lexToml(rd)
-		for _ = range ch {
-		}
+		lexToml([]byte(sample))
 	}
 }
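The change to this vendored file tracks go-toml's new lexer entry point: lexToml now takes the input as a []byte and returns the complete token slice, instead of reading from an io.Reader and streaming tokens over a channel. A minimal sketch of the two call patterns, assuming only the lexToml and token identifiers visible in the diff; checkFlow and BenchmarkLexSketch are hypothetical names used for illustration, not part of the vendored code:

package toml

import (
	"reflect"
	"testing"
)

// checkFlow mirrors the updated testFlow above: lex the raw bytes, get the
// whole token slice back, and compare it in one shot.
//
// The pre-update pattern was roughly:
//
//	ch := lexToml(strings.NewReader(input))
//	for tok := range ch {
//		// compare each streamed token as it arrives
//	}
func checkFlow(t *testing.T, input string, expected []token) {
	got := lexToml([]byte(input))
	if !reflect.DeepEqual(got, expected) {
		t.Fatal("Different flows. Expected\n", expected, "\nGot:\n", got)
	}
}

// BenchmarkLexSketch mirrors the updated benchmark hunk: with a byte slice
// there is no reader to Seek back to the start on every iteration.
func BenchmarkLexSketch(b *testing.B) {
	sample := []byte(`title = "TOML benchmark sample"`)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		lexToml(sample)
	}
}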