update dependencies

dhax 2017-10-21 18:30:08 +02:00
parent fce1b99683
commit 397e9c0842
164 changed files with 5207 additions and 2213 deletions

View file

@ -1 +0,0 @@
*.test

View file

@ -1,7 +0,0 @@
language: go
sudo: false
go:
- 1.3.3
- 1.4.3
- 1.5.3
- tip

View file

@ -1,62 +0,0 @@
# buffruneio
[![Tests Status](https://travis-ci.org/pelletier/go-buffruneio.svg?branch=master)](https://travis-ci.org/pelletier/go-buffruneio)
[![GoDoc](https://godoc.org/github.com/pelletier/go-buffruneio?status.svg)](https://godoc.org/github.com/pelletier/go-buffruneio)
Buffruneio is a wrapper around bufio that provides buffered rune access with
unlimited unreads.
```go
import "github.com/pelletier/go-buffruneio"
```
## Examples
```go
import (
"fmt"
"github.com/pelletier/go-buffruneio"
"strings"
)
reader := buffruneio.NewReader(strings.NewReader("abcd"))
fmt.Println(reader.ReadRune()) // 'a'
fmt.Println(reader.ReadRune()) // 'b'
fmt.Println(reader.ReadRune()) // 'c'
reader.UnreadRune()
reader.UnreadRune()
fmt.Println(reader.ReadRune()) // 'b'
fmt.Println(reader.ReadRune()) // 'c'
```
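The reader also supports looking ahead and trimming its history. A minimal sketch using `PeekRunes`, `Forget`, and `UnreadRune` (behaviour as defined in `buffruneio.go`), with the expected output noted in comments:
```go
reader := buffruneio.NewReader(strings.NewReader("abcd"))

// Look ahead without moving the read position.
fmt.Println(reader.PeekRunes(2)) // [97 98], i.e. the runes 'a' and 'b'

r, _, _ := reader.ReadRune() // consumes 'a'
fmt.Println(string(r))       // "a"

// Drop everything buffered before the current position; 'a' can no longer be unread.
reader.Forget()
fmt.Println(reader.UnreadRune()) // "no rune to unwind"
```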
## Documentation
The documentation and additional examples are available at
[godoc.org](http://godoc.org/github.com/pelletier/go-buffruneio).
## Contribute
Feel free to report bugs and submit patches using GitHub's issues and pull request system on
[pelletier/go-buffruneio](https://github.com/pelletier/go-buffruneio). Any feedback is
much appreciated!
## LICENSE
Copyright (c) 2016 Thomas Pelletier
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -1,117 +0,0 @@
// Package buffruneio is a wrapper around bufio that provides buffered rune access with unlimited unreads.
package buffruneio
import (
"bufio"
"container/list"
"errors"
"io"
)
// Rune to indicate end of file.
const (
EOF = -(iota + 1)
)
// ErrNoRuneToUnread is returned by UnreadRune() when the read index is already at the beginning of the buffer.
var ErrNoRuneToUnread = errors.New("no rune to unwind")
// Reader implements runes buffering for an io.Reader object.
type Reader struct {
buffer *list.List
current *list.Element
input *bufio.Reader
}
// NewReader returns a new Reader.
func NewReader(rd io.Reader) *Reader {
return &Reader{
buffer: list.New(),
input: bufio.NewReader(rd),
}
}
type runeWithSize struct {
r rune
size int
}
func (rd *Reader) feedBuffer() error {
r, size, err := rd.input.ReadRune()
if err != nil {
if err != io.EOF {
return err
}
r = EOF
}
newRuneWithSize := runeWithSize{r, size}
rd.buffer.PushBack(newRuneWithSize)
if rd.current == nil {
rd.current = rd.buffer.Back()
}
return nil
}
// ReadRune reads the next rune from buffer, or from the underlying reader if needed.
func (rd *Reader) ReadRune() (rune, int, error) {
if rd.current == rd.buffer.Back() || rd.current == nil {
err := rd.feedBuffer()
if err != nil {
return EOF, 0, err
}
}
runeWithSize := rd.current.Value.(runeWithSize)
rd.current = rd.current.Next()
return runeWithSize.r, runeWithSize.size, nil
}
// UnreadRune pushes back the previously read rune in the buffer, extending it if needed.
func (rd *Reader) UnreadRune() error {
if rd.current == rd.buffer.Front() {
return ErrNoRuneToUnread
}
if rd.current == nil {
rd.current = rd.buffer.Back()
} else {
rd.current = rd.current.Prev()
}
return nil
}
// Forget removes runes stored before the current stream position index.
func (rd *Reader) Forget() {
if rd.current == nil {
rd.current = rd.buffer.Back()
}
for ; rd.current != rd.buffer.Front(); rd.buffer.Remove(rd.current.Prev()) {
}
}
// PeekRunes returns at most the next n runes, reading from the underlying source if
// needed. It does not move the current index. The result includes EOF if reached.
func (rd *Reader) PeekRunes(n int) []rune {
res := make([]rune, 0, n)
cursor := rd.current
for i := 0; i < n; i++ {
if cursor == nil {
err := rd.feedBuffer()
if err != nil {
return res
}
cursor = rd.buffer.Back()
}
if cursor != nil {
r := cursor.Value.(runeWithSize).r
res = append(res, r)
if r == EOF {
return res
}
cursor = cursor.Next()
}
}
return res
}

View file

@ -1,145 +0,0 @@
package buffruneio
import (
"runtime/debug"
"strings"
"testing"
)
func assertNoError(t *testing.T, err error) {
if err != nil {
t.Log("unexpected error", err)
debug.PrintStack()
t.FailNow()
}
}
func assumeRunesArray(t *testing.T, expected []rune, got []rune) {
if len(expected) != len(got) {
t.Fatal("expected", len(expected), "runes, but got", len(got))
}
for i := 0; i < len(got); i++ {
if expected[i] != got[i] {
t.Fatal("expected rune", expected[i], "at index", i, "but got", got[i])
}
}
}
func assumeRune(t *testing.T, rd *Reader, r rune) {
gotRune, size, err := rd.ReadRune()
assertNoError(t, err)
if gotRune != r {
t.Fatal("got", string(gotRune),
"(", []byte(string(gotRune)), ")",
"expected", string(r),
"(", []byte(string(r)), ")")
}
// Only check the size for real runes; the EOF sentinel has no byte representation.
if r != EOF && size != len([]byte(string(r))) {
t.Fatal("got size", size,
"expected", len([]byte(string(r))))
}
}
func TestReadString(t *testing.T) {
s := "hello"
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, 'h')
assumeRune(t, rd, 'e')
assumeRune(t, rd, 'l')
assumeRune(t, rd, 'l')
assumeRune(t, rd, 'o')
assumeRune(t, rd, EOF)
}
func TestMultipleEOF(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, EOF)
assumeRune(t, rd, EOF)
}
func TestUnread(t *testing.T) {
s := "ab"
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, 'a')
assumeRune(t, rd, 'b')
assertNoError(t, rd.UnreadRune())
assumeRune(t, rd, 'b')
assumeRune(t, rd, EOF)
}
func TestUnreadEOF(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
_ = rd.UnreadRune()
assumeRune(t, rd, EOF)
assumeRune(t, rd, EOF)
assertNoError(t, rd.UnreadRune())
assumeRune(t, rd, EOF)
}
func TestForget(t *testing.T) {
s := "hello"
rd := NewReader(strings.NewReader(s))
assumeRune(t, rd, 'h')
assumeRune(t, rd, 'e')
assumeRune(t, rd, 'l')
assumeRune(t, rd, 'l')
rd.Forget()
if rd.UnreadRune() != ErrNoRuneToUnread {
t.Fatal("no rune should be available")
}
}
func TestForgetEmpty(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
rd.Forget()
assumeRune(t, rd, EOF)
rd.Forget()
}
func TestPeekEmpty(t *testing.T) {
s := ""
rd := NewReader(strings.NewReader(s))
runes := rd.PeekRunes(1)
if len(runes) != 1 {
t.Fatal("incorrect number of runes", len(runes))
}
if runes[0] != EOF {
t.Fatal("incorrect rune", runes[0])
}
}
func TestPeek(t *testing.T) {
s := "a"
rd := NewReader(strings.NewReader(s))
runes := rd.PeekRunes(1)
assumeRunesArray(t, []rune{'a'}, runes)
runes = rd.PeekRunes(1)
assumeRunesArray(t, []rune{'a'}, runes)
assumeRune(t, rd, 'a')
runes = rd.PeekRunes(1)
assumeRunesArray(t, []rune{EOF}, runes)
assumeRune(t, rd, EOF)
}
func TestPeekLarge(t *testing.T) {
s := "abcdefg"
rd := NewReader(strings.NewReader(s))
runes := rd.PeekRunes(100)
if len(runes) != len(s)+1 {
t.Fatal("incorrect number of runes", len(runes))
}
assumeRunesArray(t, []rune{'a', 'b', 'c', 'd', 'e', 'f', 'g', EOF}, runes)
}

View file

@ -1,16 +1,18 @@
sudo: false
language: go
go:
- 1.6.4
- 1.7.5
- 1.8
- 1.7.6
- 1.8.3
- 1.9
- tip
matrix:
allow_failures:
- go: tip
fast_finish: true
script:
- if [ -n "$(go fmt ./...)" ]; then exit 1; fi
- ./test.sh
- ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls

View file

@ -33,7 +33,7 @@ import "github.com/pelletier/go-toml"
Read a TOML document:
```go
config, _ := toml.LoadString(`
config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)
@ -42,7 +42,7 @@ user := config.Get("postgres.user").(string)
// or using an intermediate object
postgresConfig := config.Get("postgres").(*toml.Tree)
password = postgresConfig.Get("password").(string)
password := postgresConfig.Get("password").(string)
```
Or use Unmarshal:
@ -62,7 +62,7 @@ user = "pelletier"
password = "mypassword"`)
config := Config{}
Unmarshal(doc, &config)
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
```
@ -70,7 +70,8 @@ Or use a query:
```go
// use a query to gather elements without walking the tree
results, _ := config.Query("$..[user,password]")
q, _ := query.Compile("$..[user,password]")
results := q.Execute(config)
for ii, item := range results.Values() {
fmt.Println("Query result %d: %v", ii, item)
}
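For reference, a self-contained sketch of the updated query usage; the `github.com/pelletier/go-toml/query` import path is an assumption based on the `package query` files that appear elsewhere in this commit:
```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
	"github.com/pelletier/go-toml/query" // assumed path of the query subpackage
)

func main() {
	config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)

	// Compile the path expression once, then run it against the parsed tree.
	q, _ := query.Compile("$..[user,password]")
	results := q.Execute(config)
	for i, item := range results.Values() {
		fmt.Printf("Query result %d: %v\n", i, item)
	}
}
```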

164
vendor/github.com/pelletier/go-toml/benchmark.json generated vendored Normal file
View file

@ -0,0 +1,164 @@
{
"array": {
"key1": [
1,
2,
3
],
"key2": [
"red",
"yellow",
"green"
],
"key3": [
[
1,
2
],
[
3,
4,
5
]
],
"key4": [
[
1,
2
],
[
"a",
"b",
"c"
]
],
"key5": [
1,
2,
3
],
"key6": [
1,
2
]
},
"boolean": {
"False": false,
"True": true
},
"datetime": {
"key1": "1979-05-27T07:32:00Z",
"key2": "1979-05-27T00:32:00-07:00",
"key3": "1979-05-27T00:32:00.999999-07:00"
},
"float": {
"both": {
"key": 6.626e-34
},
"exponent": {
"key1": 5e+22,
"key2": 1000000,
"key3": -0.02
},
"fractional": {
"key1": 1,
"key2": 3.1415,
"key3": -0.01
},
"underscores": {
"key1": 9224617.445991227,
"key2": 1e+100
}
},
"fruit": [{
"name": "apple",
"physical": {
"color": "red",
"shape": "round"
},
"variety": [{
"name": "red delicious"
},
{
"name": "granny smith"
}
]
},
{
"name": "banana",
"variety": [{
"name": "plantain"
}]
}
],
"integer": {
"key1": 99,
"key2": 42,
"key3": 0,
"key4": -17,
"underscores": {
"key1": 1000,
"key2": 5349221,
"key3": 12345
}
},
"products": [{
"name": "Hammer",
"sku": 738594937
},
{},
{
"color": "gray",
"name": "Nail",
"sku": 284758393
}
],
"string": {
"basic": {
"basic": "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
},
"literal": {
"multiline": {
"lines": "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n",
"regex2": "I [dw]on't need \\d{2} apples"
},
"quoted": "Tom \"Dubs\" Preston-Werner",
"regex": "\u003c\\i\\c*\\s*\u003e",
"winpath": "C:\\Users\\nodejs\\templates",
"winpath2": "\\\\ServerX\\admin$\\system32\\"
},
"multiline": {
"continued": {
"key1": "The quick brown fox jumps over the lazy dog.",
"key2": "The quick brown fox jumps over the lazy dog.",
"key3": "The quick brown fox jumps over the lazy dog."
},
"key1": "One\nTwo",
"key2": "One\nTwo",
"key3": "One\nTwo"
}
},
"table": {
"inline": {
"name": {
"first": "Tom",
"last": "Preston-Werner"
},
"point": {
"x": 1,
"y": 2
}
},
"key": "value",
"subtable": {
"key": "another value"
}
},
"x": {
"y": {
"z": {
"w": {}
}
}
}
}

32
vendor/github.com/pelletier/go-toml/benchmark.sh generated vendored Executable file
View file

@ -0,0 +1,32 @@
#!/bin/bash
set -e
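# Usage: ./benchmark.sh [reference_ref] [reference_git]
# Runs `go test -bench=. -benchmem` against the given ref (default: master) of the
# given repository (default: the local checkout) and against the working tree,
# then compares the two result sets with benchstat.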
reference_ref=${1:-master}
reference_git=${2:-.}
if ! `hash benchstat 2>/dev/null`; then
echo "Installing benchstat"
go get golang.org/x/perf/cmd/benchstat
go install golang.org/x/perf/cmd/benchstat
fi
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
ref_tempdir="${tempdir}/ref"
ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt"
local_benchmark="`pwd`/benchmark-local.txt"
echo "=== ${reference_ref} (${ref_tempdir})"
git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
pushd ${ref_tempdir} >/dev/null
git checkout ${reference_ref} >/dev/null 2>/dev/null
go test -bench=. -benchmem | tee ${ref_benchmark}
popd >/dev/null
echo ""
echo "=== local"
go test -bench=. -benchmem | tee ${local_benchmark}
echo ""
echo "=== diff"
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}

244
vendor/github.com/pelletier/go-toml/benchmark.toml generated vendored Normal file
View file

@ -0,0 +1,244 @@
################################################################################
## Comment
# Speak your mind with the hash symbol. They go from the symbol to the end of
# the line.
################################################################################
## Table
# Tables (also known as hash tables or dictionaries) are collections of
# key/value pairs. They appear in square brackets on a line by themselves.
[table]
key = "value" # Yeah, you can do this.
# Nested tables are denoted by table names with dots in them. Name your tables
# whatever crap you please, just don't use #, ., [ or ].
[table.subtable]
key = "another value"
# You don't need to specify all the super-tables if you don't want to. TOML
# knows how to do it for you.
# [x] you
# [x.y] don't
# [x.y.z] need these
[x.y.z.w] # for this to work
################################################################################
## Inline Table
# Inline tables provide a more compact syntax for expressing tables. They are
# especially useful for grouped data that can otherwise quickly become verbose.
# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
# allowed between the curly braces unless they are valid within a value.
[table.inline]
name = { first = "Tom", last = "Preston-Werner" }
point = { x = 1, y = 2 }
################################################################################
## String
# There are four ways to express strings: basic, multi-line basic, literal, and
# multi-line literal. All strings must contain only valid UTF-8 characters.
[string.basic]
basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
[string.multiline]
# The following strings are byte-for-byte equivalent:
key1 = "One\nTwo"
key2 = """One\nTwo"""
key3 = """
One
Two"""
[string.multiline.continued]
# The following strings are byte-for-byte equivalent:
key1 = "The quick brown fox jumps over the lazy dog."
key2 = """
The quick brown \
fox jumps over \
the lazy dog."""
key3 = """\
The quick brown \
fox jumps over \
the lazy dog.\
"""
[string.literal]
# What you see is what you get.
winpath = 'C:\Users\nodejs\templates'
winpath2 = '\\ServerX\admin$\system32\'
quoted = 'Tom "Dubs" Preston-Werner'
regex = '<\i\c*\s*>'
[string.literal.multiline]
regex2 = '''I [dw]on't need \d{2} apples'''
lines = '''
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
'''
################################################################################
## Integer
# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
# Negative numbers are prefixed with a minus sign.
[integer]
key1 = +99
key2 = 42
key3 = 0
key4 = -17
[integer.underscores]
# For large numbers, you may use underscores to enhance readability. Each
# underscore must be surrounded by at least one digit.
key1 = 1_000
key2 = 5_349_221
key3 = 1_2_3_4_5 # valid but inadvisable
################################################################################
## Float
# A float consists of an integer part (which may be prefixed with a plus or
# minus sign) followed by a fractional part and/or an exponent part.
[float.fractional]
key1 = +1.0
key2 = 3.1415
key3 = -0.01
[float.exponent]
key1 = 5e+22
key2 = 1e6
key3 = -2E-2
[float.both]
key = 6.626e-34
[float.underscores]
key1 = 9_224_617.445_991_228_313
key2 = 1e1_00
################################################################################
## Boolean
# Booleans are just the tokens you're used to. Always lowercase.
[boolean]
True = true
False = false
################################################################################
## Datetime
# Datetimes are RFC 3339 dates.
[datetime]
key1 = 1979-05-27T07:32:00Z
key2 = 1979-05-27T00:32:00-07:00
key3 = 1979-05-27T00:32:00.999999-07:00
################################################################################
## Array
# Arrays are square brackets with other primitives inside. Whitespace is
# ignored. Elements are separated by commas. Data types may not be mixed.
[array]
key1 = [ 1, 2, 3 ]
key2 = [ "red", "yellow", "green" ]
key3 = [ [ 1, 2 ], [3, 4, 5] ]
#key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
# also ignore newlines between the brackets. Terminating commas are ok before
# the closing bracket.
key5 = [
1, 2, 3
]
key6 = [
1,
2, # this is ok
]
################################################################################
## Array of Tables
# These can be expressed by using a table name in double brackets. Each table
# with the same double bracketed name will be an element in the array. The
# tables are inserted in the order encountered.
[[products]]
name = "Hammer"
sku = 738594937
[[products]]
[[products]]
name = "Nail"
sku = 284758393
color = "gray"
# You can create nested arrays of tables as well.
[[fruit]]
name = "apple"
[fruit.physical]
color = "red"
shape = "round"
[[fruit.variety]]
name = "red delicious"
[[fruit.variety]]
name = "granny smith"
[[fruit]]
name = "banana"
[[fruit.variety]]
name = "plantain"

121
vendor/github.com/pelletier/go-toml/benchmark.yml generated vendored Normal file
View file

@ -0,0 +1,121 @@
---
array:
key1:
- 1
- 2
- 3
key2:
- red
- yellow
- green
key3:
- - 1
- 2
- - 3
- 4
- 5
key4:
- - 1
- 2
- - a
- b
- c
key5:
- 1
- 2
- 3
key6:
- 1
- 2
boolean:
'False': false
'True': true
datetime:
key1: '1979-05-27T07:32:00Z'
key2: '1979-05-27T00:32:00-07:00'
key3: '1979-05-27T00:32:00.999999-07:00'
float:
both:
key: 6.626e-34
exponent:
key1: 5.0e+22
key2: 1000000
key3: -0.02
fractional:
key1: 1
key2: 3.1415
key3: -0.01
underscores:
key1: 9224617.445991227
key2: 1.0e+100
fruit:
- name: apple
physical:
color: red
shape: round
variety:
- name: red delicious
- name: granny smith
- name: banana
variety:
- name: plantain
integer:
key1: 99
key2: 42
key3: 0
key4: -17
underscores:
key1: 1000
key2: 5349221
key3: 12345
products:
- name: Hammer
sku: 738594937
- {}
- color: gray
name: Nail
sku: 284758393
string:
basic:
basic: "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
literal:
multiline:
lines: |
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
regex2: I [dw]on't need \d{2} apples
quoted: Tom "Dubs" Preston-Werner
regex: "<\\i\\c*\\s*>"
winpath: C:\Users\nodejs\templates
winpath2: "\\\\ServerX\\admin$\\system32\\"
multiline:
continued:
key1: The quick brown fox jumps over the lazy dog.
key2: The quick brown fox jumps over the lazy dog.
key3: The quick brown fox jumps over the lazy dog.
key1: |-
One
Two
key2: |-
One
Two
key3: |-
One
Two
table:
inline:
name:
first: Tom
last: Preston-Werner
point:
x: 1
y: 2
key: value
subtable:
key: another value
x:
y:
z:
w: {}

192
vendor/github.com/pelletier/go-toml/benchmark_test.go generated vendored Normal file
View file

@ -0,0 +1,192 @@
package toml
import (
"bytes"
"encoding/json"
"io/ioutil"
"testing"
"time"
burntsushi "github.com/BurntSushi/toml"
yaml "gopkg.in/yaml.v2"
)
type benchmarkDoc struct {
Table struct {
Key string
Subtable struct {
Key string
}
Inline struct {
Name struct {
First string
Last string
}
Point struct {
X int64
Y int64
}
}
}
String struct {
Basic struct {
Basic string
}
Multiline struct {
Key1 string
Key2 string
Key3 string
Continued struct {
Key1 string
Key2 string
Key3 string
}
}
Literal struct {
Winpath string
Winpath2 string
Quoted string
Regex string
Multiline struct {
Regex2 string
Lines string
}
}
}
Integer struct {
Key1 int64
Key2 int64
Key3 int64
Key4 int64
Underscores struct {
Key1 int64
Key2 int64
Key3 int64
}
}
Float struct {
Fractional struct {
Key1 float64
Key2 float64
Key3 float64
}
Exponent struct {
Key1 float64
Key2 float64
Key3 float64
}
Both struct {
Key float64
}
Underscores struct {
Key1 float64
Key2 float64
}
}
Boolean struct {
True bool
False bool
}
Datetime struct {
Key1 time.Time
Key2 time.Time
Key3 time.Time
}
Array struct {
Key1 []int64
Key2 []string
Key3 [][]int64
// TODO: Key4 not supported by go-toml's Unmarshal
Key5 []int64
Key6 []int64
}
Products []struct {
Name string
Sku int64
Color string
}
Fruit []struct {
Name string
Physical struct {
Color string
Shape string
Variety []struct {
Name string
}
}
}
}
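// The benchmarks below parse and unmarshal the same document, stored as
// benchmark.toml, benchmark.json and benchmark.yml, using go-toml,
// BurntSushi/toml, encoding/json and gopkg.in/yaml.v2 for comparison.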
func BenchmarkParseToml(b *testing.B) {
fileBytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
_, err := LoadReader(bytes.NewReader(fileBytes))
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalToml(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalBurntSushiToml(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := burntsushi.Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalJson(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.json")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := json.Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkUnmarshalYaml(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.yml")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
target := benchmarkDoc{}
err := yaml.Unmarshal(bytes, &target)
if err != nil {
b.Fatal(err)
}
}
}

View file

@ -1,13 +1,16 @@
// code examples for godoc
package toml
package toml_test
import (
"fmt"
"log"
toml "github.com/pelletier/go-toml"
)
func Example_tree() {
config, err := LoadFile("config.toml")
config, err := toml.LoadFile("config.toml")
if err != nil {
fmt.Println("Error ", err.Error())
@ -17,7 +20,7 @@ func Example_tree() {
password := config.Get("postgres.password").(string)
// or using an intermediate object
configTree := config.Get("postgres").(*Tree)
configTree := config.Get("postgres").(*toml.Tree)
user = configTree.Get("user").(string)
password = configTree.Get("password").(string)
fmt.Println("User is", user, " and password is", password)
@ -48,6 +51,50 @@ func Example_unmarshal() {
`)
person := Person{}
Unmarshal(document, &person)
toml.Unmarshal(document, &person)
fmt.Println(person.Name, "is", person.Age, "and works at", person.Employer.Name)
// Output:
// John is 30 and works at Company Inc.
}
func ExampleMarshal() {
type Postgres struct {
User string `toml:"user"`
Password string `toml:"password"`
}
type Config struct {
Postgres Postgres `toml:"postgres"`
}
config := Config{Postgres{User: "pelletier", Password: "mypassword"}}
b, err := toml.Marshal(config)
if err != nil {
log.Fatal(err)
}
fmt.Println(string(b))
// Output:
// [postgres]
// password = "mypassword"
// user = "pelletier"
}
func ExampleUnmarshal() {
type Postgres struct {
User string
Password string
}
type Config struct {
Postgres Postgres
}
doc := []byte(`
[postgres]
user = "pelletier"
password = "mypassword"`)
config := Config{}
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
// Output:
// user= pelletier
}

View file

@ -9,12 +9,9 @@ import (
"bytes"
"errors"
"fmt"
"io"
"regexp"
"strconv"
"strings"
"github.com/pelletier/go-buffruneio"
)
var dateRegexp *regexp.Regexp
@ -24,29 +21,29 @@ type tomlLexStateFn func() tomlLexStateFn
// Define lexer
type tomlLexer struct {
input *buffruneio.Reader // Textual source
buffer bytes.Buffer // Runes composing the current token
tokens chan token
depth int
line int
col int
endbufferLine int
endbufferCol int
inputIdx int
input []rune // Textual source
currentTokenStart int
currentTokenStop int
tokens []token
depth int
line int
col int
endbufferLine int
endbufferCol int
}
// Basic read operations on input
func (l *tomlLexer) read() rune {
r, _, err := l.input.ReadRune()
if err != nil {
panic(err)
}
r := l.peek()
if r == '\n' {
l.endbufferLine++
l.endbufferCol = 1
} else {
l.endbufferCol++
}
l.inputIdx++
return r
}
@ -54,13 +51,13 @@ func (l *tomlLexer) next() rune {
r := l.read()
if r != eof {
l.buffer.WriteRune(r)
l.currentTokenStop++
}
return r
}
func (l *tomlLexer) ignore() {
l.buffer.Reset()
l.currentTokenStart = l.currentTokenStop
l.line = l.endbufferLine
l.col = l.endbufferCol
}
@ -77,49 +74,46 @@ func (l *tomlLexer) fastForward(n int) {
}
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{
l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: t,
val: value,
}
})
l.ignore()
}
func (l *tomlLexer) emit(t tokenType) {
l.emitWithValue(t, l.buffer.String())
l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop]))
}
func (l *tomlLexer) peek() rune {
r, _, err := l.input.ReadRune()
if err != nil {
panic(err)
if l.inputIdx >= len(l.input) {
return eof
}
l.input.UnreadRune()
return r
return l.input[l.inputIdx]
}
func (l *tomlLexer) peekString(size int) string {
maxIdx := len(l.input)
upperIdx := l.inputIdx + size // FIXME: potential overflow
if upperIdx > maxIdx {
upperIdx = maxIdx
}
return string(l.input[l.inputIdx:upperIdx])
}
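// follow reports whether the unread input starts with the given string,
// without advancing the read position.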
func (l *tomlLexer) follow(next string) bool {
for _, expectedRune := range next {
r, _, err := l.input.ReadRune()
defer l.input.UnreadRune()
if err != nil {
panic(err)
}
if expectedRune != r {
return false
}
}
return true
return next == l.peekString(len(next))
}
// Error management
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
l.tokens <- token{
l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
}
})
return nil
}
@ -220,7 +214,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
break
}
possibleDate := string(l.input.PeekRunes(35))
possibleDate := l.peekString(35)
dateMatch := dateRegexp.FindString(possibleDate)
if dateMatch != "" {
l.fastForward(len(dateMatch))
@ -537,7 +531,7 @@ func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
if l.buffer.Len() > 0 {
if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroupArray)
}
l.next()
@ -560,7 +554,7 @@ func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
if l.buffer.Len() > 0 {
if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroup)
}
l.next()
@ -635,7 +629,6 @@ func (l *tomlLexer) run() {
for state := l.lexVoid; state != nil; {
state = state()
}
close(l.tokens)
}
func init() {
@ -643,16 +636,16 @@ func init() {
}
// Entry point
func lexToml(input io.Reader) chan token {
bufferedInput := buffruneio.NewReader(input)
func lexToml(inputBytes []byte) []token {
runes := bytes.Runes(inputBytes)
l := &tomlLexer{
input: bufferedInput,
tokens: make(chan token),
input: runes,
tokens: make([]token, 0, 256),
line: 1,
col: 1,
endbufferLine: 1,
endbufferCol: 1,
}
go l.run()
l.run()
return l.tokens
}

View file

@ -1,38 +1,14 @@
package toml
import (
"os"
"strings"
"reflect"
"testing"
)
func testFlow(t *testing.T, input string, expectedFlow []token) {
ch := lexToml(strings.NewReader(input))
for _, expected := range expectedFlow {
token := <-ch
if token != expected {
t.Log("While testing: ", input)
t.Log("compared (got)", token, "to (expected)", expected)
t.Log("\tvalue:", token.val, "<->", expected.val)
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
t.Log("\tline:", token.Line, "<->", expected.Line)
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
t.Log("compared", token, "to", expected)
t.FailNow()
}
}
tok, ok := <-ch
if ok {
t.Log("channel is not closed!")
t.Log(len(ch)+1, "tokens remaining:")
t.Log("token ->", tok)
for token := range ch {
t.Log("token ->", token)
}
t.FailNow()
tokens := lexToml([]byte(input))
if !reflect.DeepEqual(tokens, expectedFlow) {
t.Fatal("Different flows. Expected\n", expectedFlow, "\nGot:\n", tokens)
}
}
@ -767,13 +743,8 @@ pluralizeListTitles = false
url = "https://github.com/spf13/hugo/releases"
weight = -200
`
rd := strings.NewReader(sample)
b.ResetTimer()
for i := 0; i < b.N; i++ {
rd.Seek(0, os.SEEK_SET)
ch := lexToml(rd)
for _ = range ch {
}
lexToml([]byte(sample))
}
}

View file

@ -268,15 +268,20 @@ func valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) {
mtypef := mtype.Field(i)
opts := tomlOptions(mtypef)
if opts.include {
key := opts.name
exists := tval.Has(key)
if exists {
baseKey := opts.name
keysToTry := []string{baseKey, strings.ToLower(baseKey), strings.ToTitle(baseKey)}
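// Look the key up under its exact name first, then fall back to the
// lower-case and title-case variants.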
for _, key := range keysToTry {
exists := tval.Has(key)
if !exists {
continue
}
val := tval.Get(key)
mvalf, err := valueFromToml(mtypef.Type, val)
if err != nil {
return mval, formatError(err, tval.GetPosition(key))
}
mval.Field(i).Set(mvalf)
break
}
}
}

View file

@ -177,25 +177,6 @@ func TestDocUnmarshal(t *testing.T) {
}
}
func ExampleUnmarshal() {
type Postgres struct {
User string
Password string
}
type Config struct {
Postgres Postgres
}
doc := []byte(`
[postgres]
user = "pelletier"
password = "mypassword"`)
config := Config{}
Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
}
func TestDocPartialUnmarshal(t *testing.T) {
result := testDocSubs{}

View file

@ -13,9 +13,9 @@ import (
)
type tomlParser struct {
flow chan token
flowIdx int
flow []token
tree *Tree
tokensBuffer []token
currentTable []string
seenTableKeys []string
}
@ -34,16 +34,10 @@ func (p *tomlParser) run() {
}
func (p *tomlParser) peek() *token {
if len(p.tokensBuffer) != 0 {
return &(p.tokensBuffer[0])
}
tok, ok := <-p.flow
if !ok {
if p.flowIdx >= len(p.flow) {
return nil
}
p.tokensBuffer = append(p.tokensBuffer, tok)
return &tok
return &p.flow[p.flowIdx]
}
func (p *tomlParser) assume(typ tokenType) {
@ -57,16 +51,12 @@ func (p *tomlParser) assume(typ tokenType) {
}
func (p *tomlParser) getToken() *token {
if len(p.tokensBuffer) != 0 {
tok := p.tokensBuffer[0]
p.tokensBuffer = p.tokensBuffer[1:]
return &tok
}
tok, ok := <-p.flow
if !ok {
tok := p.peek()
if tok == nil {
return nil
}
return &tok
p.flowIdx++
return tok
}
func (p *tomlParser) parseStart() tomlParserStateFn {
@ -374,13 +364,13 @@ func (p *tomlParser) parseArray() interface{} {
return array
}
func parseToml(flow chan token) *Tree {
func parseToml(flow []token) *Tree {
result := newTree()
result.position = Position{1, 1}
parser := &tomlParser{
flowIdx: 0,
flow: flow,
tree: result,
tokensBuffer: make([]token, 0),
currentTable: make([]string, 0),
seenTableKeys: make([]string, 0),
}

View file

@ -7,10 +7,10 @@ package query
import (
"fmt"
"github.com/pelletier/go-toml"
"strconv"
"strings"
"unicode/utf8"
"github.com/pelletier/go-toml"
)
// Lexer state function
@ -55,7 +55,7 @@ func (l *queryLexer) nextStart() {
func (l *queryLexer) emit(t tokenType) {
l.tokens <- token{
Position: toml.Position{Line:l.line, Col:l.col},
Position: toml.Position{Line: l.line, Col: l.col},
typ: t,
val: l.input[l.start:l.pos],
}
@ -64,7 +64,7 @@ func (l *queryLexer) emit(t tokenType) {
func (l *queryLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{
Position: toml.Position{Line:l.line, Col:l.col},
Position: toml.Position{Line: l.line, Col: l.col},
typ: t,
val: value,
}
@ -92,7 +92,7 @@ func (l *queryLexer) backup() {
func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
l.tokens <- token{
Position: toml.Position{Line:l.line, Col:l.col},
Position: toml.Position{Line: l.line, Col: l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
}

View file

@ -1,8 +1,8 @@
package query
import (
"testing"
"github.com/pelletier/go-toml"
"testing"
)
func testQLFlow(t *testing.T, input string, expectedFlow []token) {

View file

@ -2,8 +2,8 @@ package query
import (
"fmt"
"testing"
"github.com/pelletier/go-toml"
"testing"
)
// dump path tree to a string

View file

@ -253,7 +253,7 @@ func (p *queryParser) parseFilterExpr() queryParserStateFn {
}
tok = p.getToken()
if tok.typ != tokenKey && tok.typ != tokenString {
return p.parseError(tok, "expected key or string for filter funciton name")
return p.parseError(tok, "expected key or string for filter function name")
}
name := tok.val
tok = p.getToken()

View file

@ -2,12 +2,12 @@ package query
import (
"fmt"
"github.com/pelletier/go-toml"
"io/ioutil"
"sort"
"strings"
"testing"
"time"
"github.com/pelletier/go-toml"
)
type queryTestNode struct {

View file

@ -1,10 +1,10 @@
package query
import (
"fmt"
"strconv"
"unicode"
"fmt"
"github.com/pelletier/go-toml"
"strconv"
"unicode"
)
// Define tokens
@ -104,4 +104,3 @@ func isHexDigit(r rune) bool {
(r >= 'a' && r <= 'f') ||
(r >= 'A' && r <= 'F')
}

View file

@ -27,6 +27,8 @@ go vet ./...
go get github.com/pelletier/go-buffruneio
go get github.com/davecgh/go-spew/spew
go get gopkg.in/yaml.v2
go get github.com/BurntSushi/toml
# get code for BurntSushi TOML validation
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)

View file

@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"runtime"
"strings"
@ -251,8 +252,8 @@ func (t *Tree) createSubTree(keys []string, pos Position) error {
return nil
}
// LoadReader creates a Tree from any io.Reader.
func LoadReader(reader io.Reader) (tree *Tree, err error) {
// LoadBytes creates a Tree from a []byte.
func LoadBytes(b []byte) (tree *Tree, err error) {
defer func() {
if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok {
@ -261,13 +262,23 @@ func LoadReader(reader io.Reader) (tree *Tree, err error) {
err = errors.New(r.(string))
}
}()
tree = parseToml(lexToml(reader))
tree = parseToml(lexToml(b))
return
}
// LoadReader creates a Tree from any io.Reader.
func LoadReader(reader io.Reader) (tree *Tree, err error) {
inputBytes, err := ioutil.ReadAll(reader)
if err != nil {
return
}
tree, err = LoadBytes(inputBytes)
return
}
// Load creates a Tree from a string.
func Load(content string) (tree *Tree, err error) {
return LoadReader(strings.NewReader(content))
return LoadBytes([]byte(content))
}
// LoadFile creates a Tree from a file.

View file

@ -4,6 +4,7 @@ import (
"bytes"
"fmt"
"io"
"math"
"reflect"
"sort"
"strconv"
@ -13,33 +14,34 @@ import (
// encodes a string to a TOML-compliant string value
func encodeTomlString(value string) string {
result := ""
var b bytes.Buffer
for _, rr := range value {
switch rr {
case '\b':
result += "\\b"
b.WriteString(`\b`)
case '\t':
result += "\\t"
b.WriteString(`\t`)
case '\n':
result += "\\n"
b.WriteString(`\n`)
case '\f':
result += "\\f"
b.WriteString(`\f`)
case '\r':
result += "\\r"
b.WriteString(`\r`)
case '"':
result += "\\\""
b.WriteString(`\"`)
case '\\':
result += "\\\\"
b.WriteString(`\\`)
default:
intRr := uint16(rr)
if intRr < 0x001F {
result += fmt.Sprintf("\\u%0.4X", intRr)
b.WriteString(fmt.Sprintf("\\u%0.4X", intRr))
} else {
result += string(rr)
b.WriteRune(rr)
}
}
}
return result
return b.String()
}
func tomlValueStringRepresentation(v interface{}) (string, error) {
@ -49,6 +51,11 @@ func tomlValueStringRepresentation(v interface{}) (string, error) {
case int64:
return strconv.FormatInt(value, 10), nil
case float64:
// Ensure a round float does contain a decimal point. Otherwise feeding
// the output back to the parser would convert to an integer.
if math.Trunc(value) == value {
return strconv.FormatFloat(value, 'f', 1, 32), nil
}
return strconv.FormatFloat(value, 'f', -1, 32), nil
case string:
return "\"" + encodeTomlString(value) + "\"", nil
@ -111,8 +118,7 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
return bytesCount, err
}
kvRepr := fmt.Sprintf("%s%s = %s\n", indent, k, repr)
writtenBytesCount, err := w.Write([]byte(kvRepr))
writtenBytesCount, err := writeStrings(w, indent, k, " = ", repr, "\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
@ -130,8 +136,7 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
switch node := v.(type) {
// node has to be of those two types given how keys are sorted above
case *Tree:
tableName := fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
writtenBytesCount, err := w.Write([]byte(tableName))
writtenBytesCount, err := writeStrings(w, "\n", indent, "[", combinedKey, "]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
@ -142,8 +147,7 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
}
case []*Tree:
for _, subTree := range node {
tableArrayName := fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
writtenBytesCount, err := w.Write([]byte(tableArrayName))
writtenBytesCount, err := writeStrings(w, "\n", indent, "[[", combinedKey, "]]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
@ -160,6 +164,18 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (
return bytesCount, nil
}
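// writeStrings writes each string to w in order, returning the total number
// of bytes written and the first error encountered, if any.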
func writeStrings(w io.Writer, s ...string) (int, error) {
var n int
for i := range s {
b, err := io.WriteString(w, s[i])
n += b
if err != nil {
return n, err
}
}
return n, nil
}
// WriteTo encode the Tree as Toml and writes it to the writer w.
// Returns the number of bytes written in case of success, or an error if anything happened.
func (t *Tree) WriteTo(w io.Writer) (int64, error) {

View file

@ -16,26 +16,26 @@ type failingWriter struct {
buffer bytes.Buffer
}
func (f failingWriter) Write(p []byte) (n int, err error) {
func (f *failingWriter) Write(p []byte) (n int, err error) {
count := len(p)
toWrite := f.failAt - count + f.written
toWrite := f.failAt - (count + f.written)
if toWrite < 0 {
toWrite = 0
}
if toWrite > count {
f.written += count
f.buffer.WriteString(string(p))
f.buffer.Write(p)
return count, nil
}
f.buffer.WriteString(string(p[:toWrite]))
f.buffer.Write(p[:toWrite])
f.written = f.failAt
return f.written, fmt.Errorf("failingWriter failed after writting %d bytes", f.written)
return toWrite, fmt.Errorf("failingWriter failed after writting %d bytes", f.written)
}
func assertErrorString(t *testing.T, expected string, err error) {
expectedErr := errors.New(expected)
if err.Error() != expectedErr.Error() {
if err == nil || err.Error() != expectedErr.Error() {
t.Errorf("expecting error %s, but got %s instead", expected, err)
}
}
@ -175,7 +175,7 @@ func TestTreeWriteToInvalidTreeTomlValueArray(t *testing.T) {
func TestTreeWriteToFailingWriterInSimpleValue(t *testing.T) {
toml, _ := Load(`a = 2`)
writer := failingWriter{failAt: 0, written: 0}
_, err := toml.WriteTo(writer)
_, err := toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 0 bytes", err)
}
@ -184,11 +184,11 @@ func TestTreeWriteToFailingWriterInTable(t *testing.T) {
[b]
a = 2`)
writer := failingWriter{failAt: 2, written: 0}
_, err := toml.WriteTo(writer)
_, err := toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
writer = failingWriter{failAt: 13, written: 0}
_, err = toml.WriteTo(writer)
_, err = toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 13 bytes", err)
}
@ -197,11 +197,11 @@ func TestTreeWriteToFailingWriterInArray(t *testing.T) {
[[b]]
a = 2`)
writer := failingWriter{failAt: 2, written: 0}
_, err := toml.WriteTo(writer)
_, err := toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
writer = failingWriter{failAt: 15, written: 0}
_, err = toml.WriteTo(writer)
_, err = toml.WriteTo(&writer)
assertErrorString(t, "failingWriter failed after writting 15 bytes", err)
}
@ -293,3 +293,66 @@ func TestTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
treeMap := tree.ToMap()
testMaps(t, treeMap, expected)
}
func TestTreeWriteToFloat(t *testing.T) {
tree, err := Load(`a = 3.0`)
if err != nil {
t.Fatal(err)
}
str, err := tree.ToTomlString()
if err != nil {
t.Fatal(err)
}
expected := `a = 3.0`
if strings.TrimSpace(str) != strings.TrimSpace(expected) {
t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str)
}
}
func BenchmarkTreeToTomlString(b *testing.B) {
toml, err := Load(sampleHard)
if err != nil {
b.Fatal("Unexpected error:", err)
}
for i := 0; i < b.N; i++ {
_, err := toml.ToTomlString()
if err != nil {
b.Fatal(err)
}
}
}
var sampleHard = `# Test file for TOML
# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate
# This part you'll really hate
[the]
test_string = "You'll hate me after this - #" # " Annoying, isn't it?
[the.hard]
test_array = [ "] ", " # "] # ] There you go, parse this!
test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ]
# You didn't think it'd as easy as chucking out the last #, did you?
another_test_string = " Same thing, but with a string #"
harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too"
# Things will get harder
[the.hard."bit#"]
"what?" = "You don't think some user won't do that?"
multi_line_array = [
"]",
# ] Oh yes I did
]
# Each of the following keygroups/key value pairs should produce an error. Uncomment to them to test
#[error] if you didn't catch this, your parser is broken
#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this
#array = [
# "This might most likely happen in multiline arrays",
# Like here,
# "or here,
# and here"
# ] End of array comment, forgot the #
#number = 3.14 pi <--again forgot the # `