vendor dependencies with dep

parent 93d8310491, commit 1384296a47

2712 changed files with 965742 additions and 0 deletions
20 vendor/gopkg.in/alexcesaro/quotedprintable.v3/LICENSE (generated, vendored, new file)
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2014 Alexandre Cesaro

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 vendor/gopkg.in/alexcesaro/quotedprintable.v3/README.md (generated, vendored, new file)
@@ -0,0 +1,16 @@
# quotedprintable

## Introduction

Package quotedprintable implements quoted-printable and message header encoding
as specified by RFC 2045 and RFC 2047.

It is a copy of the Go 1.5 package `mime/quotedprintable`. It also includes
the new functions of package `mime` concerning RFC 2047.

This code has minor changes with the standard library code in order to work
with Go 1.0 and newer.

## Documentation

https://godoc.org/gopkg.in/alexcesaro/quotedprintable.v3
279 vendor/gopkg.in/alexcesaro/quotedprintable.v3/encodedword.go (generated, vendored, new file)
@@ -0,0 +1,279 @@
package quotedprintable
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A WordEncoder is a RFC 2047 encoded-word encoder.
|
||||
type WordEncoder byte
|
||||
|
||||
const (
|
||||
// BEncoding represents Base64 encoding scheme as defined by RFC 2045.
|
||||
BEncoding = WordEncoder('b')
|
||||
// QEncoding represents the Q-encoding scheme as defined by RFC 2047.
|
||||
QEncoding = WordEncoder('q')
|
||||
)
|
||||
|
||||
var (
|
||||
errInvalidWord = errors.New("mime: invalid RFC 2047 encoded-word")
|
||||
)
|
||||
|
||||
// Encode returns the encoded-word form of s. If s is ASCII without special
|
||||
// characters, it is returned unchanged. The provided charset is the IANA
|
||||
// charset name of s. It is case insensitive.
|
||||
func (e WordEncoder) Encode(charset, s string) string {
|
||||
if !needsEncoding(s) {
|
||||
return s
|
||||
}
|
||||
return e.encodeWord(charset, s)
|
||||
}
|
||||
|
||||
func needsEncoding(s string) bool {
|
||||
for _, b := range s {
|
||||
if (b < ' ' || b > '~') && b != '\t' {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// encodeWord encodes a string into an encoded-word.
|
||||
func (e WordEncoder) encodeWord(charset, s string) string {
|
||||
buf := getBuffer()
|
||||
defer putBuffer(buf)
|
||||
|
||||
buf.WriteString("=?")
|
||||
buf.WriteString(charset)
|
||||
buf.WriteByte('?')
|
||||
buf.WriteByte(byte(e))
|
||||
buf.WriteByte('?')
|
||||
|
||||
if e == BEncoding {
|
||||
w := base64.NewEncoder(base64.StdEncoding, buf)
|
||||
io.WriteString(w, s)
|
||||
w.Close()
|
||||
} else {
|
||||
enc := make([]byte, 3)
|
||||
for i := 0; i < len(s); i++ {
|
||||
b := s[i]
|
||||
switch {
|
||||
case b == ' ':
|
||||
buf.WriteByte('_')
|
||||
case b <= '~' && b >= '!' && b != '=' && b != '?' && b != '_':
|
||||
buf.WriteByte(b)
|
||||
default:
|
||||
enc[0] = '='
|
||||
enc[1] = upperhex[b>>4]
|
||||
enc[2] = upperhex[b&0x0f]
|
||||
buf.Write(enc)
|
||||
}
|
||||
}
|
||||
}
|
||||
buf.WriteString("?=")
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
const upperhex = "0123456789ABCDEF"
|
||||
|
||||
// A WordDecoder decodes MIME headers containing RFC 2047 encoded-words.
|
||||
type WordDecoder struct {
|
||||
// CharsetReader, if non-nil, defines a function to generate
|
||||
// charset-conversion readers, converting from the provided
|
||||
// charset into UTF-8.
|
||||
// Charsets are always lower-case. utf-8, iso-8859-1 and us-ascii charsets
|
||||
// are handled by default.
|
||||
// One of the the CharsetReader's result values must be non-nil.
|
||||
CharsetReader func(charset string, input io.Reader) (io.Reader, error)
|
||||
}
|
||||
|
||||
// Decode decodes an encoded-word. If word is not a valid RFC 2047 encoded-word,
|
||||
// word is returned unchanged.
|
||||
func (d *WordDecoder) Decode(word string) (string, error) {
|
||||
fields := strings.Split(word, "?") // TODO: remove allocation?
|
||||
if len(fields) != 5 || fields[0] != "=" || fields[4] != "=" || len(fields[2]) != 1 {
|
||||
return "", errInvalidWord
|
||||
}
|
||||
|
||||
content, err := decode(fields[2][0], fields[3])
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
buf := getBuffer()
|
||||
defer putBuffer(buf)
|
||||
|
||||
if err := d.convert(buf, fields[1], content); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// DecodeHeader decodes all encoded-words of the given string. It returns an
|
||||
// error if and only if CharsetReader of d returns an error.
|
||||
func (d *WordDecoder) DecodeHeader(header string) (string, error) {
|
||||
// If there is no encoded-word, returns before creating a buffer.
|
||||
i := strings.Index(header, "=?")
|
||||
if i == -1 {
|
||||
return header, nil
|
||||
}
|
||||
|
||||
buf := getBuffer()
|
||||
defer putBuffer(buf)
|
||||
|
||||
buf.WriteString(header[:i])
|
||||
header = header[i:]
|
||||
|
||||
betweenWords := false
|
||||
for {
|
||||
start := strings.Index(header, "=?")
|
||||
if start == -1 {
|
||||
break
|
||||
}
|
||||
cur := start + len("=?")
|
||||
|
||||
i := strings.Index(header[cur:], "?")
|
||||
if i == -1 {
|
||||
break
|
||||
}
|
||||
charset := header[cur : cur+i]
|
||||
cur += i + len("?")
|
||||
|
||||
if len(header) < cur+len("Q??=") {
|
||||
break
|
||||
}
|
||||
encoding := header[cur]
|
||||
cur++
|
||||
|
||||
if header[cur] != '?' {
|
||||
break
|
||||
}
|
||||
cur++
|
||||
|
||||
j := strings.Index(header[cur:], "?=")
|
||||
if j == -1 {
|
||||
break
|
||||
}
|
||||
text := header[cur : cur+j]
|
||||
end := cur + j + len("?=")
|
||||
|
||||
content, err := decode(encoding, text)
|
||||
if err != nil {
|
||||
betweenWords = false
|
||||
buf.WriteString(header[:start+2])
|
||||
header = header[start+2:]
|
||||
continue
|
||||
}
|
||||
|
||||
// Write characters before the encoded-word. White-space and newline
|
||||
// characters separating two encoded-words must be deleted.
|
||||
if start > 0 && (!betweenWords || hasNonWhitespace(header[:start])) {
|
||||
buf.WriteString(header[:start])
|
||||
}
|
||||
|
||||
if err := d.convert(buf, charset, content); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
header = header[end:]
|
||||
betweenWords = true
|
||||
}
|
||||
|
||||
if len(header) > 0 {
|
||||
buf.WriteString(header)
|
||||
}
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func decode(encoding byte, text string) ([]byte, error) {
|
||||
switch encoding {
|
||||
case 'B', 'b':
|
||||
return base64.StdEncoding.DecodeString(text)
|
||||
case 'Q', 'q':
|
||||
return qDecode(text)
|
||||
}
|
||||
return nil, errInvalidWord
|
||||
}
|
||||
|
||||
func (d *WordDecoder) convert(buf *bytes.Buffer, charset string, content []byte) error {
|
||||
switch {
|
||||
case strings.EqualFold("utf-8", charset):
|
||||
buf.Write(content)
|
||||
case strings.EqualFold("iso-8859-1", charset):
|
||||
for _, c := range content {
|
||||
buf.WriteRune(rune(c))
|
||||
}
|
||||
case strings.EqualFold("us-ascii", charset):
|
||||
for _, c := range content {
|
||||
if c >= utf8.RuneSelf {
|
||||
buf.WriteRune(unicode.ReplacementChar)
|
||||
} else {
|
||||
buf.WriteByte(c)
|
||||
}
|
||||
}
|
||||
default:
|
||||
if d.CharsetReader == nil {
|
||||
return fmt.Errorf("mime: unhandled charset %q", charset)
|
||||
}
|
||||
r, err := d.CharsetReader(strings.ToLower(charset), bytes.NewReader(content))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = buf.ReadFrom(r); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// hasNonWhitespace reports whether s (assumed to be ASCII) contains at least
|
||||
// one byte of non-whitespace.
|
||||
func hasNonWhitespace(s string) bool {
|
||||
for _, b := range s {
|
||||
switch b {
|
||||
// Encoded-words can only be separated by linear white spaces which does
|
||||
// not include vertical tabs (\v).
|
||||
case ' ', '\t', '\n', '\r':
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// qDecode decodes a Q encoded string.
|
||||
func qDecode(s string) ([]byte, error) {
|
||||
dec := make([]byte, len(s))
|
||||
n := 0
|
||||
for i := 0; i < len(s); i++ {
|
||||
switch c := s[i]; {
|
||||
case c == '_':
|
||||
dec[n] = ' '
|
||||
case c == '=':
|
||||
if i+2 >= len(s) {
|
||||
return nil, errInvalidWord
|
||||
}
|
||||
b, err := readHexByte(s[i+1], s[i+2])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dec[n] = b
|
||||
i += 2
|
||||
case (c <= '~' && c >= ' ') || c == '\n' || c == '\r' || c == '\t':
|
||||
dec[n] = c
|
||||
default:
|
||||
return nil, errInvalidWord
|
||||
}
|
||||
n++
|
||||
}
|
||||
|
||||
return dec[:n], nil
|
||||
}
|
||||
281 vendor/gopkg.in/alexcesaro/quotedprintable.v3/encodedword_test.go (generated, vendored, new file)
@@ -0,0 +1,281 @@
package quotedprintable
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func ExampleWordEncoder_Encode() {
|
||||
fmt.Println(QEncoding.Encode("utf-8", "¡Hola, señor!"))
|
||||
fmt.Println(QEncoding.Encode("utf-8", "Hello!"))
|
||||
fmt.Println(BEncoding.Encode("UTF-8", "¡Hola, señor!"))
|
||||
fmt.Println(QEncoding.Encode("ISO-8859-1", "Caf\xE9"))
|
||||
// Output:
|
||||
// =?utf-8?q?=C2=A1Hola,_se=C3=B1or!?=
|
||||
// Hello!
|
||||
// =?UTF-8?b?wqFIb2xhLCBzZcOxb3Ih?=
|
||||
// =?ISO-8859-1?q?Caf=E9?=
|
||||
}
|
||||
|
||||
func ExampleWordDecoder_Decode() {
|
||||
dec := new(WordDecoder)
|
||||
header, err := dec.Decode("=?utf-8?q?=C2=A1Hola,_se=C3=B1or!?=")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Println(header)
|
||||
|
||||
dec.CharsetReader = func(charset string, input io.Reader) (io.Reader, error) {
|
||||
switch charset {
|
||||
case "x-case":
|
||||
// Fake character set for example.
|
||||
// Real use would integrate with packages such
|
||||
// as code.google.com/p/go-charset
|
||||
content, err := ioutil.ReadAll(input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return bytes.NewReader(bytes.ToUpper(content)), nil
|
||||
}
|
||||
return nil, fmt.Errorf("unhandled charset %q", charset)
|
||||
}
|
||||
header, err = dec.Decode("=?x-case?q?hello!?=")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Println(header)
|
||||
// Output:
|
||||
// ¡Hola, señor!
|
||||
// HELLO!
|
||||
}
|
||||
|
||||
func ExampleWordDecoder_DecodeHeader() {
|
||||
dec := new(WordDecoder)
|
||||
header, err := dec.DecodeHeader("=?utf-8?q?=C3=89ric?= <eric@example.org>, =?utf-8?q?Ana=C3=AFs?= <anais@example.org>")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Println(header)
|
||||
|
||||
header, err = dec.DecodeHeader("=?utf-8?q?=C2=A1Hola,?= =?utf-8?q?_se=C3=B1or!?=")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Println(header)
|
||||
|
||||
dec.CharsetReader = func(charset string, input io.Reader) (io.Reader, error) {
|
||||
switch charset {
|
||||
case "x-case":
|
||||
// Fake character set for example.
|
||||
// Real use would integrate with packages such
|
||||
// as code.google.com/p/go-charset
|
||||
content, err := ioutil.ReadAll(input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return bytes.NewReader(bytes.ToUpper(content)), nil
|
||||
}
|
||||
return nil, fmt.Errorf("unhandled charset %q", charset)
|
||||
}
|
||||
header, err = dec.DecodeHeader("=?x-case?q?hello_?= =?x-case?q?world!?=")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Println(header)
|
||||
// Output:
|
||||
// Éric <eric@example.org>, Anaïs <anais@example.org>
|
||||
// ¡Hola, señor!
|
||||
// HELLO WORLD!
|
||||
}
|
||||
|
||||
func TestEncodeWord(t *testing.T) {
|
||||
utf8, iso88591 := "utf-8", "iso-8859-1"
|
||||
tests := []struct {
|
||||
enc WordEncoder
|
||||
charset string
|
||||
src, exp string
|
||||
}{
|
||||
{QEncoding, utf8, "François-Jérôme", "=?utf-8?q?Fran=C3=A7ois-J=C3=A9r=C3=B4me?="},
|
||||
{BEncoding, utf8, "Café", "=?utf-8?b?Q2Fmw6k=?="},
|
||||
{QEncoding, iso88591, "La Seleção", "=?iso-8859-1?q?La_Sele=C3=A7=C3=A3o?="},
|
||||
{QEncoding, utf8, "", ""},
|
||||
{QEncoding, utf8, "A", "A"},
|
||||
{QEncoding, iso88591, "a", "a"},
|
||||
{QEncoding, utf8, "123 456", "123 456"},
|
||||
{QEncoding, utf8, "\t !\"#$%&'()*+,-./ :;<>?@[\\]^_`{|}~", "\t !\"#$%&'()*+,-./ :;<>?@[\\]^_`{|}~"},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
if s := test.enc.Encode(test.charset, test.src); s != test.exp {
|
||||
t.Errorf("Encode(%q) = %q, want %q", test.src, s, test.exp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeWord(t *testing.T) {
|
||||
tests := []struct {
|
||||
src, exp string
|
||||
hasErr bool
|
||||
}{
|
||||
{"=?UTF-8?Q?=C2=A1Hola,_se=C3=B1or!?=", "¡Hola, señor!", false},
|
||||
{"=?UTF-8?Q?Fran=C3=A7ois-J=C3=A9r=C3=B4me?=", "François-Jérôme", false},
|
||||
{"=?UTF-8?q?ascii?=", "ascii", false},
|
||||
{"=?utf-8?B?QW5kcsOp?=", "André", false},
|
||||
{"=?ISO-8859-1?Q?Rapha=EBl_Dupont?=", "Raphaël Dupont", false},
|
||||
{"=?utf-8?b?IkFudG9uaW8gSm9zw6kiIDxqb3NlQGV4YW1wbGUub3JnPg==?=", `"Antonio José" <jose@example.org>`, false},
|
||||
{"=?UTF-8?A?Test?=", "", true},
|
||||
{"=?UTF-8?Q?A=B?=", "", true},
|
||||
{"=?UTF-8?Q?=A?=", "", true},
|
||||
{"=?UTF-8?A?A?=", "", true},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
dec := new(WordDecoder)
|
||||
s, err := dec.Decode(test.src)
|
||||
if test.hasErr && err == nil {
|
||||
t.Errorf("Decode(%q) should return an error", test.src)
|
||||
continue
|
||||
}
|
||||
if !test.hasErr && err != nil {
|
||||
t.Errorf("Decode(%q): %v", test.src, err)
|
||||
continue
|
||||
}
|
||||
if s != test.exp {
|
||||
t.Errorf("Decode(%q) = %q, want %q", test.src, s, test.exp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeHeader(t *testing.T) {
|
||||
tests := []struct {
|
||||
src, exp string
|
||||
}{
|
||||
{"=?UTF-8?Q?=C2=A1Hola,_se=C3=B1or!?=", "¡Hola, señor!"},
|
||||
{"=?UTF-8?Q?Fran=C3=A7ois-J=C3=A9r=C3=B4me?=", "François-Jérôme"},
|
||||
{"=?UTF-8?q?ascii?=", "ascii"},
|
||||
{"=?utf-8?B?QW5kcsOp?=", "André"},
|
||||
{"=?ISO-8859-1?Q?Rapha=EBl_Dupont?=", "Raphaël Dupont"},
|
||||
{"Jean", "Jean"},
|
||||
{"=?utf-8?b?IkFudG9uaW8gSm9zw6kiIDxqb3NlQGV4YW1wbGUub3JnPg==?=", `"Antonio José" <jose@example.org>`},
|
||||
{"=?UTF-8?A?Test?=", "=?UTF-8?A?Test?="},
|
||||
{"=?UTF-8?Q?A=B?=", "=?UTF-8?Q?A=B?="},
|
||||
{"=?UTF-8?Q?=A?=", "=?UTF-8?Q?=A?="},
|
||||
{"=?UTF-8?A?A?=", "=?UTF-8?A?A?="},
|
||||
// Incomplete words
|
||||
{"=?", "=?"},
|
||||
{"=?UTF-8?", "=?UTF-8?"},
|
||||
{"=?UTF-8?=", "=?UTF-8?="},
|
||||
{"=?UTF-8?Q", "=?UTF-8?Q"},
|
||||
{"=?UTF-8?Q?", "=?UTF-8?Q?"},
|
||||
{"=?UTF-8?Q?=", "=?UTF-8?Q?="},
|
||||
{"=?UTF-8?Q?A", "=?UTF-8?Q?A"},
|
||||
{"=?UTF-8?Q?A?", "=?UTF-8?Q?A?"},
|
||||
// Tests from RFC 2047
|
||||
{"=?ISO-8859-1?Q?a?=", "a"},
|
||||
{"=?ISO-8859-1?Q?a?= b", "a b"},
|
||||
{"=?ISO-8859-1?Q?a?= =?ISO-8859-1?Q?b?=", "ab"},
|
||||
{"=?ISO-8859-1?Q?a?= =?ISO-8859-1?Q?b?=", "ab"},
|
||||
{"=?ISO-8859-1?Q?a?= \r\n\t =?ISO-8859-1?Q?b?=", "ab"},
|
||||
{"=?ISO-8859-1?Q?a_b?=", "a b"},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
dec := new(WordDecoder)
|
||||
s, err := dec.DecodeHeader(test.src)
|
||||
if err != nil {
|
||||
t.Errorf("DecodeHeader(%q): %v", test.src, err)
|
||||
}
|
||||
if s != test.exp {
|
||||
t.Errorf("DecodeHeader(%q) = %q, want %q", test.src, s, test.exp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCharsetDecoder(t *testing.T) {
|
||||
tests := []struct {
|
||||
src string
|
||||
want string
|
||||
charsets []string
|
||||
content []string
|
||||
}{
|
||||
{"=?utf-8?b?Q2Fmw6k=?=", "Café", nil, nil},
|
||||
{"=?ISO-8859-1?Q?caf=E9?=", "café", nil, nil},
|
||||
{"=?US-ASCII?Q?foo_bar?=", "foo bar", nil, nil},
|
||||
{"=?utf-8?Q?=?=", "=?utf-8?Q?=?=", nil, nil},
|
||||
{"=?utf-8?Q?=A?=", "=?utf-8?Q?=A?=", nil, nil},
|
||||
{
|
||||
"=?ISO-8859-15?Q?f=F5=F6?= =?windows-1252?Q?b=E0r?=",
|
||||
"f\xf5\xf6b\xe0r",
|
||||
[]string{"iso-8859-15", "windows-1252"},
|
||||
[]string{"f\xf5\xf6", "b\xe0r"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
i := 0
|
||||
dec := &WordDecoder{
|
||||
CharsetReader: func(charset string, input io.Reader) (io.Reader, error) {
|
||||
if charset != test.charsets[i] {
|
||||
t.Errorf("DecodeHeader(%q), got charset %q, want %q", test.src, charset, test.charsets[i])
|
||||
}
|
||||
content, err := ioutil.ReadAll(input)
|
||||
if err != nil {
|
||||
t.Errorf("DecodeHeader(%q), error in reader: %v", test.src, err)
|
||||
}
|
||||
got := string(content)
|
||||
if got != test.content[i] {
|
||||
t.Errorf("DecodeHeader(%q), got content %q, want %q", test.src, got, test.content[i])
|
||||
}
|
||||
i++
|
||||
|
||||
return strings.NewReader(got), nil
|
||||
},
|
||||
}
|
||||
got, err := dec.DecodeHeader(test.src)
|
||||
if err != nil {
|
||||
t.Errorf("DecodeHeader(%q): %v", test.src, err)
|
||||
}
|
||||
if got != test.want {
|
||||
t.Errorf("DecodeHeader(%q) = %q, want %q", test.src, got, test.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCharsetDecoderError(t *testing.T) {
|
||||
dec := &WordDecoder{
|
||||
CharsetReader: func(charset string, input io.Reader) (io.Reader, error) {
|
||||
return nil, errors.New("Test error")
|
||||
},
|
||||
}
|
||||
|
||||
if _, err := dec.DecodeHeader("=?charset?Q?foo?="); err == nil {
|
||||
t.Error("DecodeHeader should return an error")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkQEncodeWord(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
QEncoding.Encode("UTF-8", "¡Hola, señor!")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkQDecodeWord(b *testing.B) {
|
||||
dec := new(WordDecoder)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
dec.Decode("=?utf-8?q?=C2=A1Hola,_se=C3=B1or!?=")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkQDecodeHeader(b *testing.B) {
|
||||
dec := new(WordDecoder)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
dec.Decode("=?utf-8?q?=C2=A1Hola,_se=C3=B1or!?=")
|
||||
}
|
||||
}
|
||||
26 vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool.go (generated, vendored, new file)
@@ -0,0 +1,26 @@
// +build go1.3

package quotedprintable

import (
    "bytes"
    "sync"
)

var bufPool = sync.Pool{
    New: func() interface{} {
        return new(bytes.Buffer)
    },
}

func getBuffer() *bytes.Buffer {
    return bufPool.Get().(*bytes.Buffer)
}

func putBuffer(buf *bytes.Buffer) {
    if buf.Len() > 1024 {
        return
    }
    buf.Reset()
    bufPool.Put(buf)
}
24 vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool_go12.go (generated, vendored, new file)
@@ -0,0 +1,24 @@
// +build !go1.3

package quotedprintable

import "bytes"

var ch = make(chan *bytes.Buffer, 32)

func getBuffer() *bytes.Buffer {
    select {
    case buf := <-ch:
        return buf
    default:
    }
    return new(bytes.Buffer)
}

func putBuffer(buf *bytes.Buffer) {
    buf.Reset()
    select {
    case ch <- buf:
    default:
    }
}
121 vendor/gopkg.in/alexcesaro/quotedprintable.v3/reader.go (generated, vendored, new file)
@@ -0,0 +1,121 @@
// Package quotedprintable implements quoted-printable encoding as specified by
// RFC 2045.
package quotedprintable

import (
    "bufio"
    "bytes"
    "fmt"
    "io"
)

// Reader is a quoted-printable decoder.
type Reader struct {
    br   *bufio.Reader
    rerr error  // last read error
    line []byte // to be consumed before more of br
}

// NewReader returns a quoted-printable reader, decoding from r.
func NewReader(r io.Reader) *Reader {
    return &Reader{
        br: bufio.NewReader(r),
    }
}

func fromHex(b byte) (byte, error) {
    switch {
    case b >= '0' && b <= '9':
        return b - '0', nil
    case b >= 'A' && b <= 'F':
        return b - 'A' + 10, nil
    // Accept badly encoded bytes.
    case b >= 'a' && b <= 'f':
        return b - 'a' + 10, nil
    }
    return 0, fmt.Errorf("quotedprintable: invalid hex byte 0x%02x", b)
}

func readHexByte(a, b byte) (byte, error) {
    var hb, lb byte
    var err error
    if hb, err = fromHex(a); err != nil {
        return 0, err
    }
    if lb, err = fromHex(b); err != nil {
        return 0, err
    }
    return hb<<4 | lb, nil
}

func isQPDiscardWhitespace(r rune) bool {
    switch r {
    case '\n', '\r', ' ', '\t':
        return true
    }
    return false
}

var (
    crlf       = []byte("\r\n")
    lf         = []byte("\n")
    softSuffix = []byte("=")
)

// Read reads and decodes quoted-printable data from the underlying reader.
func (r *Reader) Read(p []byte) (n int, err error) {
    // Deviations from RFC 2045:
    // 1. in addition to "=\r\n", "=\n" is also treated as soft line break.
    // 2. it will pass through a '\r' or '\n' not preceded by '=', consistent
    //    with other broken QP encoders & decoders.
    for len(p) > 0 {
        if len(r.line) == 0 {
            if r.rerr != nil {
                return n, r.rerr
            }
            r.line, r.rerr = r.br.ReadSlice('\n')

            // Does the line end in CRLF instead of just LF?
            hasLF := bytes.HasSuffix(r.line, lf)
            hasCR := bytes.HasSuffix(r.line, crlf)
            wholeLine := r.line
            r.line = bytes.TrimRightFunc(wholeLine, isQPDiscardWhitespace)
            if bytes.HasSuffix(r.line, softSuffix) {
                rightStripped := wholeLine[len(r.line):]
                r.line = r.line[:len(r.line)-1]
                if !bytes.HasPrefix(rightStripped, lf) && !bytes.HasPrefix(rightStripped, crlf) {
                    r.rerr = fmt.Errorf("quotedprintable: invalid bytes after =: %q", rightStripped)
                }
            } else if hasLF {
                if hasCR {
                    r.line = append(r.line, '\r', '\n')
                } else {
                    r.line = append(r.line, '\n')
                }
            }
            continue
        }
        b := r.line[0]

        switch {
        case b == '=':
            if len(r.line[1:]) < 2 {
                return n, io.ErrUnexpectedEOF
            }
            b, err = readHexByte(r.line[1], r.line[2])
            if err != nil {
                return n, err
            }
            r.line = r.line[2:] // 2 of the 3; other 1 is done below
        case b == '\t' || b == '\r' || b == '\n':
            break
        case b < ' ' || b > '~':
            return n, fmt.Errorf("quotedprintable: invalid unescaped byte 0x%02x in body", b)
        }
        p[0] = b
        p = p[1:]
        r.line = r.line[1:]
        n++
    }
    return n, nil
}
200 vendor/gopkg.in/alexcesaro/quotedprintable.v3/reader_test.go (generated, vendored, new file)
@@ -0,0 +1,200 @@
package quotedprintable
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestReader(t *testing.T) {
|
||||
tests := []struct {
|
||||
in, want string
|
||||
err interface{}
|
||||
}{
|
||||
{in: "", want: ""},
|
||||
{in: "foo bar", want: "foo bar"},
|
||||
{in: "foo bar=3D", want: "foo bar="},
|
||||
{in: "foo bar=3d", want: "foo bar="}, // lax.
|
||||
{in: "foo bar=\n", want: "foo bar"},
|
||||
{in: "foo bar\n", want: "foo bar\n"}, // somewhat lax.
|
||||
{in: "foo bar=0", want: "foo bar", err: io.ErrUnexpectedEOF},
|
||||
{in: "foo bar=0D=0A", want: "foo bar\r\n"},
|
||||
{in: " A B \r\n C ", want: " A B\r\n C"},
|
||||
{in: " A B =\r\n C ", want: " A B C"},
|
||||
{in: " A B =\n C ", want: " A B C"}, // lax. treating LF as CRLF
|
||||
{in: "foo=\nbar", want: "foobar"},
|
||||
{in: "foo\x00bar", want: "foo", err: "quotedprintable: invalid unescaped byte 0x00 in body"},
|
||||
{in: "foo bar\xff", want: "foo bar", err: "quotedprintable: invalid unescaped byte 0xff in body"},
|
||||
|
||||
// Equal sign.
|
||||
{in: "=3D30\n", want: "=30\n"},
|
||||
{in: "=00=FF0=\n", want: "\x00\xff0"},
|
||||
|
||||
// Trailing whitespace
|
||||
{in: "foo \n", want: "foo\n"},
|
||||
{in: "foo \n\nfoo =\n\nfoo=20\n\n", want: "foo\n\nfoo \nfoo \n\n"},
|
||||
|
||||
// Tests that we allow bare \n and \r through, despite it being strictly
|
||||
// not permitted per RFC 2045, Section 6.7 Page 22 bullet (4).
|
||||
{in: "foo\nbar", want: "foo\nbar"},
|
||||
{in: "foo\rbar", want: "foo\rbar"},
|
||||
{in: "foo\r\nbar", want: "foo\r\nbar"},
|
||||
|
||||
// Different types of soft line-breaks.
|
||||
{in: "foo=\r\nbar", want: "foobar"},
|
||||
{in: "foo=\nbar", want: "foobar"},
|
||||
{in: "foo=\rbar", want: "foo", err: "quotedprintable: invalid hex byte 0x0d"},
|
||||
{in: "foo=\r\r\r \nbar", want: "foo", err: `quotedprintable: invalid bytes after =: "\r\r\r \n"`},
|
||||
|
||||
// Example from RFC 2045:
|
||||
{in: "Now's the time =\n" + "for all folk to come=\n" + " to the aid of their country.",
|
||||
want: "Now's the time for all folk to come to the aid of their country."},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
var buf bytes.Buffer
|
||||
_, err := io.Copy(&buf, NewReader(strings.NewReader(tt.in)))
|
||||
if got := buf.String(); got != tt.want {
|
||||
t.Errorf("for %q, got %q; want %q", tt.in, got, tt.want)
|
||||
}
|
||||
switch verr := tt.err.(type) {
|
||||
case nil:
|
||||
if err != nil {
|
||||
t.Errorf("for %q, got unexpected error: %v", tt.in, err)
|
||||
}
|
||||
case string:
|
||||
if got := fmt.Sprint(err); got != verr {
|
||||
t.Errorf("for %q, got error %q; want %q", tt.in, got, verr)
|
||||
}
|
||||
case error:
|
||||
if err != verr {
|
||||
t.Errorf("for %q, got error %q; want %q", tt.in, err, verr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func everySequence(base, alpha string, length int, fn func(string)) {
|
||||
if len(base) == length {
|
||||
fn(base)
|
||||
return
|
||||
}
|
||||
for i := 0; i < len(alpha); i++ {
|
||||
everySequence(base+alpha[i:i+1], alpha, length, fn)
|
||||
}
|
||||
}
|
||||
|
||||
var useQprint = flag.Bool("qprint", false, "Compare against the 'qprint' program.")
|
||||
|
||||
var badSoftRx = regexp.MustCompile(`=([^\r\n]+?\n)|([^\r\n]+$)|(\r$)|(\r[^\n]+\n)|( \r\n)`)
|
||||
|
||||
func TestExhaustive(t *testing.T) {
|
||||
if *useQprint {
|
||||
_, err := exec.LookPath("qprint")
|
||||
if err != nil {
|
||||
t.Fatalf("Error looking for qprint: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
res := make(map[string]int)
|
||||
everySequence("", "0A \r\n=", 6, func(s string) {
|
||||
if strings.HasSuffix(s, "=") || strings.Contains(s, "==") {
|
||||
return
|
||||
}
|
||||
buf.Reset()
|
||||
_, err := io.Copy(&buf, NewReader(strings.NewReader(s)))
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
if strings.Contains(errStr, "invalid bytes after =:") {
|
||||
errStr = "invalid bytes after ="
|
||||
}
|
||||
res[errStr]++
|
||||
if strings.Contains(errStr, "invalid hex byte ") {
|
||||
if strings.HasSuffix(errStr, "0x20") && (strings.Contains(s, "=0 ") || strings.Contains(s, "=A ") || strings.Contains(s, "= ")) {
|
||||
return
|
||||
}
|
||||
if strings.HasSuffix(errStr, "0x3d") && (strings.Contains(s, "=0=") || strings.Contains(s, "=A=")) {
|
||||
return
|
||||
}
|
||||
if strings.HasSuffix(errStr, "0x0a") || strings.HasSuffix(errStr, "0x0d") {
|
||||
// bunch of cases; since whitespace at the end of a line before \n is removed.
|
||||
return
|
||||
}
|
||||
}
|
||||
if strings.Contains(errStr, "unexpected EOF") {
|
||||
return
|
||||
}
|
||||
if errStr == "invalid bytes after =" && badSoftRx.MatchString(s) {
|
||||
return
|
||||
}
|
||||
t.Errorf("decode(%q) = %v", s, err)
|
||||
return
|
||||
}
|
||||
if *useQprint {
|
||||
cmd := exec.Command("qprint", "-d")
|
||||
cmd.Stdin = strings.NewReader(s)
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
qpres := make(chan interface{}, 2)
|
||||
go func() {
|
||||
br := bufio.NewReader(stderr)
|
||||
s, _ := br.ReadString('\n')
|
||||
if s != "" {
|
||||
qpres <- errors.New(s)
|
||||
if cmd.Process != nil {
|
||||
// It can get stuck on invalid input, like:
|
||||
// echo -n "0000= " | qprint -d
|
||||
cmd.Process.Kill()
|
||||
}
|
||||
}
|
||||
}()
|
||||
go func() {
|
||||
want, err := cmd.Output()
|
||||
if err == nil {
|
||||
qpres <- want
|
||||
}
|
||||
}()
|
||||
select {
|
||||
case got := <-qpres:
|
||||
if want, ok := got.([]byte); ok {
|
||||
if string(want) != buf.String() {
|
||||
t.Errorf("go decode(%q) = %q; qprint = %q", s, want, buf.String())
|
||||
}
|
||||
} else {
|
||||
t.Logf("qprint -d(%q) = %v", s, got)
|
||||
}
|
||||
case <-time.After(5 * time.Second):
|
||||
t.Logf("qprint timeout on %q", s)
|
||||
}
|
||||
}
|
||||
res["OK"]++
|
||||
})
|
||||
var outcomes []string
|
||||
for k, v := range res {
|
||||
outcomes = append(outcomes, fmt.Sprintf("%v: %d", k, v))
|
||||
}
|
||||
sort.Strings(outcomes)
|
||||
got := strings.Join(outcomes, "\n")
|
||||
want := `OK: 21576
|
||||
invalid bytes after =: 3397
|
||||
quotedprintable: invalid hex byte 0x0a: 1400
|
||||
quotedprintable: invalid hex byte 0x0d: 2700
|
||||
quotedprintable: invalid hex byte 0x20: 2490
|
||||
quotedprintable: invalid hex byte 0x3d: 440
|
||||
unexpected EOF: 3122`
|
||||
if got != want {
|
||||
t.Errorf("Got:\n%s\nWant:\n%s", got, want)
|
||||
}
|
||||
}
|
||||
166 vendor/gopkg.in/alexcesaro/quotedprintable.v3/writer.go (generated, vendored, new file)
@@ -0,0 +1,166 @@
package quotedprintable

import "io"

const lineMaxLen = 76

// A Writer is a quoted-printable writer that implements io.WriteCloser.
type Writer struct {
    // Binary mode treats the writer's input as pure binary and processes end of
    // line bytes as binary data.
    Binary bool

    w    io.Writer
    i    int
    line [78]byte
    cr   bool
}

// NewWriter returns a new Writer that writes to w.
func NewWriter(w io.Writer) *Writer {
    return &Writer{w: w}
}

// Write encodes p using quoted-printable encoding and writes it to the
// underlying io.Writer. It limits line length to 76 characters. The encoded
// bytes are not necessarily flushed until the Writer is closed.
func (w *Writer) Write(p []byte) (n int, err error) {
    for i, b := range p {
        switch {
        // Simple writes are done in batch.
        case b >= '!' && b <= '~' && b != '=':
            continue
        case isWhitespace(b) || !w.Binary && (b == '\n' || b == '\r'):
            continue
        }

        if i > n {
            if err := w.write(p[n:i]); err != nil {
                return n, err
            }
            n = i
        }

        if err := w.encode(b); err != nil {
            return n, err
        }
        n++
    }

    if n == len(p) {
        return n, nil
    }

    if err := w.write(p[n:]); err != nil {
        return n, err
    }

    return len(p), nil
}

// Close closes the Writer, flushing any unwritten data to the underlying
// io.Writer, but does not close the underlying io.Writer.
func (w *Writer) Close() error {
    if err := w.checkLastByte(); err != nil {
        return err
    }

    return w.flush()
}

// write limits text encoded in quoted-printable to 76 characters per line.
func (w *Writer) write(p []byte) error {
    for _, b := range p {
        if b == '\n' || b == '\r' {
            // If the previous byte was \r, the CRLF has already been inserted.
            if w.cr && b == '\n' {
                w.cr = false
                continue
            }

            if b == '\r' {
                w.cr = true
            }

            if err := w.checkLastByte(); err != nil {
                return err
            }
            if err := w.insertCRLF(); err != nil {
                return err
            }
            continue
        }

        if w.i == lineMaxLen-1 {
            if err := w.insertSoftLineBreak(); err != nil {
                return err
            }
        }

        w.line[w.i] = b
        w.i++
        w.cr = false
    }

    return nil
}

func (w *Writer) encode(b byte) error {
    if lineMaxLen-1-w.i < 3 {
        if err := w.insertSoftLineBreak(); err != nil {
            return err
        }
    }

    w.line[w.i] = '='
    w.line[w.i+1] = upperhex[b>>4]
    w.line[w.i+2] = upperhex[b&0x0f]
    w.i += 3

    return nil
}

// checkLastByte encodes the last buffered byte if it is a space or a tab.
func (w *Writer) checkLastByte() error {
    if w.i == 0 {
        return nil
    }

    b := w.line[w.i-1]
    if isWhitespace(b) {
        w.i--
        if err := w.encode(b); err != nil {
            return err
        }
    }

    return nil
}

func (w *Writer) insertSoftLineBreak() error {
    w.line[w.i] = '='
    w.i++

    return w.insertCRLF()
}

func (w *Writer) insertCRLF() error {
    w.line[w.i] = '\r'
    w.line[w.i+1] = '\n'
    w.i += 2

    return w.flush()
}

func (w *Writer) flush() error {
    if _, err := w.w.Write(w.line[:w.i]); err != nil {
        return err
    }

    w.i = 0
    return nil
}

func isWhitespace(b byte) bool {
    return b == ' ' || b == '\t'
}
154 vendor/gopkg.in/alexcesaro/quotedprintable.v3/writer_test.go (generated, vendored, new file)
@@ -0,0 +1,154 @@
package quotedprintable
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestWriter(t *testing.T) {
|
||||
testWriter(t, false)
|
||||
}
|
||||
|
||||
func TestWriterBinary(t *testing.T) {
|
||||
testWriter(t, true)
|
||||
}
|
||||
|
||||
func testWriter(t *testing.T, binary bool) {
|
||||
tests := []struct {
|
||||
in, want, wantB string
|
||||
}{
|
||||
{in: "", want: ""},
|
||||
{in: "foo bar", want: "foo bar"},
|
||||
{in: "foo bar=", want: "foo bar=3D"},
|
||||
{in: "foo bar\r", want: "foo bar\r\n", wantB: "foo bar=0D"},
|
||||
{in: "foo bar\r\r", want: "foo bar\r\n\r\n", wantB: "foo bar=0D=0D"},
|
||||
{in: "foo bar\n", want: "foo bar\r\n", wantB: "foo bar=0A"},
|
||||
{in: "foo bar\r\n", want: "foo bar\r\n", wantB: "foo bar=0D=0A"},
|
||||
{in: "foo bar\r\r\n", want: "foo bar\r\n\r\n", wantB: "foo bar=0D=0D=0A"},
|
||||
{in: "foo bar ", want: "foo bar=20"},
|
||||
{in: "foo bar\t", want: "foo bar=09"},
|
||||
{in: "foo bar ", want: "foo bar =20"},
|
||||
{in: "foo bar \n", want: "foo bar=20\r\n", wantB: "foo bar =0A"},
|
||||
{in: "foo bar \r", want: "foo bar=20\r\n", wantB: "foo bar =0D"},
|
||||
{in: "foo bar \r\n", want: "foo bar=20\r\n", wantB: "foo bar =0D=0A"},
|
||||
{in: "foo bar \n", want: "foo bar =20\r\n", wantB: "foo bar =0A"},
|
||||
{in: "foo bar \n ", want: "foo bar =20\r\n=20", wantB: "foo bar =0A=20"},
|
||||
{in: "¡Hola Señor!", want: "=C2=A1Hola Se=C3=B1or!"},
|
||||
{
|
||||
in: "\t !\"#$%&'()*+,-./ :;<>?@[\\]^_`{|}~",
|
||||
want: "\t !\"#$%&'()*+,-./ :;<>?@[\\]^_`{|}~",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat("a", 75),
|
||||
want: strings.Repeat("a", 75),
|
||||
},
|
||||
{
|
||||
in: strings.Repeat("a", 76),
|
||||
want: strings.Repeat("a", 75) + "=\r\na",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat("a", 72) + "=",
|
||||
want: strings.Repeat("a", 72) + "=3D",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat("a", 73) + "=",
|
||||
want: strings.Repeat("a", 73) + "=\r\n=3D",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat("a", 74) + "=",
|
||||
want: strings.Repeat("a", 74) + "=\r\n=3D",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat("a", 75) + "=",
|
||||
want: strings.Repeat("a", 75) + "=\r\n=3D",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat(" ", 73),
|
||||
want: strings.Repeat(" ", 72) + "=20",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat(" ", 74),
|
||||
want: strings.Repeat(" ", 73) + "=\r\n=20",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat(" ", 75),
|
||||
want: strings.Repeat(" ", 74) + "=\r\n=20",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat(" ", 76),
|
||||
want: strings.Repeat(" ", 75) + "=\r\n=20",
|
||||
},
|
||||
{
|
||||
in: strings.Repeat(" ", 77),
|
||||
want: strings.Repeat(" ", 75) + "=\r\n =20",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
buf := new(bytes.Buffer)
|
||||
w := NewWriter(buf)
|
||||
|
||||
want := tt.want
|
||||
if binary {
|
||||
w.Binary = true
|
||||
if tt.wantB != "" {
|
||||
want = tt.wantB
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.Write([]byte(tt.in)); err != nil {
|
||||
t.Errorf("Write(%q): %v", tt.in, err)
|
||||
continue
|
||||
}
|
||||
if err := w.Close(); err != nil {
|
||||
t.Errorf("Close(): %v", err)
|
||||
continue
|
||||
}
|
||||
got := buf.String()
|
||||
if got != want {
|
||||
t.Errorf("Write(%q), got:\n%q\nwant:\n%q", tt.in, got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundTrip(t *testing.T) {
|
||||
buf := new(bytes.Buffer)
|
||||
w := NewWriter(buf)
|
||||
if _, err := w.Write(testMsg); err != nil {
|
||||
t.Fatalf("Write: %v", err)
|
||||
}
|
||||
if err := w.Close(); err != nil {
|
||||
t.Fatalf("Close: %v", err)
|
||||
}
|
||||
|
||||
r := NewReader(buf)
|
||||
gotBytes, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Fatalf("Error while reading from Reader: %v", err)
|
||||
}
|
||||
got := string(gotBytes)
|
||||
if got != string(testMsg) {
|
||||
t.Errorf("Encoding and decoding changed the message, got:\n%s", got)
|
||||
}
|
||||
}
|
||||
|
||||
// From http://fr.wikipedia.org/wiki/Quoted-Printable
|
||||
var testMsg = []byte("Quoted-Printable (QP) est un format d'encodage de données codées sur 8 bits, qui utilise exclusivement les caractères alphanumériques imprimables du code ASCII (7 bits).\r\n" +
|
||||
"\r\n" +
|
||||
"En effet, les différents codages comprennent de nombreux caractères qui ne sont pas représentables en ASCII (par exemple les caractères accentués), ainsi que des caractères dits « non-imprimables ».\r\n" +
|
||||
"\r\n" +
|
||||
"L'encodage Quoted-Printable permet de remédier à ce problème, en procédant de la manière suivante :\r\n" +
|
||||
"\r\n" +
|
||||
"Un octet correspondant à un caractère imprimable de l'ASCII sauf le signe égal (donc un caractère de code ASCII entre 33 et 60 ou entre 62 et 126) ou aux caractères de saut de ligne (codes ASCII 13 et 10) ou une suite de tabulations et espaces non situées en fin de ligne (de codes ASCII respectifs 9 et 32) est représenté tel quel.\r\n" +
|
||||
"Un octet qui ne correspond pas à la définition ci-dessus (caractère non imprimable de l'ASCII, tabulation ou espaces non suivies d'un caractère imprimable avant la fin de la ligne ou signe égal) est représenté par un signe égal, suivi de son numéro, exprimé en hexadécimal.\r\n" +
|
||||
"Enfin, un signe égal suivi par un saut de ligne (donc la suite des trois caractères de codes ASCII 61, 13 et 10) peut être inséré n'importe où, afin de limiter la taille des lignes produites si nécessaire. Une limite de 76 caractères par ligne est généralement respectée.\r\n")
|
||||
|
||||
func BenchmarkWriter(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
w := NewWriter(ioutil.Discard)
|
||||
w.Write(testMsg)
|
||||
w.Close()
|
||||
}
|
||||
}
|
||||
8 vendor/gopkg.in/gomail.v2/.travis.yml (generated, vendored, new file)
@@ -0,0 +1,8 @@
language: go

go:
  - 1.2
  - 1.3
  - 1.4
  - 1.5
  - tip
20 vendor/gopkg.in/gomail.v2/CHANGELOG.md (generated, vendored, new file)
@@ -0,0 +1,20 @@
# Change Log
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).

## [2.0.0] - 2015-09-02

- Mailer has been removed. It has been replaced by Dialer and Sender.
- `File` type and the `CreateFile` and `OpenFile` functions have been removed.
- `Message.Attach` and `Message.Embed` have a new signature.
- `Message.GetBodyWriter` has been removed. Use `Message.AddAlternativeWriter`
  instead.
- `Message.Export` has been removed. `Message.WriteTo` can be used instead.
- `Message.DelHeader` has been removed.
- The `Bcc` header field is no longer sent. It is far more simpler and
  efficient: the same message is sent to all recipients instead of sending a
  different email to each Bcc address.
- LoginAuth has been removed. `NewPlainDialer` now implements the LOGIN
  authentication mechanism when needed.
- Go 1.2 is now required instead of Go 1.3. No external dependency are used when
  using Go 1.5.
20 vendor/gopkg.in/gomail.v2/CONTRIBUTING.md (generated, vendored, new file)
@@ -0,0 +1,20 @@
Thank you for contributing to Gomail! Here are a few guidelines:

## Bugs

If you think you found a bug, create an issue and supply the minimum amount
of code triggering the bug so it can be reproduced.


## Fixing a bug

If you want to fix a bug, you can send a pull request. It should contains a
new test or update an existing one to cover that bug.


## New feature proposal

If you think Gomail lacks a feature, you can open an issue or send a pull
request. I want to keep Gomail code and API as simple as possible so please
describe your needs so we can discuss whether this feature should be added to
Gomail or not.
20 vendor/gopkg.in/gomail.v2/LICENSE (generated, vendored, new file)
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2014 Alexandre Cesaro

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
97 vendor/gopkg.in/gomail.v2/README.md (generated, vendored, new file)
@@ -0,0 +1,97 @@
# Gomail
[](https://travis-ci.org/go-gomail/gomail) [](http://gocover.io/gopkg.in/gomail.v2) [](https://godoc.org/gopkg.in/gomail.v2)

## Introduction

Gomail is a simple and efficient package to send emails. It is well tested and
documented.

It is versioned using [gopkg.in](https://gopkg.in) so I promise
they will never be backward incompatible changes within each version.

It requires Go 1.2 or newer. With Go 1.5, no external dependencies are used.


## Features

Gomail supports:
- Attachments
- Embedded images
- HTML and text templates
- Automatic encoding of special characters
- SSL and TLS
- Sending multiple emails with the same SMTP connection
- Any method to send emails: SMTP, postfix (not included but easily doable), etc


## Documentation

https://godoc.org/gopkg.in/gomail.v2


## Download

    go get gopkg.in/gomail.v2


## Examples

See the [examples in the documentation](https://godoc.org/gopkg.in/gomail.v2#example-package).


## FAQ

### x509: certificate signed by unknown authority

If you get this error it means the certificate used by the SMTP server is not
considered valid by the client running Gomail. As a quick workaround you can
bypass the verification of the server's certificate chain and host name by using
`SetTLSConfig`:

    d := gomail.NewPlainDialer("smtp.example.com", "user", "123456", 587)
    d.TLSConfig = &tls.Config{InsecureSkipVerify: true}

Note, however, that this is insecure and should not be used in production.


## Contribute

Contributions are more than welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for
more info.


## Change log

See [CHANGELOG.md](CHANGELOG.md).


## License

[MIT](LICENSE)


## Contact

You can ask questions on the [Gomail
thread](https://groups.google.com/d/topic/golang-nuts/jMxZHzvvEVg/discussion)
in the Go mailing-list.


## Support

If you want to support the development of Gomail, I gladly accept donations.

I will give 100% of the money I receive to
[Enfants, Espoir Du Monde](http://www.eedm.fr/).
EEDM is a French NGO which helps children in Bangladesh, Cameroun, Haiti, India
and Madagascar.

All its members are volunteers so its operating costs are only
1.9%. So your money will directly helps children of these countries.

As an added bonus, your donations will also tip me by lowering my taxes :smile:

I will send an email with the receipt of the donation to EEDM annually to all
donors.

[](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=PYQKC7VFVXCFG)
67 vendor/gopkg.in/gomail.v2/auth.go (generated, vendored, new file)
@@ -0,0 +1,67 @@
package gomail

import (
    "bytes"
    "errors"
    "fmt"
    "net/smtp"
)

// plainAuth is an smtp.Auth that implements the PLAIN authentication mechanism.
// It fallbacks to the LOGIN mechanism if it is the only mechanism advertised
// by the server.
type plainAuth struct {
    username string
    password string
    host     string
    login    bool
}

func (a *plainAuth) Start(server *smtp.ServerInfo) (string, []byte, error) {
    if server.Name != a.host {
        return "", nil, errors.New("gomail: wrong host name")
    }

    var plain, login bool
    for _, a := range server.Auth {
        switch a {
        case "PLAIN":
            plain = true
        case "LOGIN":
            login = true
        }
    }

    if !server.TLS && !plain && !login {
        return "", nil, errors.New("gomail: unencrypted connection")
    }

    if !plain && login {
        a.login = true
        return "LOGIN", nil, nil
    }

    return "PLAIN", []byte("\x00" + a.username + "\x00" + a.password), nil
}

func (a *plainAuth) Next(fromServer []byte, more bool) ([]byte, error) {
    if !a.login {
        if more {
            return nil, errors.New("gomail: unexpected server challenge")
        }
        return nil, nil
    }

    if !more {
        return nil, nil
    }

    switch {
    case bytes.Equal(fromServer, []byte("Username:")):
        return []byte(a.username), nil
    case bytes.Equal(fromServer, []byte("Password:")):
        return []byte(a.password), nil
    default:
        return nil, fmt.Errorf("gomail: unexpected server challenge: %s", fromServer)
    }
}
156 vendor/gopkg.in/gomail.v2/auth_test.go (generated, vendored, new file)
@@ -0,0 +1,156 @@
package gomail
|
||||
|
||||
import (
|
||||
"net/smtp"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
testUser = "user"
|
||||
testPwd = "pwd"
|
||||
testHost = "smtp.example.com"
|
||||
)
|
||||
|
||||
var testAuth = &plainAuth{
|
||||
username: testUser,
|
||||
password: testPwd,
|
||||
host: testHost,
|
||||
}
|
||||
|
||||
type plainAuthTest struct {
|
||||
auths []string
|
||||
challenges []string
|
||||
tls bool
|
||||
wantProto string
|
||||
wantData []string
|
||||
wantError bool
|
||||
}
|
||||
|
||||
func TestNoAdvertisement(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: false,
|
||||
wantProto: "PLAIN",
|
||||
wantError: true,
|
||||
})
|
||||
}
|
||||
|
||||
func TestNoAdvertisementTLS(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: true,
|
||||
wantProto: "PLAIN",
|
||||
wantData: []string{"\x00" + testUser + "\x00" + testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func TestPlain(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{"PLAIN"},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: false,
|
||||
wantProto: "PLAIN",
|
||||
wantData: []string{"\x00" + testUser + "\x00" + testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func TestPlainTLS(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{"PLAIN"},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: true,
|
||||
wantProto: "PLAIN",
|
||||
wantData: []string{"\x00" + testUser + "\x00" + testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func TestPlainAndLogin(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{"PLAIN", "LOGIN"},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: false,
|
||||
wantProto: "PLAIN",
|
||||
wantData: []string{"\x00" + testUser + "\x00" + testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func TestPlainAndLoginTLS(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{"PLAIN", "LOGIN"},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: true,
|
||||
wantProto: "PLAIN",
|
||||
wantData: []string{"\x00" + testUser + "\x00" + testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func TestLogin(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{"LOGIN"},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: false,
|
||||
wantProto: "LOGIN",
|
||||
wantData: []string{"", testUser, testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func TestLoginTLS(t *testing.T) {
|
||||
testPlainAuth(t, &plainAuthTest{
|
||||
auths: []string{"LOGIN"},
|
||||
challenges: []string{"Username:", "Password:"},
|
||||
tls: true,
|
||||
wantProto: "LOGIN",
|
||||
wantData: []string{"", testUser, testPwd},
|
||||
})
|
||||
}
|
||||
|
||||
func testPlainAuth(t *testing.T, test *plainAuthTest) {
|
||||
auth := &plainAuth{
|
||||
username: testUser,
|
||||
password: testPwd,
|
||||
host: testHost,
|
||||
}
|
||||
server := &smtp.ServerInfo{
|
||||
Name: testHost,
|
||||
TLS: test.tls,
|
||||
Auth: test.auths,
|
||||
}
|
||||
proto, toServer, err := auth.Start(server)
|
||||
if err != nil && !test.wantError {
|
||||
t.Fatalf("plainAuth.Start(): %v", err)
|
||||
}
|
||||
if err != nil && test.wantError {
|
||||
return
|
||||
}
|
||||
if proto != test.wantProto {
|
||||
t.Errorf("invalid protocol, got %q, want %q", proto, test.wantProto)
|
||||
}
|
||||
|
||||
i := 0
|
||||
got := string(toServer)
|
||||
if got != test.wantData[i] {
|
||||
t.Errorf("Invalid response, got %q, want %q", got, test.wantData[i])
|
||||
}
|
||||
|
||||
if proto == "PLAIN" {
|
||||
return
|
||||
}
|
||||
|
||||
for _, challenge := range test.challenges {
|
||||
i++
|
||||
if i >= len(test.wantData) {
|
||||
t.Fatalf("unexpected challenge: %q", challenge)
|
||||
}
|
||||
|
||||
toServer, err = auth.Next([]byte(challenge), true)
|
||||
if err != nil {
|
||||
t.Fatalf("plainAuth.Auth(): %v", err)
|
||||
}
|
||||
got = string(toServer)
|
||||
if got != test.wantData[i] {
|
||||
t.Errorf("Invalid response, got %q, want %q", got, test.wantData[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
5 vendor/gopkg.in/gomail.v2/doc.go (generated, vendored, new file)
@@ -0,0 +1,5 @@
// Package gomail provides a simple interface to compose emails and to mail them
// efficiently.
//
// More info on Github: https://github.com/go-gomail/gomail
package gomail
215 vendor/gopkg.in/gomail.v2/example_test.go (generated, vendored, new file)
@@ -0,0 +1,215 @@
package gomail_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"gopkg.in/gomail.v2"
|
||||
)
|
||||
|
||||
func Example() {
|
||||
m := gomail.NewMessage()
|
||||
m.SetHeader("From", "alex@example.com")
|
||||
m.SetHeader("To", "bob@example.com", "cora@example.com")
|
||||
m.SetAddressHeader("Cc", "dan@example.com", "Dan")
|
||||
m.SetHeader("Subject", "Hello!")
|
||||
m.SetBody("text/html", "Hello <b>Bob</b> and <i>Cora</i>!")
|
||||
m.Attach("/home/Alex/lolcat.jpg")
|
||||
|
||||
d := gomail.NewPlainDialer("smtp.example.com", 587, "user", "123456")
|
||||
|
||||
// Send the email to Bob, Cora and Dan.
|
||||
if err := d.DialAndSend(m); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// A daemon that listens to a channel and sends all incoming messages.
|
||||
func Example_daemon() {
|
||||
ch := make(chan *gomail.Message)
|
||||
|
||||
go func() {
|
||||
d := gomail.NewPlainDialer("smtp.example.com", 587, "user", "123456")
|
||||
|
||||
var s gomail.SendCloser
|
||||
var err error
|
||||
open := false
|
||||
for {
|
||||
select {
|
||||
case m, ok := <-ch:
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if !open {
|
||||
if s, err = d.Dial(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
open = true
|
||||
}
|
||||
if err := gomail.Send(s, m); err != nil {
|
||||
log.Print(err)
|
||||
}
|
||||
// Close the connection to the SMTP server if no email was sent in
|
||||
// the last 30 seconds.
|
||||
case <-time.After(30 * time.Second):
|
||||
if open {
|
||||
if err := s.Close(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
open = false
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
// Use the channel in your program to send emails.
|
||||
|
||||
// Close the channel to stop the mail daemon.
|
||||
close(ch)
|
||||
}
|
||||
|
||||
// Efficiently send a customized newsletter to a list of recipients.
|
||||
func Example_newsletter() {
|
||||
// The list of recipients.
|
||||
var list []struct {
|
||||
Name string
|
||||
Address string
|
||||
}
|
||||
|
||||
d := gomail.NewPlainDialer("smtp.example.com", 587, "user", "123456")
|
||||
s, err := d.Dial()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
m := gomail.NewMessage()
|
||||
for _, r := range list {
|
||||
m.SetHeader("From", "no-reply@example.com")
|
||||
m.SetAddressHeader("To", r.Address, r.Name)
|
||||
m.SetHeader("Subject", "Newsletter #1")
|
||||
m.SetBody("text/html", fmt.Sprintf("Hello %s!", r.Name))
|
||||
|
||||
if err := gomail.Send(s, m); err != nil {
|
||||
log.Printf("Could not send email to %q: %v", r.Address, err)
|
||||
}
|
||||
m.Reset()
|
||||
}
|
||||
}
|
||||
|
||||
// Send an email using a local SMTP server.
|
||||
func Example_noAuth() {
|
||||
m := gomail.NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetHeader("Subject", "Hello!")
|
||||
m.SetBody("text/plain", "Hello!")
|
||||
|
||||
d := gomail.Dialer{Host: "localhost", Port: 587}
|
||||
if err := d.DialAndSend(m); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// Send an email using an API or postfix.
|
||||
func Example_noSMTP() {
|
||||
m := gomail.NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetHeader("Subject", "Hello!")
|
||||
m.SetBody("text/plain", "Hello!")
|
||||
|
||||
s := gomail.SendFunc(func(from string, to []string, msg io.WriterTo) error {
|
||||
// Implement your email-sending function, for example by calling
// an API, or running postfix, etc.
fmt.Println("From:", from)
|
||||
fmt.Println("To:", to)
|
||||
return nil
|
||||
})
|
||||
|
||||
if err := gomail.Send(s, m); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
// Output:
|
||||
// From: from@example.com
|
||||
// To: [to@example.com]
|
||||
}
|
||||
|
||||
var m *gomail.Message
|
||||
|
||||
func ExampleSetCopyFunc() {
|
||||
m.Attach("foo.txt", gomail.SetCopyFunc(func(w io.Writer) error {
|
||||
_, err := w.Write([]byte("Content of foo.txt"))
|
||||
return err
|
||||
}))
|
||||
}
|
||||
|
||||
func ExampleSetHeader() {
|
||||
h := map[string][]string{"Content-ID": {"<foo@bar.mail>"}}
|
||||
m.Attach("foo.jpg", gomail.SetHeader(h))
|
||||
}
|
||||
|
||||
func ExampleMessage_AddAlternative() {
|
||||
m.SetBody("text/plain", "Hello!")
|
||||
m.AddAlternative("text/html", "<p>Hello!</p>")
|
||||
}
|
||||
|
||||
func ExampleMessage_AddAlternativeWriter() {
|
||||
t := template.Must(template.New("example").Parse("Hello {{.}}!"))
|
||||
m.AddAlternativeWriter("text/plain", func(w io.Writer) error {
|
||||
return t.Execute(w, "Bob")
|
||||
})
|
||||
}
|
||||
|
||||
func ExampleMessage_Attach() {
|
||||
m.Attach("/tmp/image.jpg")
|
||||
}
|
||||
|
||||
func ExampleMessage_Embed() {
|
||||
m.Embed("/tmp/image.jpg")
|
||||
m.SetBody("text/html", `<img src="cid:image.jpg" alt="My image" />`)
|
||||
}
|
||||
|
||||
func ExampleMessage_FormatAddress() {
|
||||
m.SetHeader("To", m.FormatAddress("bob@example.com", "Bob"), m.FormatAddress("cora@example.com", "Cora"))
|
||||
}
|
||||
|
||||
func ExampleMessage_FormatDate() {
|
||||
m.SetHeaders(map[string][]string{
|
||||
"X-Date": {m.FormatDate(time.Now())},
|
||||
})
|
||||
}
|
||||
|
||||
func ExampleMessage_SetAddressHeader() {
|
||||
m.SetAddressHeader("To", "bob@example.com", "Bob")
|
||||
}
|
||||
|
||||
func ExampleMessage_SetBody() {
|
||||
m.SetBody("text/plain", "Hello!")
|
||||
}
|
||||
|
||||
func ExampleMessage_SetDateHeader() {
|
||||
m.SetDateHeader("X-Date", time.Now())
|
||||
}
|
||||
|
||||
func ExampleMessage_SetHeader() {
|
||||
m.SetHeader("Subject", "Hello!")
|
||||
}
|
||||
|
||||
func ExampleMessage_SetHeaders() {
|
||||
m.SetHeaders(map[string][]string{
|
||||
"From": {m.FormatAddress("alex@example.com", "Alex")},
|
||||
"To": {"bob@example.com", "cora@example.com"},
|
||||
"Subject": {"Hello"},
|
||||
})
|
||||
}
|
||||
|
||||
func ExampleSetCharset() {
|
||||
m = gomail.NewMessage(gomail.SetCharset("ISO-8859-1"))
|
||||
}
|
||||
|
||||
func ExampleSetEncoding() {
|
||||
m = gomail.NewMessage(gomail.SetEncoding(gomail.Base64))
|
||||
}
|
||||
302
vendor/gopkg.in/gomail.v2/message.go
generated
vendored
Normal file
302
vendor/gopkg.in/gomail.v2/message.go
generated
vendored
Normal file
|
|
@ -0,0 +1,302 @@
|
|||
package gomail
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Message represents an email.
|
||||
type Message struct {
|
||||
header header
|
||||
parts []part
|
||||
attachments []*file
|
||||
embedded []*file
|
||||
charset string
|
||||
encoding Encoding
|
||||
hEncoder mimeEncoder
|
||||
buf bytes.Buffer
|
||||
}
|
||||
|
||||
type header map[string][]string
|
||||
|
||||
type part struct {
|
||||
header header
|
||||
copier func(io.Writer) error
|
||||
}
|
||||
|
||||
// NewMessage creates a new message. It uses UTF-8 and quoted-printable encoding
|
||||
// by default.
|
||||
func NewMessage(settings ...MessageSetting) *Message {
|
||||
m := &Message{
|
||||
header: make(header),
|
||||
charset: "UTF-8",
|
||||
encoding: QuotedPrintable,
|
||||
}
|
||||
|
||||
m.applySettings(settings)
|
||||
|
||||
if m.encoding == Base64 {
|
||||
m.hEncoder = bEncoding
|
||||
} else {
|
||||
m.hEncoder = qEncoding
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
// Reset resets the message so it can be reused. The message keeps its previous
// settings so it is in the same state as after a call to NewMessage.
|
||||
func (m *Message) Reset() {
|
||||
for k := range m.header {
|
||||
delete(m.header, k)
|
||||
}
|
||||
m.parts = nil
|
||||
m.attachments = nil
|
||||
m.embedded = nil
|
||||
}
|
||||
|
||||
func (m *Message) applySettings(settings []MessageSetting) {
|
||||
for _, s := range settings {
|
||||
s(m)
|
||||
}
|
||||
}
|
||||
|
||||
// A MessageSetting can be used as an argument in NewMessage to configure an
|
||||
// email.
|
||||
type MessageSetting func(m *Message)
|
||||
|
||||
// SetCharset is a message setting to set the charset of the email.
|
||||
func SetCharset(charset string) MessageSetting {
|
||||
return func(m *Message) {
|
||||
m.charset = charset
|
||||
}
|
||||
}
|
||||
|
||||
// SetEncoding is a message setting to set the encoding of the email.
|
||||
func SetEncoding(enc Encoding) MessageSetting {
|
||||
return func(m *Message) {
|
||||
m.encoding = enc
|
||||
}
|
||||
}
|
||||
|
||||
// Encoding represents a MIME encoding scheme like quoted-printable or base64.
|
||||
type Encoding string
|
||||
|
||||
const (
|
||||
// QuotedPrintable represents the quoted-printable encoding as defined in
|
||||
// RFC 2045.
|
||||
QuotedPrintable Encoding = "quoted-printable"
|
||||
// Base64 represents the base64 encoding as defined in RFC 2045.
|
||||
Base64 Encoding = "base64"
|
||||
// Unencoded can be used to avoid encoding the body of an email. The headers
|
||||
// will still be encoded using quoted-printable encoding.
|
||||
Unencoded Encoding = "8bit"
|
||||
)
|
||||
|
||||
// SetHeader sets a value to the given header field.
|
||||
func (m *Message) SetHeader(field string, value ...string) {
|
||||
m.encodeHeader(value)
|
||||
m.header[field] = value
|
||||
}
|
||||
|
||||
func (m *Message) encodeHeader(values []string) {
|
||||
for i := range values {
|
||||
values[i] = m.encodeString(values[i])
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Message) encodeString(value string) string {
|
||||
return m.hEncoder.Encode(m.charset, value)
|
||||
}
|
||||
|
||||
// SetHeaders sets the message headers.
|
||||
func (m *Message) SetHeaders(h map[string][]string) {
|
||||
for k, v := range h {
|
||||
m.SetHeader(k, v...)
|
||||
}
|
||||
}
|
||||
|
||||
// SetAddressHeader sets an address to the given header field.
|
||||
func (m *Message) SetAddressHeader(field, address, name string) {
|
||||
m.header[field] = []string{m.FormatAddress(address, name)}
|
||||
}
|
||||
|
||||
// FormatAddress formats an address and a name as a valid RFC 5322 address.
|
||||
func (m *Message) FormatAddress(address, name string) string {
|
||||
enc := m.encodeString(name)
|
||||
if enc == name {
|
||||
m.buf.WriteByte('"')
|
||||
for i := 0; i < len(name); i++ {
|
||||
b := name[i]
|
||||
if b == '\\' || b == '"' {
|
||||
m.buf.WriteByte('\\')
|
||||
}
|
||||
m.buf.WriteByte(b)
|
||||
}
|
||||
m.buf.WriteByte('"')
|
||||
} else if hasSpecials(name) {
|
||||
m.buf.WriteString(bEncoding.Encode(m.charset, name))
|
||||
} else {
|
||||
m.buf.WriteString(enc)
|
||||
}
|
||||
m.buf.WriteString(" <")
|
||||
m.buf.WriteString(address)
|
||||
m.buf.WriteByte('>')
|
||||
|
||||
addr := m.buf.String()
|
||||
m.buf.Reset()
|
||||
return addr
|
||||
}
|
||||
|
||||
func hasSpecials(text string) bool {
|
||||
for i := 0; i < len(text); i++ {
|
||||
switch c := text[i]; c {
|
||||
case '(', ')', '<', '>', '[', ']', ':', ';', '@', '\\', ',', '.', '"':
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// SetDateHeader sets a date to the given header field.
|
||||
func (m *Message) SetDateHeader(field string, date time.Time) {
|
||||
m.header[field] = []string{m.FormatDate(date)}
|
||||
}
|
||||
|
||||
// FormatDate formats a date as a valid RFC 5322 date.
|
||||
func (m *Message) FormatDate(date time.Time) string {
|
||||
return date.Format(time.RFC1123Z)
|
||||
}
|
||||
|
||||
// GetHeader gets a header field.
|
||||
func (m *Message) GetHeader(field string) []string {
|
||||
return m.header[field]
|
||||
}
|
||||
|
||||
// SetBody sets the body of the message.
|
||||
func (m *Message) SetBody(contentType, body string) {
|
||||
m.parts = []part{
|
||||
{
|
||||
header: m.getPartHeader(contentType),
|
||||
copier: func(w io.Writer) error {
|
||||
_, err := io.WriteString(w, body)
|
||||
return err
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// AddAlternative adds an alternative part to the message.
|
||||
//
|
||||
// It is commonly used to send HTML emails that default to the plain text
|
||||
// version for backward compatibility.
|
||||
//
|
||||
// More info: http://en.wikipedia.org/wiki/MIME#Alternative
|
||||
func (m *Message) AddAlternative(contentType, body string) {
|
||||
m.parts = append(m.parts,
|
||||
part{
|
||||
header: m.getPartHeader(contentType),
|
||||
copier: func(w io.Writer) error {
|
||||
_, err := io.WriteString(w, body)
|
||||
return err
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// AddAlternativeWriter adds an alternative part to the message. It can be
|
||||
// useful with the text/template or html/template packages.
|
||||
func (m *Message) AddAlternativeWriter(contentType string, f func(io.Writer) error) {
|
||||
m.parts = []part{
|
||||
{
|
||||
header: m.getPartHeader(contentType),
|
||||
copier: f,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Message) getPartHeader(contentType string) header {
|
||||
return map[string][]string{
|
||||
"Content-Type": {contentType + "; charset=" + m.charset},
|
||||
"Content-Transfer-Encoding": {string(m.encoding)},
|
||||
}
|
||||
}
|
||||
|
||||
type file struct {
|
||||
Name string
|
||||
Header map[string][]string
|
||||
CopyFunc func(w io.Writer) error
|
||||
}
|
||||
|
||||
func (f *file) setHeader(field, value string) {
|
||||
f.Header[field] = []string{value}
|
||||
}
|
||||
|
||||
// A FileSetting can be used as an argument in Message.Attach or Message.Embed.
|
||||
type FileSetting func(*file)
|
||||
|
||||
// SetHeader is a file setting to set the MIME header of the message part that
|
||||
// contains the file content.
|
||||
//
|
||||
// Mandatory headers are automatically added if they are not set when sending
|
||||
// the email.
|
||||
func SetHeader(h map[string][]string) FileSetting {
|
||||
return func(f *file) {
|
||||
for k, v := range h {
|
||||
f.Header[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// SetCopyFunc is a file setting to replace the function that runs when the
|
||||
// message is sent. It should copy the content of the file to the io.Writer.
|
||||
//
|
||||
// The default copy function opens the file with the given filename, and copies
// its content to the io.Writer.
|
||||
func SetCopyFunc(f func(io.Writer) error) FileSetting {
|
||||
return func(fi *file) {
|
||||
fi.CopyFunc = f
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Message) appendFile(list []*file, name string, settings []FileSetting) []*file {
|
||||
f := &file{
|
||||
Name: filepath.Base(name),
|
||||
Header: make(map[string][]string),
|
||||
CopyFunc: func(w io.Writer) error {
|
||||
h, err := os.Open(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := io.Copy(w, h); err != nil {
|
||||
h.Close()
|
||||
return err
|
||||
}
|
||||
return h.Close()
|
||||
},
|
||||
}
|
||||
|
||||
for _, s := range settings {
|
||||
s(f)
|
||||
}
|
||||
|
||||
if list == nil {
|
||||
return []*file{f}
|
||||
}
|
||||
|
||||
return append(list, f)
|
||||
}
|
||||
|
||||
// Attach attaches the files to the email.
|
||||
func (m *Message) Attach(filename string, settings ...FileSetting) {
|
||||
m.attachments = m.appendFile(m.attachments, filename, settings)
|
||||
}
|
||||
|
||||
// Embed embeds the images to the email.
|
||||
func (m *Message) Embed(filename string, settings ...FileSetting) {
|
||||
m.embedded = m.appendFile(m.embedded, filename, settings)
|
||||
}
|
||||
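message.go above defines the FileSetting options consumed by Attach and Embed. The sketch below combines SetCopyFunc (in-memory content instead of reading a file) with the SetHeader file setting (overriding the Content-Type otherwise guessed from the extension), assuming only the exported gomail.v2 API shown in this diff; the report.csv name and its content are made up for illustration.

package main

import (
	"io"

	"gopkg.in/gomail.v2"
)

func main() {
	m := gomail.NewMessage()
	m.SetHeader("From", "from@example.com")
	m.SetHeader("To", "to@example.com")
	m.SetBody("text/plain", "Report attached.")

	// Attach in-memory content: SetCopyFunc replaces the default file-reading
	// copy function, and SetHeader pre-sets the part's Content-Type so it is
	// not derived from the ".csv" extension.
	m.Attach("report.csv",
		gomail.SetCopyFunc(func(w io.Writer) error {
			_, err := w.Write([]byte("id,total\n1,42\n"))
			return err
		}),
		gomail.SetHeader(map[string][]string{
			"Content-Type": {"text/csv; charset=UTF-8"},
		}),
	)
}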
630
vendor/gopkg.in/gomail.v2/message_test.go
generated
vendored
Normal file
630
vendor/gopkg.in/gomail.v2/message_test.go
generated
vendored
Normal file
|
|
@ -0,0 +1,630 @@
|
|||
package gomail
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func init() {
|
||||
now = func() time.Time {
|
||||
return time.Date(2014, 06, 25, 17, 46, 0, 0, time.UTC)
|
||||
}
|
||||
}
|
||||
|
||||
type message struct {
|
||||
from string
|
||||
to []string
|
||||
content string
|
||||
}
|
||||
|
||||
func TestMessage(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetAddressHeader("From", "from@example.com", "Señor From")
|
||||
m.SetHeader("To", m.FormatAddress("to@example.com", "Señor To"), "tobis@example.com")
|
||||
m.SetAddressHeader("Cc", "cc@example.com", "A, B")
|
||||
m.SetAddressHeader("X-To", "ccbis@example.com", "à, b")
|
||||
m.SetDateHeader("X-Date", now())
|
||||
m.SetHeader("X-Date-2", m.FormatDate(now()))
|
||||
m.SetHeader("Subject", "¡Hola, señor!")
|
||||
m.SetHeaders(map[string][]string{
|
||||
"X-Headers": {"Test", "Café"},
|
||||
})
|
||||
m.SetBody("text/plain", "¡Hola, señor!")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{
|
||||
"to@example.com",
|
||||
"tobis@example.com",
|
||||
"cc@example.com",
|
||||
},
|
||||
content: "From: =?UTF-8?q?Se=C3=B1or_From?= <from@example.com>\r\n" +
|
||||
"To: =?UTF-8?q?Se=C3=B1or_To?= <to@example.com>, tobis@example.com\r\n" +
|
||||
"Cc: \"A, B\" <cc@example.com>\r\n" +
|
||||
"X-To: =?UTF-8?b?w6AsIGI=?= <ccbis@example.com>\r\n" +
|
||||
"X-Date: Wed, 25 Jun 2014 17:46:00 +0000\r\n" +
|
||||
"X-Date-2: Wed, 25 Jun 2014 17:46:00 +0000\r\n" +
|
||||
"X-Headers: Test, =?UTF-8?q?Caf=C3=A9?=\r\n" +
|
||||
"Subject: =?UTF-8?q?=C2=A1Hola,_se=C3=B1or!?=\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"=C2=A1Hola, se=C3=B1or!",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestBodyWriter(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.AddAlternativeWriter("text/plain", func(w io.Writer) error {
|
||||
_, err := w.Write([]byte("Test message"))
|
||||
return err
|
||||
})
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"Test message",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestCustomMessage(t *testing.T) {
|
||||
m := NewMessage(SetCharset("ISO-8859-1"), SetEncoding(Base64))
|
||||
m.SetHeaders(map[string][]string{
|
||||
"From": {"from@example.com"},
|
||||
"To": {"to@example.com"},
|
||||
"Subject": {"Café"},
|
||||
})
|
||||
m.SetBody("text/html", "¡Hola, señor!")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Subject: =?ISO-8859-1?b?Q2Fmw6k=?=\r\n" +
|
||||
"Content-Type: text/html; charset=ISO-8859-1\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
"wqFIb2xhLCBzZcOxb3Ih",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestUnencodedMessage(t *testing.T) {
|
||||
m := NewMessage(SetEncoding(Unencoded))
|
||||
m.SetHeaders(map[string][]string{
|
||||
"From": {"from@example.com"},
|
||||
"To": {"to@example.com"},
|
||||
"Subject": {"Café"},
|
||||
})
|
||||
m.SetBody("text/html", "¡Hola, señor!")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Subject: =?UTF-8?q?Caf=C3=A9?=\r\n" +
|
||||
"Content-Type: text/html; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: 8bit\r\n" +
|
||||
"\r\n" +
|
||||
"¡Hola, señor!",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestRecipients(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeaders(map[string][]string{
|
||||
"From": {"from@example.com"},
|
||||
"To": {"to@example.com"},
|
||||
"Cc": {"cc@example.com"},
|
||||
"Bcc": {"bcc1@example.com", "bcc2@example.com"},
|
||||
"Subject": {"Hello!"},
|
||||
})
|
||||
m.SetBody("text/plain", "Test message")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com", "cc@example.com", "bcc1@example.com", "bcc2@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Cc: cc@example.com\r\n" +
|
||||
"Subject: Hello!\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"Test message",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestAlternative(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain", "¡Hola, señor!")
|
||||
m.AddAlternative("text/html", "¡<b>Hola</b>, <i>señor</i>!</h1>")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: multipart/alternative; boundary=_BOUNDARY_1_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"=C2=A1Hola, se=C3=B1or!\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: text/html; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"=C2=A1<b>Hola</b>, <i>se=C3=B1or</i>!</h1>\r\n" +
|
||||
"--_BOUNDARY_1_--\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 1, want)
|
||||
}
|
||||
|
||||
func TestAttachmentOnly(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.Attach(mockCopyFile("/tmp/test.pdf"))
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: application/pdf; name=\"test.pdf\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.pdf\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")),
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestAttachment(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain", "Test")
|
||||
m.Attach(mockCopyFile("/tmp/test.pdf"))
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: multipart/mixed; boundary=_BOUNDARY_1_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"Test\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: application/pdf; name=\"test.pdf\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.pdf\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")) + "\r\n" +
|
||||
"--_BOUNDARY_1_--\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 1, want)
|
||||
}
|
||||
|
||||
func TestAttachmentsOnly(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.Attach(mockCopyFile("/tmp/test.pdf"))
|
||||
m.Attach(mockCopyFile("/tmp/test.zip"))
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: multipart/mixed; boundary=_BOUNDARY_1_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: application/pdf; name=\"test.pdf\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.pdf\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")) + "\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: application/zip; name=\"test.zip\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.zip\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.zip")) + "\r\n" +
|
||||
"--_BOUNDARY_1_--\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 1, want)
|
||||
}
|
||||
|
||||
func TestAttachments(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain", "Test")
|
||||
m.Attach(mockCopyFile("/tmp/test.pdf"))
|
||||
m.Attach(mockCopyFile("/tmp/test.zip"))
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: multipart/mixed; boundary=_BOUNDARY_1_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"Test\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: application/pdf; name=\"test.pdf\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.pdf\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")) + "\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: application/zip; name=\"test.zip\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.zip\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.zip")) + "\r\n" +
|
||||
"--_BOUNDARY_1_--\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 1, want)
|
||||
}
|
||||
|
||||
func TestEmbedded(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.Embed(mockCopyFileWithHeader(m, "image1.jpg", map[string][]string{"Content-ID": {"<test-content-id>"}}))
|
||||
m.Embed(mockCopyFile("image2.jpg"))
|
||||
m.SetBody("text/plain", "Test")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: multipart/related; boundary=_BOUNDARY_1_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"Test\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: image/jpeg; name=\"image1.jpg\"\r\n" +
|
||||
"Content-Disposition: inline; filename=\"image1.jpg\"\r\n" +
|
||||
"Content-ID: <test-content-id>\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of image1.jpg")) + "\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: image/jpeg; name=\"image2.jpg\"\r\n" +
|
||||
"Content-Disposition: inline; filename=\"image2.jpg\"\r\n" +
|
||||
"Content-ID: <image2.jpg>\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of image2.jpg")) + "\r\n" +
|
||||
"--_BOUNDARY_1_--\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 1, want)
|
||||
}
|
||||
|
||||
func TestFullMessage(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain", "¡Hola, señor!")
|
||||
m.AddAlternative("text/html", "¡<b>Hola</b>, <i>señor</i>!</h1>")
|
||||
m.Attach(mockCopyFile("test.pdf"))
|
||||
m.Embed(mockCopyFile("image.jpg"))
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: multipart/mixed; boundary=_BOUNDARY_1_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: multipart/related; boundary=_BOUNDARY_2_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_2_\r\n" +
|
||||
"Content-Type: multipart/alternative; boundary=_BOUNDARY_3_\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_3_\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"=C2=A1Hola, se=C3=B1or!\r\n" +
|
||||
"--_BOUNDARY_3_\r\n" +
|
||||
"Content-Type: text/html; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"=C2=A1<b>Hola</b>, <i>se=C3=B1or</i>!</h1>\r\n" +
|
||||
"--_BOUNDARY_3_--\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_2_\r\n" +
|
||||
"Content-Type: image/jpeg; name=\"image.jpg\"\r\n" +
|
||||
"Content-Disposition: inline; filename=\"image.jpg\"\r\n" +
|
||||
"Content-ID: <image.jpg>\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of image.jpg")) + "\r\n" +
|
||||
"--_BOUNDARY_2_--\r\n" +
|
||||
"\r\n" +
|
||||
"--_BOUNDARY_1_\r\n" +
|
||||
"Content-Type: application/pdf; name=\"test.pdf\"\r\n" +
|
||||
"Content-Disposition: attachment; filename=\"test.pdf\"\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")) + "\r\n" +
|
||||
"--_BOUNDARY_1_--\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 3, want)
|
||||
|
||||
want = &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
"Test reset",
|
||||
}
|
||||
m.Reset()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain", "Test reset")
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestQpLineLength(t *testing.T) {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain",
|
||||
strings.Repeat("0", 76)+"\r\n"+
|
||||
strings.Repeat("0", 75)+"à\r\n"+
|
||||
strings.Repeat("0", 74)+"à\r\n"+
|
||||
strings.Repeat("0", 73)+"à\r\n"+
|
||||
strings.Repeat("0", 72)+"à\r\n"+
|
||||
strings.Repeat("0", 75)+"\r\n"+
|
||||
strings.Repeat("0", 76)+"\n")
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
strings.Repeat("0", 75) + "=\r\n0\r\n" +
|
||||
strings.Repeat("0", 75) + "=\r\n=C3=A0\r\n" +
|
||||
strings.Repeat("0", 74) + "=\r\n=C3=A0\r\n" +
|
||||
strings.Repeat("0", 73) + "=\r\n=C3=A0\r\n" +
|
||||
strings.Repeat("0", 72) + "=C3=\r\n=A0\r\n" +
|
||||
strings.Repeat("0", 75) + "\r\n" +
|
||||
strings.Repeat("0", 75) + "=\r\n0\r\n",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func TestBase64LineLength(t *testing.T) {
|
||||
m := NewMessage(SetCharset("UTF-8"), SetEncoding(Base64))
|
||||
m.SetHeader("From", "from@example.com")
|
||||
m.SetHeader("To", "to@example.com")
|
||||
m.SetBody("text/plain", strings.Repeat("0", 58))
|
||||
|
||||
want := &message{
|
||||
from: "from@example.com",
|
||||
to: []string{"to@example.com"},
|
||||
content: "From: from@example.com\r\n" +
|
||||
"To: to@example.com\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: base64\r\n" +
|
||||
"\r\n" +
|
||||
strings.Repeat("MDAw", 19) + "\r\nMA==",
|
||||
}
|
||||
|
||||
testMessage(t, m, 0, want)
|
||||
}
|
||||
|
||||
func testMessage(t *testing.T, m *Message, bCount int, want *message) {
|
||||
err := Send(stubSendMail(t, bCount, want), m)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
func stubSendMail(t *testing.T, bCount int, want *message) SendFunc {
|
||||
return func(from string, to []string, m io.WriterTo) error {
|
||||
if from != want.from {
|
||||
t.Fatalf("Invalid from, got %q, want %q", from, want.from)
|
||||
}
|
||||
|
||||
if len(to) != len(want.to) {
|
||||
t.Fatalf("Invalid recipient count, \ngot %d: %q\nwant %d: %q",
|
||||
len(to), to,
|
||||
len(want.to), want.to,
|
||||
)
|
||||
}
|
||||
for i := range want.to {
|
||||
if to[i] != want.to[i] {
|
||||
t.Fatalf("Invalid recipient, got %q, want %q",
|
||||
to[i], want.to[i],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
_, err := m.WriteTo(buf)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
got := buf.String()
|
||||
wantMsg := string("Mime-Version: 1.0\r\n" +
|
||||
"Date: Wed, 25 Jun 2014 17:46:00 +0000\r\n" +
|
||||
want.content)
|
||||
if bCount > 0 {
|
||||
boundaries := getBoundaries(t, bCount, got)
|
||||
for i, b := range boundaries {
|
||||
wantMsg = strings.Replace(wantMsg, "_BOUNDARY_"+strconv.Itoa(i+1)+"_", b, -1)
|
||||
}
|
||||
}
|
||||
|
||||
compareBodies(t, got, wantMsg)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func compareBodies(t *testing.T, got, want string) {
|
||||
// We cannot do a simple comparison since the ordering of headers' fields
|
||||
// is random.
|
||||
gotLines := strings.Split(got, "\r\n")
|
||||
wantLines := strings.Split(want, "\r\n")
|
||||
|
||||
// We only test for too many lines, missing lines are tested after
|
||||
if len(gotLines) > len(wantLines) {
|
||||
t.Fatalf("Message has too many lines, \ngot %d:\n%s\nwant %d:\n%s", len(gotLines), got, len(wantLines), want)
|
||||
}
|
||||
|
||||
isInHeader := true
|
||||
headerStart := 0
|
||||
for i, line := range wantLines {
|
||||
if line == gotLines[i] {
|
||||
if line == "" {
|
||||
isInHeader = false
|
||||
} else if !isInHeader && len(line) > 2 && line[:2] == "--" {
|
||||
isInHeader = true
|
||||
headerStart = i + 1
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if !isInHeader {
|
||||
missingLine(t, line, got, want)
|
||||
}
|
||||
|
||||
isMissing := true
|
||||
for j := headerStart; j < len(gotLines); j++ {
|
||||
if gotLines[j] == "" {
|
||||
break
|
||||
}
|
||||
if gotLines[j] == line {
|
||||
isMissing = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if isMissing {
|
||||
missingLine(t, line, got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func missingLine(t *testing.T, line, got, want string) {
|
||||
t.Fatalf("Missing line %q\ngot:\n%s\nwant:\n%s", line, got, want)
|
||||
}
|
||||
|
||||
func getBoundaries(t *testing.T, count int, m string) []string {
|
||||
if matches := boundaryRegExp.FindAllStringSubmatch(m, count); matches != nil {
|
||||
boundaries := make([]string, count)
|
||||
for i, match := range matches {
|
||||
boundaries[i] = match[1]
|
||||
}
|
||||
return boundaries
|
||||
}
|
||||
|
||||
t.Fatal("Boundary not found in body")
|
||||
return []string{""}
|
||||
}
|
||||
|
||||
var boundaryRegExp = regexp.MustCompile("boundary=(\\w+)")
|
||||
|
||||
func mockCopyFile(name string) (string, FileSetting) {
|
||||
return name, SetCopyFunc(func(w io.Writer) error {
|
||||
_, err := w.Write([]byte("Content of " + filepath.Base(name)))
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
func mockCopyFileWithHeader(m *Message, name string, h map[string][]string) (string, FileSetting, FileSetting) {
|
||||
name, f := mockCopyFile(name)
|
||||
return name, f, SetHeader(h)
|
||||
}
|
||||
|
||||
func BenchmarkFull(b *testing.B) {
|
||||
discardFunc := SendFunc(func(from string, to []string, m io.WriterTo) error {
|
||||
_, err := m.WriteTo(ioutil.Discard)
|
||||
return err
|
||||
})
|
||||
|
||||
m := NewMessage()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
m.SetAddressHeader("From", "from@example.com", "Señor From")
|
||||
m.SetHeaders(map[string][]string{
|
||||
"To": {"to@example.com"},
|
||||
"Cc": {"cc@example.com"},
|
||||
"Bcc": {"bcc1@example.com", "bcc2@example.com"},
|
||||
"Subject": {"¡Hola, señor!"},
|
||||
})
|
||||
m.SetBody("text/plain", "¡Hola, señor!")
|
||||
m.AddAlternative("text/html", "<p>¡Hola, señor!</p>")
|
||||
m.Attach(mockCopyFile("benchmark.txt"))
|
||||
m.Embed(mockCopyFile("benchmark.jpg"))
|
||||
|
||||
if err := Send(discardFunc, m); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
m.Reset()
|
||||
}
|
||||
}
|
||||
19
vendor/gopkg.in/gomail.v2/mime.go
generated
vendored
Normal file
19
vendor/gopkg.in/gomail.v2/mime.go
generated
vendored
Normal file
@ -0,0 +1,19 @@
// +build go1.5

package gomail

import (
	"mime"
	"mime/quotedprintable"
)

var newQPWriter = quotedprintable.NewWriter

type mimeEncoder struct {
	mime.WordEncoder
}

var (
	bEncoding = mimeEncoder{mime.BEncoding}
	qEncoding = mimeEncoder{mime.QEncoding}
)
16
vendor/gopkg.in/gomail.v2/mime_go14.go
generated
vendored
Normal file
16
vendor/gopkg.in/gomail.v2/mime_go14.go
generated
vendored
Normal file
@ -0,0 +1,16 @@
// +build !go1.5

package gomail

import "gopkg.in/alexcesaro/quotedprintable.v3"

var newQPWriter = quotedprintable.NewWriter

type mimeEncoder struct {
	quotedprintable.WordEncoder
}

var (
	bEncoding = mimeEncoder{quotedprintable.BEncoding}
	qEncoding = mimeEncoder{quotedprintable.QEncoding}
)
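mime.go and mime_go14.go above use build constraints to pick between the Go 1.5 standard library and the vendored quotedprintable package while exposing identical identifiers. A minimal sketch of the same pattern, with a hypothetical file, package, and variable name; a sibling file guarded by "// +build !go1.5" would assign the same variable from gopkg.in/alexcesaro/quotedprintable.v3 instead.

// hypothetical file: qp_go15.go, compiled on Go 1.5 and newer
// +build go1.5

package qpcompat

import "mime/quotedprintable"

// NewWriter resolves to the standard library implementation under this build
// constraint; the !go1.5 sibling file provides the fallback.
var NewWriter = quotedprintable.NewWriter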
117
vendor/gopkg.in/gomail.v2/send.go
generated
vendored
Normal file
117
vendor/gopkg.in/gomail.v2/send.go
generated
vendored
Normal file
@ -0,0 +1,117 @@
package gomail

import (
	"errors"
	"fmt"
	"io"
	"net/mail"
)

// Sender is the interface that wraps the Send method.
//
// Send sends an email to the given addresses.
type Sender interface {
	Send(from string, to []string, msg io.WriterTo) error
}

// SendCloser is the interface that groups the Send and Close methods.
type SendCloser interface {
	Sender
	Close() error
}

// A SendFunc is a function that sends emails to the given addresses.
//
// The SendFunc type is an adapter to allow the use of ordinary functions as
// email senders. If f is a function with the appropriate signature, SendFunc(f)
// is a Sender object that calls f.
type SendFunc func(from string, to []string, msg io.WriterTo) error

// Send calls f(from, to, msg).
func (f SendFunc) Send(from string, to []string, msg io.WriterTo) error {
	return f(from, to, msg)
}

// Send sends emails using the given Sender.
func Send(s Sender, msg ...*Message) error {
	for i, m := range msg {
		if err := send(s, m); err != nil {
			return fmt.Errorf("gomail: could not send email %d: %v", i+1, err)
		}
	}

	return nil
}

func send(s Sender, m *Message) error {
	from, err := m.getFrom()
	if err != nil {
		return err
	}

	to, err := m.getRecipients()
	if err != nil {
		return err
	}

	if err := s.Send(from, to, m); err != nil {
		return err
	}

	return nil
}

func (m *Message) getFrom() (string, error) {
	from := m.header["Sender"]
	if len(from) == 0 {
		from = m.header["From"]
		if len(from) == 0 {
			return "", errors.New(`gomail: invalid message, "From" field is absent`)
		}
	}

	return parseAddress(from[0])
}

func (m *Message) getRecipients() ([]string, error) {
	n := 0
	for _, field := range []string{"To", "Cc", "Bcc"} {
		if addresses, ok := m.header[field]; ok {
			n += len(addresses)
		}
	}
	list := make([]string, 0, n)

	for _, field := range []string{"To", "Cc", "Bcc"} {
		if addresses, ok := m.header[field]; ok {
			for _, a := range addresses {
				addr, err := parseAddress(a)
				if err != nil {
					return nil, err
				}
				list = addAddress(list, addr)
			}
		}
	}

	return list, nil
}

func addAddress(list []string, addr string) []string {
	for _, a := range list {
		if addr == a {
			return list
		}
	}

	return append(list, addr)
}

func parseAddress(field string) (string, error) {
	a, err := mail.ParseAddress(field)
	if a == nil {
		return "", err
	}

	return a.Address, err
}
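send.go above defines the Sender interface and the SendFunc adapter. The sketch below drives Send with a plain function instead of an SMTP connection, assuming only the API in this diff; the buffer capture and the printed summary are illustrative, not part of the library.

package main

import (
	"bytes"
	"fmt"
	"io"
	"log"

	"gopkg.in/gomail.v2"
)

func main() {
	m := gomail.NewMessage()
	m.SetHeader("From", "from@example.com")
	m.SetHeader("To", "to@example.com")
	m.SetHeader("Subject", "Hello!")
	m.SetBody("text/plain", "Hello!")

	// SendFunc adapts an ordinary function into a Sender, so Send can be used
	// without dialing anything; here the rendered message is captured in a
	// buffer and its size reported.
	capture := gomail.SendFunc(func(from string, to []string, msg io.WriterTo) error {
		var buf bytes.Buffer
		if _, err := msg.WriteTo(&buf); err != nil {
			return err
		}
		fmt.Printf("from %s to %v: %d bytes\n", from, to, buf.Len())
		return nil
	})

	if err := gomail.Send(capture, m); err != nil {
		log.Fatal(err)
	}
}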
80
vendor/gopkg.in/gomail.v2/send_test.go
generated
vendored
Normal file
80
vendor/gopkg.in/gomail.v2/send_test.go
generated
vendored
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
package gomail
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
testTo1 = "to1@example.com"
|
||||
testTo2 = "to2@example.com"
|
||||
testFrom = "from@example.com"
|
||||
testBody = "Test message"
|
||||
testMsg = "To: " + testTo1 + ", " + testTo2 + "\r\n" +
|
||||
"From: " + testFrom + "\r\n" +
|
||||
"Mime-Version: 1.0\r\n" +
|
||||
"Date: Wed, 25 Jun 2014 17:46:00 +0000\r\n" +
|
||||
"Content-Type: text/plain; charset=UTF-8\r\n" +
|
||||
"Content-Transfer-Encoding: quoted-printable\r\n" +
|
||||
"\r\n" +
|
||||
testBody
|
||||
)
|
||||
|
||||
type mockSender SendFunc
|
||||
|
||||
func (s mockSender) Send(from string, to []string, msg io.WriterTo) error {
|
||||
return s(from, to, msg)
|
||||
}
|
||||
|
||||
type mockSendCloser struct {
|
||||
mockSender
|
||||
close func() error
|
||||
}
|
||||
|
||||
func (s *mockSendCloser) Close() error {
|
||||
return s.close()
|
||||
}
|
||||
|
||||
func TestSend(t *testing.T) {
|
||||
s := &mockSendCloser{
|
||||
mockSender: stubSend(t, testFrom, []string{testTo1, testTo2}, testMsg),
|
||||
close: func() error {
|
||||
t.Error("Close() should not be called in Send()")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
if err := Send(s, getTestMessage()); err != nil {
|
||||
t.Errorf("Send(): %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func getTestMessage() *Message {
|
||||
m := NewMessage()
|
||||
m.SetHeader("From", testFrom)
|
||||
m.SetHeader("To", testTo1, testTo2)
|
||||
m.SetBody("text/plain", testBody)
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
func stubSend(t *testing.T, wantFrom string, wantTo []string, wantBody string) mockSender {
|
||||
return func(from string, to []string, msg io.WriterTo) error {
|
||||
if from != wantFrom {
|
||||
t.Errorf("invalid from, got %q, want %q", from, wantFrom)
|
||||
}
|
||||
if !reflect.DeepEqual(to, wantTo) {
|
||||
t.Errorf("invalid to, got %v, want %v", to, wantTo)
|
||||
}
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
_, err := msg.WriteTo(buf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
compareBodies(t, buf.String(), wantBody)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
175
vendor/gopkg.in/gomail.v2/smtp.go
generated
vendored
Normal file
175
vendor/gopkg.in/gomail.v2/smtp.go
generated
vendored
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
package gomail
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/smtp"
|
||||
)
|
||||
|
||||
// A Dialer is a dialer to an SMTP server.
|
||||
type Dialer struct {
|
||||
// Host represents the host of the SMTP server.
|
||||
Host string
|
||||
// Port represents the port of the SMTP server.
|
||||
Port int
|
||||
// Auth represents the authentication mechanism used to authenticate to the
|
||||
// SMTP server.
|
||||
Auth smtp.Auth
|
||||
// SSL defines whether an SSL connection is used. It should be false in
|
||||
// most cases since the authentication mechanism should use the STARTTLS
|
||||
// extension instead.
|
||||
SSL bool
|
||||
// TLSConfig represents the TLS configuration used for the TLS (when the
// STARTTLS extension is used) or SSL connection.
|
||||
TLSConfig *tls.Config
|
||||
}
|
||||
|
||||
// NewPlainDialer returns a Dialer. The given parameters are used to connect to
|
||||
// the SMTP server via a PLAIN authentication mechanism.
|
||||
//
|
||||
// It falls back to the LOGIN mechanism if it is the only mechanism advertised by
// the server.
|
||||
func NewPlainDialer(host string, port int, username, password string) *Dialer {
|
||||
return &Dialer{
|
||||
Host: host,
|
||||
Port: port,
|
||||
Auth: &plainAuth{
|
||||
username: username,
|
||||
password: password,
|
||||
host: host,
|
||||
},
|
||||
SSL: port == 465,
|
||||
}
|
||||
}
|
||||
|
||||
// Dial dials and authenticates to an SMTP server. The returned SendCloser
|
||||
// should be closed when done using it.
|
||||
func (d *Dialer) Dial() (SendCloser, error) {
|
||||
c, err := d.dial()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.Auth != nil {
|
||||
if ok, _ := c.Extension("AUTH"); ok {
|
||||
if err = c.Auth(d.Auth); err != nil {
|
||||
c.Close()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &smtpSender{c}, nil
|
||||
}
|
||||
|
||||
func (d *Dialer) dial() (smtpClient, error) {
|
||||
if d.SSL {
|
||||
return d.sslDial()
|
||||
}
|
||||
return d.starttlsDial()
|
||||
}
|
||||
|
||||
func (d *Dialer) starttlsDial() (smtpClient, error) {
|
||||
c, err := smtpDial(addr(d.Host, d.Port))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ok, _ := c.Extension("STARTTLS"); ok {
|
||||
if err := c.StartTLS(d.tlsConfig()); err != nil {
|
||||
c.Close()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func (d *Dialer) sslDial() (smtpClient, error) {
|
||||
conn, err := tlsDial("tcp", addr(d.Host, d.Port), d.tlsConfig())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newClient(conn, d.Host)
|
||||
}
|
||||
|
||||
func (d *Dialer) tlsConfig() *tls.Config {
|
||||
if d.TLSConfig == nil {
|
||||
return &tls.Config{ServerName: d.Host}
|
||||
}
|
||||
|
||||
return d.TLSConfig
|
||||
}
|
||||
|
||||
func addr(host string, port int) string {
|
||||
return fmt.Sprintf("%s:%d", host, port)
|
||||
}
|
||||
|
||||
// DialAndSend opens a connection to the SMTP server, sends the given emails and
|
||||
// closes the connection.
|
||||
func (d *Dialer) DialAndSend(m ...*Message) error {
|
||||
s, err := d.Dial()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer s.Close()
|
||||
|
||||
return Send(s, m...)
|
||||
}
|
||||
|
||||
type smtpSender struct {
|
||||
smtpClient
|
||||
}
|
||||
|
||||
func (c *smtpSender) Send(from string, to []string, msg io.WriterTo) error {
|
||||
if err := c.Mail(from); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, addr := range to {
|
||||
if err := c.Rcpt(addr); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
w, err := c.Data()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err = msg.WriteTo(w); err != nil {
|
||||
w.Close()
|
||||
return err
|
||||
}
|
||||
|
||||
return w.Close()
|
||||
}
|
||||
|
||||
func (c *smtpSender) Close() error {
|
||||
return c.Quit()
|
||||
}
|
||||
|
||||
// Stubbed out for tests.
|
||||
var (
|
||||
smtpDial = func(addr string) (smtpClient, error) {
|
||||
return smtp.Dial(addr)
|
||||
}
|
||||
tlsDial = tls.Dial
|
||||
newClient = func(conn net.Conn, host string) (smtpClient, error) {
|
||||
return smtp.NewClient(conn, host)
|
||||
}
|
||||
)
|
||||
|
||||
type smtpClient interface {
|
||||
Extension(string) (bool, string)
|
||||
StartTLS(*tls.Config) error
|
||||
Auth(smtp.Auth) error
|
||||
Mail(string) error
|
||||
Rcpt(string) error
|
||||
Data() (io.WriteCloser, error)
|
||||
Quit() error
|
||||
Close() error
|
||||
}
|
||||
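smtp.go above documents the Dialer fields, including SSL for an implicit TLS connection on port 465 and TLSConfig for overriding the default {ServerName: Host} configuration. The sketch below constructs a Dialer directly rather than via NewPlainDialer; the host, credentials, and the use of net/smtp's PlainAuth are assumptions for illustration.

package main

import (
	"crypto/tls"
	"log"
	"net/smtp"

	"gopkg.in/gomail.v2"
)

func main() {
	m := gomail.NewMessage()
	m.SetHeader("From", "from@example.com")
	m.SetHeader("To", "to@example.com")
	m.SetHeader("Subject", "Hello!")
	m.SetBody("text/plain", "Hello!")

	// Port 465 with SSL set means the connection is wrapped in TLS from the
	// start instead of upgrading via STARTTLS; TLSConfig replaces the default
	// config that only sets ServerName.
	d := &gomail.Dialer{
		Host: "smtp.example.com",
		Port: 465,
		Auth: smtp.PlainAuth("", "user", "123456", "smtp.example.com"),
		SSL:  true,
		TLSConfig: &tls.Config{
			ServerName: "smtp.example.com",
		},
	}

	if err := d.DialAndSend(m); err != nil {
		log.Fatal(err)
	}
}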
254
vendor/gopkg.in/gomail.v2/smtp_test.go
generated
vendored
Normal file
254
vendor/gopkg.in/gomail.v2/smtp_test.go
generated
vendored
Normal file
|
|
@ -0,0 +1,254 @@
|
|||
package gomail
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"io"
|
||||
"net"
|
||||
"net/smtp"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
testPort = 587
|
||||
testSSLPort = 465
|
||||
)
|
||||
|
||||
var (
|
||||
testTLSConn = &tls.Conn{}
|
||||
testConfig = &tls.Config{InsecureSkipVerify: true}
|
||||
)
|
||||
|
||||
func TestDialer(t *testing.T) {
|
||||
d := NewPlainDialer(testHost, testPort, "user", "pwd")
|
||||
testSendMail(t, d, []string{
|
||||
"Extension STARTTLS",
|
||||
"StartTLS",
|
||||
"Extension AUTH",
|
||||
"Auth",
|
||||
"Mail " + testFrom,
|
||||
"Rcpt " + testTo1,
|
||||
"Rcpt " + testTo2,
|
||||
"Data",
|
||||
"Write message",
|
||||
"Close writer",
|
||||
"Quit",
|
||||
"Close",
|
||||
})
|
||||
}
|
||||
|
||||
func TestDialerSSL(t *testing.T) {
|
||||
d := NewPlainDialer(testHost, testSSLPort, "user", "pwd")
|
||||
testSendMail(t, d, []string{
|
||||
"Extension AUTH",
|
||||
"Auth",
|
||||
"Mail " + testFrom,
|
||||
"Rcpt " + testTo1,
|
||||
"Rcpt " + testTo2,
|
||||
"Data",
|
||||
"Write message",
|
||||
"Close writer",
|
||||
"Quit",
|
||||
"Close",
|
||||
})
|
||||
}
|
||||
|
||||
func TestDialerConfig(t *testing.T) {
|
||||
d := NewPlainDialer(testHost, testPort, "user", "pwd")
|
||||
d.TLSConfig = testConfig
|
||||
testSendMail(t, d, []string{
|
||||
"Extension STARTTLS",
|
||||
"StartTLS",
|
||||
"Extension AUTH",
|
||||
"Auth",
|
||||
"Mail " + testFrom,
|
||||
"Rcpt " + testTo1,
|
||||
"Rcpt " + testTo2,
|
||||
"Data",
|
||||
"Write message",
|
||||
"Close writer",
|
||||
"Quit",
|
||||
"Close",
|
||||
})
|
||||
}
|
||||
|
||||
func TestDialerSSLConfig(t *testing.T) {
|
||||
d := NewPlainDialer(testHost, testSSLPort, "user", "pwd")
|
||||
d.TLSConfig = testConfig
|
||||
testSendMail(t, d, []string{
|
||||
"Extension AUTH",
|
||||
"Auth",
|
||||
"Mail " + testFrom,
|
||||
"Rcpt " + testTo1,
|
||||
"Rcpt " + testTo2,
|
||||
"Data",
|
||||
"Write message",
|
||||
"Close writer",
|
||||
"Quit",
|
||||
"Close",
|
||||
})
|
||||
}
|
||||
|
||||
func TestDialerNoAuth(t *testing.T) {
|
||||
d := &Dialer{
|
||||
Host: testHost,
|
||||
Port: testPort,
|
||||
}
|
||||
testSendMail(t, d, []string{
|
||||
"Extension STARTTLS",
|
||||
"StartTLS",
|
||||
"Mail " + testFrom,
|
||||
"Rcpt " + testTo1,
|
||||
"Rcpt " + testTo2,
|
||||
"Data",
|
||||
"Write message",
|
||||
"Close writer",
|
||||
"Quit",
|
||||
"Close",
|
||||
})
|
||||
}
|
||||
|
||||
type mockClient struct {
|
||||
t *testing.T
|
||||
i int
|
||||
want []string
|
||||
addr string
|
||||
auth smtp.Auth
|
||||
config *tls.Config
|
||||
}
|
||||
|
||||
func (c *mockClient) Extension(ext string) (bool, string) {
|
||||
c.do("Extension " + ext)
|
||||
return true, ""
|
||||
}
|
||||
|
||||
func (c *mockClient) StartTLS(config *tls.Config) error {
|
||||
assertConfig(c.t, config, c.config)
|
||||
c.do("StartTLS")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *mockClient) Auth(a smtp.Auth) error {
|
||||
assertAuth(c.t, a, c.auth)
|
||||
c.do("Auth")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *mockClient) Mail(from string) error {
|
||||
c.do("Mail " + from)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *mockClient) Rcpt(to string) error {
|
||||
c.do("Rcpt " + to)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *mockClient) Data() (io.WriteCloser, error) {
|
||||
c.do("Data")
|
||||
return &mockWriter{c: c, want: testMsg}, nil
|
||||
}
|
||||
|
||||
func (c *mockClient) Quit() error {
|
||||
c.do("Quit")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *mockClient) Close() error {
|
||||
c.do("Close")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *mockClient) do(cmd string) {
|
||||
if c.i >= len(c.want) {
|
||||
c.t.Fatalf("Invalid command %q", cmd)
|
||||
}
|
||||
|
||||
if cmd != c.want[c.i] {
|
||||
c.t.Fatalf("Invalid command, got %q, want %q", cmd, c.want[c.i])
|
||||
}
|
||||
c.i++
|
||||
}
|
||||
|
||||
type mockWriter struct {
|
||||
want string
|
||||
c *mockClient
|
||||
buf bytes.Buffer
|
||||
}
|
||||
|
||||
func (w *mockWriter) Write(p []byte) (int, error) {
|
||||
if w.buf.Len() == 0 {
|
||||
w.c.do("Write message")
|
||||
}
|
||||
w.buf.Write(p)
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
func (w *mockWriter) Close() error {
|
||||
compareBodies(w.c.t, w.buf.String(), w.want)
|
||||
w.c.do("Close writer")
|
||||
return nil
|
||||
}
|
||||
|
||||
func testSendMail(t *testing.T, d *Dialer, want []string) {
|
||||
testClient := &mockClient{
|
||||
t: t,
|
||||
want: want,
|
||||
addr: addr(d.Host, d.Port),
|
||||
auth: testAuth,
|
||||
config: d.TLSConfig,
|
||||
}
|
||||
|
||||
smtpDial = func(addr string) (smtpClient, error) {
|
||||
assertAddr(t, addr, testClient.addr)
|
||||
return testClient, nil
|
||||
}
|
||||
|
||||
tlsDial = func(network, addr string, config *tls.Config) (*tls.Conn, error) {
|
||||
if network != "tcp" {
|
||||
t.Errorf("Invalid network, got %q, want tcp", network)
|
||||
}
|
||||
assertAddr(t, addr, testClient.addr)
|
||||
assertConfig(t, config, testClient.config)
|
||||
return testTLSConn, nil
|
||||
}
|
||||
|
||||
newClient = func(conn net.Conn, host string) (smtpClient, error) {
|
||||
if conn != testTLSConn {
|
||||
t.Error("Invalid TLS connection used")
|
||||
}
|
||||
if host != testHost {
|
||||
t.Errorf("Invalid host, got %q, want %q", host, testHost)
|
||||
}
|
||||
return testClient, nil
|
||||
}
|
||||
|
||||
if err := d.DialAndSend(getTestMessage()); err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
func assertAuth(t *testing.T, got, want smtp.Auth) {
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("Invalid auth, got %#v, want %#v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func assertAddr(t *testing.T, got, want string) {
|
||||
if got != want {
|
||||
t.Errorf("Invalid addr, got %q, want %q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func assertConfig(t *testing.T, got, want *tls.Config) {
|
||||
if want == nil {
|
||||
want = &tls.Config{ServerName: testHost}
|
||||
}
|
||||
if got.ServerName != want.ServerName {
|
||||
t.Errorf("Invalid field ServerName in config, got %q, want %q", got.ServerName, want.ServerName)
|
||||
}
|
||||
if got.InsecureSkipVerify != want.InsecureSkipVerify {
|
||||
t.Errorf("Invalid field InsecureSkipVerify in config, got %v, want %v", got.InsecureSkipVerify, want.InsecureSkipVerify)
|
||||
}
|
||||
}
|
||||
242
vendor/gopkg.in/gomail.v2/writeto.go
generated
vendored
Normal file
242
vendor/gopkg.in/gomail.v2/writeto.go
generated
vendored
Normal file
|
|
@ -0,0 +1,242 @@
|
|||
package gomail
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"io"
|
||||
"mime"
|
||||
"mime/multipart"
|
||||
"path/filepath"
|
||||
"time"
|
||||
)
|
||||
|
||||
// WriteTo implements io.WriterTo. It dumps the whole message into w.
|
||||
func (m *Message) WriteTo(w io.Writer) (int64, error) {
|
||||
mw := &messageWriter{w: w}
|
||||
mw.writeMessage(m)
|
||||
return mw.n, mw.err
|
||||
}
|
||||
|
||||
func (w *messageWriter) writeMessage(m *Message) {
|
||||
if _, ok := m.header["Mime-Version"]; !ok {
|
||||
w.writeString("Mime-Version: 1.0\r\n")
|
||||
}
|
||||
if _, ok := m.header["Date"]; !ok {
|
||||
w.writeHeader("Date", m.FormatDate(now()))
|
||||
}
|
||||
w.writeHeaders(m.header)
|
||||
|
||||
if m.hasMixedPart() {
|
||||
w.openMultipart("mixed")
|
||||
}
|
||||
|
||||
if m.hasRelatedPart() {
|
||||
w.openMultipart("related")
|
||||
}
|
||||
|
||||
if m.hasAlternativePart() {
|
||||
w.openMultipart("alternative")
|
||||
}
|
||||
for _, part := range m.parts {
|
||||
w.writeHeaders(part.header)
|
||||
w.writeBody(part.copier, m.encoding)
|
||||
}
|
||||
if m.hasAlternativePart() {
|
||||
w.closeMultipart()
|
||||
}
|
||||
|
||||
w.addFiles(m.embedded, false)
|
||||
if m.hasRelatedPart() {
|
||||
w.closeMultipart()
|
||||
}
|
||||
|
||||
w.addFiles(m.attachments, true)
|
||||
if m.hasMixedPart() {
|
||||
w.closeMultipart()
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Message) hasMixedPart() bool {
|
||||
return (len(m.parts) > 0 && len(m.attachments) > 0) || len(m.attachments) > 1
|
||||
}
|
||||
|
||||
func (m *Message) hasRelatedPart() bool {
|
||||
return (len(m.parts) > 0 && len(m.embedded) > 0) || len(m.embedded) > 1
|
||||
}
|
||||
|
||||
func (m *Message) hasAlternativePart() bool {
|
||||
return len(m.parts) > 1
|
||||
}
|
||||
|
||||
type messageWriter struct {
|
||||
w io.Writer
|
||||
n int64
|
||||
writers [3]*multipart.Writer
|
||||
partWriter io.Writer
|
||||
depth uint8
|
||||
err error
|
||||
}
|
||||
|
||||
func (w *messageWriter) openMultipart(mimeType string) {
|
||||
mw := multipart.NewWriter(w)
|
||||
contentType := "multipart/" + mimeType + "; boundary=" + mw.Boundary()
|
||||
w.writers[w.depth] = mw
|
||||
|
||||
if w.depth == 0 {
|
||||
w.writeHeader("Content-Type", contentType)
|
||||
w.writeString("\r\n")
|
||||
} else {
|
||||
w.createPart(map[string][]string{
|
||||
"Content-Type": {contentType},
|
||||
})
|
||||
}
|
||||
w.depth++
|
||||
}
|
||||
|
||||
func (w *messageWriter) createPart(h map[string][]string) {
|
||||
w.partWriter, w.err = w.writers[w.depth-1].CreatePart(h)
|
||||
}
|
||||
|
||||
func (w *messageWriter) closeMultipart() {
|
||||
if w.depth > 0 {
|
||||
w.writers[w.depth-1].Close()
|
||||
w.depth--
|
||||
}
|
||||
}
|
||||
|
||||
func (w *messageWriter) addFiles(files []*file, isAttachment bool) {
|
||||
for _, f := range files {
|
||||
if _, ok := f.Header["Content-Type"]; !ok {
|
||||
mediaType := mime.TypeByExtension(filepath.Ext(f.Name))
|
||||
if mediaType == "" {
|
||||
mediaType = "application/octet-stream"
|
||||
}
|
||||
f.setHeader("Content-Type", mediaType+`; name="`+f.Name+`"`)
|
||||
}
|
||||
|
||||
if _, ok := f.Header["Content-Transfer-Encoding"]; !ok {
|
||||
f.setHeader("Content-Transfer-Encoding", string(Base64))
|
||||
}
|
||||
|
||||
if _, ok := f.Header["Content-Disposition"]; !ok {
|
||||
var disp string
|
||||
if isAttachment {
|
||||
disp = "attachment"
|
||||
} else {
|
||||
disp = "inline"
|
||||
}
|
||||
f.setHeader("Content-Disposition", disp+`; filename="`+f.Name+`"`)
|
||||
}
|
||||
|
||||
if !isAttachment {
|
||||
if _, ok := f.Header["Content-ID"]; !ok {
|
||||
f.setHeader("Content-ID", "<"+f.Name+">")
|
||||
}
|
||||
}
|
||||
w.writeHeaders(f.Header)
|
||||
w.writeBody(f.CopyFunc, Base64)
|
||||
}
|
||||
}
|
||||
|
||||
func (w *messageWriter) Write(p []byte) (int, error) {
|
||||
if w.err != nil {
|
||||
return 0, errors.New("gomail: cannot write as writer is in error")
|
||||
}
|
||||
|
||||
var n int
|
||||
n, w.err = w.w.Write(p)
|
||||
w.n += int64(n)
|
||||
return n, w.err
|
||||
}
|
||||
|
||||
func (w *messageWriter) writeString(s string) {
|
||||
n, _ := io.WriteString(w.w, s)
|
||||
w.n += int64(n)
|
||||
}
|
||||
|
||||
func (w *messageWriter) writeStrings(a []string, sep string) {
|
||||
if len(a) > 0 {
|
||||
w.writeString(a[0])
|
||||
if len(a) == 1 {
|
||||
return
|
||||
}
|
||||
}
|
||||
for _, s := range a[1:] {
|
||||
w.writeString(sep)
|
||||
w.writeString(s)
|
||||
}
|
||||
}
|
||||
|
||||
func (w *messageWriter) writeHeader(k string, v ...string) {
|
||||
w.writeString(k)
|
||||
w.writeString(": ")
|
||||
w.writeStrings(v, ", ")
|
||||
w.writeString("\r\n")
|
||||
}
|
||||
|
||||
func (w *messageWriter) writeHeaders(h map[string][]string) {
|
||||
if w.depth == 0 {
|
||||
for k, v := range h {
|
||||
if k != "Bcc" {
|
||||
w.writeHeader(k, v...)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
w.createPart(h)
|
||||
}
|
||||
}
|
||||
|
||||
func (w *messageWriter) writeBody(f func(io.Writer) error, enc Encoding) {
|
||||
var subWriter io.Writer
|
||||
if w.depth == 0 {
|
||||
w.writeString("\r\n")
|
||||
subWriter = w.w
|
||||
} else {
|
||||
subWriter = w.partWriter
|
||||
}
|
||||
|
||||
if enc == Base64 {
|
||||
wc := base64.NewEncoder(base64.StdEncoding, newBase64LineWriter(subWriter))
|
||||
w.err = f(wc)
|
||||
wc.Close()
|
||||
} else if enc == Unencoded {
|
||||
w.err = f(subWriter)
|
||||
} else {
|
||||
wc := newQPWriter(subWriter)
|
||||
w.err = f(wc)
|
||||
wc.Close()
|
||||
}
|
||||
}
|
||||
|
||||
// As required by RFC 2045, 6.7. (page 21) for quoted-printable, and
|
||||
// RFC 2045, 6.8. (page 25) for base64.
|
||||
const maxLineLen = 76
|
||||
|
||||
// base64LineWriter limits text encoded in base64 to 76 characters per line
|
||||
type base64LineWriter struct {
|
||||
w io.Writer
|
||||
lineLen int
|
||||
}
|
||||
|
||||
func newBase64LineWriter(w io.Writer) *base64LineWriter {
|
||||
return &base64LineWriter{w: w}
|
||||
}
|
||||
|
||||
func (w *base64LineWriter) Write(p []byte) (int, error) {
|
||||
n := 0
|
||||
for len(p)+w.lineLen > maxLineLen {
|
||||
w.w.Write(p[:maxLineLen-w.lineLen])
|
||||
w.w.Write([]byte("\r\n"))
|
||||
p = p[maxLineLen-w.lineLen:]
|
||||
n += maxLineLen - w.lineLen
|
||||
w.lineLen = 0
|
||||
}
|
||||
|
||||
w.w.Write(p)
|
||||
w.lineLen += len(p)
|
||||
|
||||
return n + len(p), nil
|
||||
}
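// Worked example of the wrapping above (illustrative): with lineLen == 70 and a
// 10-byte p, the loop writes p[:6] followed by "\r\n" and resets lineLen to 0;
// the remaining 4 bytes are then written below, leaving lineLen == 4 and
// returning 6 + 4 = 10.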
|
||||
|
||||
// Stubbed out for testing.
|
||||
var now = time.Now
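// Sketch of how the stub above is typically exercised (an assumption about the
// package's tests, not part of its API): a test pins the clock by reassigning
// the package-level variable and restores it afterwards:
//
//	now = func() time.Time { return time.Date(2014, 6, 25, 17, 46, 0, 0, time.UTC) }
//	defer func() { now = time.Now }()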
|
||||
9
vendor/gopkg.in/yaml.v2/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,9 @@
language: go
|
||||
|
||||
go:
|
||||
- 1.4
|
||||
- 1.5
|
||||
- 1.6
|
||||
- tip
|
||||
|
||||
go_import_path: gopkg.in/yaml.v2
|
||||
201
vendor/gopkg.in/yaml.v2/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,201 @@
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
31
vendor/gopkg.in/yaml.v2/LICENSE.libyaml
generated
vendored
Normal file
@@ -0,0 +1,31 @@
The following files were ported to Go from C files of libyaml, and thus
|
||||
are still covered by their original copyright and license:
|
||||
|
||||
apic.go
|
||||
emitterc.go
|
||||
parserc.go
|
||||
readerc.go
|
||||
scannerc.go
|
||||
writerc.go
|
||||
yamlh.go
|
||||
yamlprivateh.go
|
||||
|
||||
Copyright (c) 2006 Kirill Simonov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
133
vendor/gopkg.in/yaml.v2/README.md
generated
vendored
Normal file
@@ -0,0 +1,133 @@
# YAML support for the Go language

Introduction
------------

The yaml package enables Go programs to comfortably encode and decode YAML
values. It was developed within [Canonical](https://www.canonical.com) as
part of the [juju](https://juju.ubuntu.com) project, and is based on a
pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
C library to parse and generate YAML data quickly and reliably.

Compatibility
-------------

The yaml package supports most of YAML 1.1 and 1.2, including support for
anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
implemented, and base-60 floats from YAML 1.1 are purposefully not
supported since they're a poor design and are gone in YAML 1.2.
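
Since the Example section below does not touch anchors or merging, here is a
minimal sketch of how a document using an anchor and the `<<` merge key decodes
with this package (the keys and values are invented for illustration):

```Go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v2"
)

// doc declares an anchor (&defaults) and merges it into the development
// mapping with the << merge key.
var doc = `
defaults: &defaults
  adapter: postgres
  host: localhost
development:
  <<: *defaults
  database: dev_db
`

func main() {
	m := make(map[interface{}]interface{})
	if err := yaml.Unmarshal([]byte(doc), &m); err != nil {
		log.Fatalf("error: %v", err)
	}
	// development inherits adapter and host from the defaults anchor.
	fmt.Println(m["development"])
}
```
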
Installation and usage
----------------------

The import path for the package is *gopkg.in/yaml.v2*.

To install it, run:

    go get gopkg.in/yaml.v2

API documentation
-----------------

If opened in a browser, the import path itself leads to the API documentation:

  * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)

API stability
-------------

The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).

License
-------

The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.

Example
-------

Some more examples can be found in the "examples" folder.
```Go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var data = `
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d: [3, 4]
|
||||
`
|
||||
|
||||
type T struct {
|
||||
A string
|
||||
B struct {
|
||||
RenamedC int `yaml:"c"`
|
||||
D []int `yaml:",flow"`
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
t := T{}
|
||||
|
||||
err := yaml.Unmarshal([]byte(data), &t)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- t:\n%v\n\n", t)
|
||||
|
||||
d, err := yaml.Marshal(&t)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- t dump:\n%s\n\n", string(d))
|
||||
|
||||
m := make(map[interface{}]interface{})
|
||||
|
||||
err = yaml.Unmarshal([]byte(data), &m)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- m:\n%v\n\n", m)
|
||||
|
||||
d, err = yaml.Marshal(&m)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- m dump:\n%s\n\n", string(d))
|
||||
}
|
||||
```
|
||||
|
||||
This example will generate the following output:
|
||||
|
||||
```
|
||||
--- t:
|
||||
{Easy! {2 [3 4]}}
|
||||
|
||||
--- t dump:
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d: [3, 4]
|
||||
|
||||
|
||||
--- m:
|
||||
map[a:Easy! b:map[c:2 d:[3 4]]]
|
||||
|
||||
--- m dump:
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d:
|
||||
- 3
|
||||
- 4
|
||||
```
|
||||
|
||||
742
vendor/gopkg.in/yaml.v2/apic.go
generated
vendored
Normal file
@@ -0,0 +1,742 @@
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
|
||||
//fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
|
||||
|
||||
// Check if we can move the queue at the beginning of the buffer.
|
||||
if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
|
||||
if parser.tokens_head != len(parser.tokens) {
|
||||
copy(parser.tokens, parser.tokens[parser.tokens_head:])
|
||||
}
|
||||
parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
|
||||
parser.tokens_head = 0
|
||||
}
|
||||
parser.tokens = append(parser.tokens, *token)
|
||||
if pos < 0 {
|
||||
return
|
||||
}
|
||||
copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
|
||||
parser.tokens[parser.tokens_head+pos] = *token
|
||||
}
|
||||
|
||||
// Create a new parser object.
|
||||
func yaml_parser_initialize(parser *yaml_parser_t) bool {
|
||||
*parser = yaml_parser_t{
|
||||
raw_buffer: make([]byte, 0, input_raw_buffer_size),
|
||||
buffer: make([]byte, 0, input_buffer_size),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy a parser object.
|
||||
func yaml_parser_delete(parser *yaml_parser_t) {
|
||||
*parser = yaml_parser_t{}
|
||||
}
|
||||
|
||||
// String read handler.
|
||||
func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||
if parser.input_pos == len(parser.input) {
|
||||
return 0, io.EOF
|
||||
}
|
||||
n = copy(buffer, parser.input[parser.input_pos:])
|
||||
parser.input_pos += n
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// File read handler.
|
||||
func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||
return parser.input_file.Read(buffer)
|
||||
}
|
||||
|
||||
// Set a string input.
|
||||
func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
|
||||
if parser.read_handler != nil {
|
||||
panic("must set the input source only once")
|
||||
}
|
||||
parser.read_handler = yaml_string_read_handler
|
||||
parser.input = input
|
||||
parser.input_pos = 0
|
||||
}
|
||||
|
||||
// Set a file input.
|
||||
func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
|
||||
if parser.read_handler != nil {
|
||||
panic("must set the input source only once")
|
||||
}
|
||||
parser.read_handler = yaml_file_read_handler
|
||||
parser.input_file = file
|
||||
}
|
||||
|
||||
// Set the source encoding.
|
||||
func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
|
||||
if parser.encoding != yaml_ANY_ENCODING {
|
||||
panic("must set the encoding only once")
|
||||
}
|
||||
parser.encoding = encoding
|
||||
}
|
||||
|
||||
// Create a new emitter object.
|
||||
func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
|
||||
*emitter = yaml_emitter_t{
|
||||
buffer: make([]byte, output_buffer_size),
|
||||
raw_buffer: make([]byte, 0, output_raw_buffer_size),
|
||||
states: make([]yaml_emitter_state_t, 0, initial_stack_size),
|
||||
events: make([]yaml_event_t, 0, initial_queue_size),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy an emitter object.
|
||||
func yaml_emitter_delete(emitter *yaml_emitter_t) {
|
||||
*emitter = yaml_emitter_t{}
|
||||
}
|
||||
|
||||
// String write handler.
|
||||
func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||
*emitter.output_buffer = append(*emitter.output_buffer, buffer...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// File write handler.
|
||||
func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||
_, err := emitter.output_file.Write(buffer)
|
||||
return err
|
||||
}
|
||||
|
||||
// Set a string output.
|
||||
func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
|
||||
if emitter.write_handler != nil {
|
||||
panic("must set the output target only once")
|
||||
}
|
||||
emitter.write_handler = yaml_string_write_handler
|
||||
emitter.output_buffer = output_buffer
|
||||
}
|
||||
|
||||
// Set a file output.
|
||||
func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
|
||||
if emitter.write_handler != nil {
|
||||
panic("must set the output target only once")
|
||||
}
|
||||
emitter.write_handler = yaml_file_write_handler
|
||||
emitter.output_file = file
|
||||
}
|
||||
|
||||
// Set the output encoding.
|
||||
func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
|
||||
if emitter.encoding != yaml_ANY_ENCODING {
|
||||
panic("must set the output encoding only once")
|
||||
}
|
||||
emitter.encoding = encoding
|
||||
}
|
||||
|
||||
// Set the canonical output style.
|
||||
func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
|
||||
emitter.canonical = canonical
|
||||
}
|
||||
|
||||
// Set the indentation increment.
|
||||
func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
|
||||
if indent < 2 || indent > 9 {
|
||||
indent = 2
|
||||
}
|
||||
emitter.best_indent = indent
|
||||
}
|
||||
|
||||
// Set the preferred line width.
|
||||
func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
|
||||
if width < 0 {
|
||||
width = -1
|
||||
}
|
||||
emitter.best_width = width
|
||||
}
|
||||
|
||||
// Set if unescaped non-ASCII characters are allowed.
|
||||
func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
|
||||
emitter.unicode = unicode
|
||||
}
|
||||
|
||||
// Set the preferred line break character.
|
||||
func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
|
||||
emitter.line_break = line_break
|
||||
}
|
||||
|
||||
///*
|
||||
// * Destroy a token object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(void)
|
||||
//yaml_token_delete(yaml_token_t *token)
|
||||
//{
|
||||
// assert(token); // Non-NULL token object expected.
|
||||
//
|
||||
// switch (token.type)
|
||||
// {
|
||||
// case YAML_TAG_DIRECTIVE_TOKEN:
|
||||
// yaml_free(token.data.tag_directive.handle);
|
||||
// yaml_free(token.data.tag_directive.prefix);
|
||||
// break;
|
||||
//
|
||||
// case YAML_ALIAS_TOKEN:
|
||||
// yaml_free(token.data.alias.value);
|
||||
// break;
|
||||
//
|
||||
// case YAML_ANCHOR_TOKEN:
|
||||
// yaml_free(token.data.anchor.value);
|
||||
// break;
|
||||
//
|
||||
// case YAML_TAG_TOKEN:
|
||||
// yaml_free(token.data.tag.handle);
|
||||
// yaml_free(token.data.tag.suffix);
|
||||
// break;
|
||||
//
|
||||
// case YAML_SCALAR_TOKEN:
|
||||
// yaml_free(token.data.scalar.value);
|
||||
// break;
|
||||
//
|
||||
// default:
|
||||
// break;
|
||||
// }
|
||||
//
|
||||
// memset(token, 0, sizeof(yaml_token_t));
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Check if a string is a valid UTF-8 sequence.
|
||||
// *
|
||||
// * Check 'reader.c' for more details on UTF-8 encoding.
|
||||
// */
|
||||
//
|
||||
//static int
|
||||
//yaml_check_utf8(yaml_char_t *start, size_t length)
|
||||
//{
|
||||
// yaml_char_t *end = start+length;
|
||||
// yaml_char_t *pointer = start;
|
||||
//
|
||||
// while (pointer < end) {
|
||||
// unsigned char octet;
|
||||
// unsigned int width;
|
||||
// unsigned int value;
|
||||
// size_t k;
|
||||
//
|
||||
// octet = pointer[0];
|
||||
// width = (octet & 0x80) == 0x00 ? 1 :
|
||||
// (octet & 0xE0) == 0xC0 ? 2 :
|
||||
// (octet & 0xF0) == 0xE0 ? 3 :
|
||||
// (octet & 0xF8) == 0xF0 ? 4 : 0;
|
||||
// value = (octet & 0x80) == 0x00 ? octet & 0x7F :
|
||||
// (octet & 0xE0) == 0xC0 ? octet & 0x1F :
|
||||
// (octet & 0xF0) == 0xE0 ? octet & 0x0F :
|
||||
// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
|
||||
// if (!width) return 0;
|
||||
// if (pointer+width > end) return 0;
|
||||
// for (k = 1; k < width; k ++) {
|
||||
// octet = pointer[k];
|
||||
// if ((octet & 0xC0) != 0x80) return 0;
|
||||
// value = (value << 6) + (octet & 0x3F);
|
||||
// }
|
||||
// if (!((width == 1) ||
|
||||
// (width == 2 && value >= 0x80) ||
|
||||
// (width == 3 && value >= 0x800) ||
|
||||
// (width == 4 && value >= 0x10000))) return 0;
|
||||
//
|
||||
// pointer += width;
|
||||
// }
|
||||
//
|
||||
// return 1;
|
||||
//}
|
||||
//
|
||||
|
||||
// Create STREAM-START.
|
||||
func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_STREAM_START_EVENT,
|
||||
encoding: encoding,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create STREAM-END.
|
||||
func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_STREAM_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create DOCUMENT-START.
|
||||
func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
|
||||
tag_directives []yaml_tag_directive_t, implicit bool) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_START_EVENT,
|
||||
version_directive: version_directive,
|
||||
tag_directives: tag_directives,
|
||||
implicit: implicit,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create DOCUMENT-END.
|
||||
func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_END_EVENT,
|
||||
implicit: implicit,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
///*
|
||||
// * Create ALIAS.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
|
||||
//{
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// anchor_copy *yaml_char_t = NULL
|
||||
//
|
||||
// assert(event) // Non-NULL event object is expected.
|
||||
// assert(anchor) // Non-NULL anchor is expected.
|
||||
//
|
||||
// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
|
||||
//
|
||||
// anchor_copy = yaml_strdup(anchor)
|
||||
// if (!anchor_copy)
|
||||
// return 0
|
||||
//
|
||||
// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
|
||||
//
|
||||
// return 1
|
||||
//}
|
||||
|
||||
// Create SCALAR.
|
||||
func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_SCALAR_EVENT,
|
||||
anchor: anchor,
|
||||
tag: tag,
|
||||
value: value,
|
||||
implicit: plain_implicit,
|
||||
quoted_implicit: quoted_implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create SEQUENCE-START.
|
||||
func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_SEQUENCE_START_EVENT,
|
||||
anchor: anchor,
|
||||
tag: tag,
|
||||
implicit: implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create SEQUENCE-END.
|
||||
func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_SEQUENCE_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create MAPPING-START.
|
||||
func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_MAPPING_START_EVENT,
|
||||
anchor: anchor,
|
||||
tag: tag,
|
||||
implicit: implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create MAPPING-END.
|
||||
func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_MAPPING_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy an event object.
|
||||
func yaml_event_delete(event *yaml_event_t) {
|
||||
*event = yaml_event_t{}
|
||||
}
|
||||
|
||||
///*
|
||||
// * Create a document object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_initialize(document *yaml_document_t,
|
||||
// version_directive *yaml_version_directive_t,
|
||||
// tag_directives_start *yaml_tag_directive_t,
|
||||
// tag_directives_end *yaml_tag_directive_t,
|
||||
// start_implicit int, end_implicit int)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// struct {
|
||||
// start *yaml_node_t
|
||||
// end *yaml_node_t
|
||||
// top *yaml_node_t
|
||||
// } nodes = { NULL, NULL, NULL }
|
||||
// version_directive_copy *yaml_version_directive_t = NULL
|
||||
// struct {
|
||||
// start *yaml_tag_directive_t
|
||||
// end *yaml_tag_directive_t
|
||||
// top *yaml_tag_directive_t
|
||||
// } tag_directives_copy = { NULL, NULL, NULL }
|
||||
// value yaml_tag_directive_t = { NULL, NULL }
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
// assert((tag_directives_start && tag_directives_end) ||
|
||||
// (tag_directives_start == tag_directives_end))
|
||||
// // Valid tag directives are expected.
|
||||
//
|
||||
// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
|
||||
//
|
||||
// if (version_directive) {
|
||||
// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
|
||||
// if (!version_directive_copy) goto error
|
||||
// version_directive_copy.major = version_directive.major
|
||||
// version_directive_copy.minor = version_directive.minor
|
||||
// }
|
||||
//
|
||||
// if (tag_directives_start != tag_directives_end) {
|
||||
// tag_directive *yaml_tag_directive_t
|
||||
// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
|
||||
// goto error
|
||||
// for (tag_directive = tag_directives_start
|
||||
// tag_directive != tag_directives_end; tag_directive ++) {
|
||||
// assert(tag_directive.handle)
|
||||
// assert(tag_directive.prefix)
|
||||
// if (!yaml_check_utf8(tag_directive.handle,
|
||||
// strlen((char *)tag_directive.handle)))
|
||||
// goto error
|
||||
// if (!yaml_check_utf8(tag_directive.prefix,
|
||||
// strlen((char *)tag_directive.prefix)))
|
||||
// goto error
|
||||
// value.handle = yaml_strdup(tag_directive.handle)
|
||||
// value.prefix = yaml_strdup(tag_directive.prefix)
|
||||
// if (!value.handle || !value.prefix) goto error
|
||||
// if (!PUSH(&context, tag_directives_copy, value))
|
||||
// goto error
|
||||
// value.handle = NULL
|
||||
// value.prefix = NULL
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
|
||||
// tag_directives_copy.start, tag_directives_copy.top,
|
||||
// start_implicit, end_implicit, mark, mark)
|
||||
//
|
||||
// return 1
|
||||
//
|
||||
//error:
|
||||
// STACK_DEL(&context, nodes)
|
||||
// yaml_free(version_directive_copy)
|
||||
// while (!STACK_EMPTY(&context, tag_directives_copy)) {
|
||||
// value yaml_tag_directive_t = POP(&context, tag_directives_copy)
|
||||
// yaml_free(value.handle)
|
||||
// yaml_free(value.prefix)
|
||||
// }
|
||||
// STACK_DEL(&context, tag_directives_copy)
|
||||
// yaml_free(value.handle)
|
||||
// yaml_free(value.prefix)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Destroy a document object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(void)
|
||||
//yaml_document_delete(document *yaml_document_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// tag_directive *yaml_tag_directive_t
|
||||
//
|
||||
//	context.error = YAML_NO_ERROR // Eliminate a compiler warning.
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// while (!STACK_EMPTY(&context, document.nodes)) {
|
||||
// node yaml_node_t = POP(&context, document.nodes)
|
||||
// yaml_free(node.tag)
|
||||
// switch (node.type) {
|
||||
// case YAML_SCALAR_NODE:
|
||||
// yaml_free(node.data.scalar.value)
|
||||
// break
|
||||
// case YAML_SEQUENCE_NODE:
|
||||
// STACK_DEL(&context, node.data.sequence.items)
|
||||
// break
|
||||
// case YAML_MAPPING_NODE:
|
||||
// STACK_DEL(&context, node.data.mapping.pairs)
|
||||
// break
|
||||
// default:
|
||||
// assert(0) // Should not happen.
|
||||
// }
|
||||
// }
|
||||
// STACK_DEL(&context, document.nodes)
|
||||
//
|
||||
// yaml_free(document.version_directive)
|
||||
// for (tag_directive = document.tag_directives.start
|
||||
// tag_directive != document.tag_directives.end
|
||||
// tag_directive++) {
|
||||
// yaml_free(tag_directive.handle)
|
||||
// yaml_free(tag_directive.prefix)
|
||||
// }
|
||||
// yaml_free(document.tag_directives.start)
|
||||
//
|
||||
// memset(document, 0, sizeof(yaml_document_t))
|
||||
//}
|
||||
//
|
||||
///**
|
||||
// * Get a document node.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(yaml_node_t *)
|
||||
//yaml_document_get_node(document *yaml_document_t, index int)
|
||||
//{
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (index > 0 && document.nodes.start + index <= document.nodes.top) {
|
||||
// return document.nodes.start + index - 1
|
||||
// }
|
||||
// return NULL
|
||||
//}
|
||||
//
|
||||
///**
|
||||
// * Get the root object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(yaml_node_t *)
|
||||
//yaml_document_get_root_node(document *yaml_document_t)
|
||||
//{
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (document.nodes.top != document.nodes.start) {
|
||||
// return document.nodes.start
|
||||
// }
|
||||
// return NULL
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Add a scalar node to a document.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_add_scalar(document *yaml_document_t,
|
||||
// tag *yaml_char_t, value *yaml_char_t, length int,
|
||||
// style yaml_scalar_style_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// tag_copy *yaml_char_t = NULL
|
||||
// value_copy *yaml_char_t = NULL
|
||||
// node yaml_node_t
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
// assert(value) // Non-NULL value is expected.
|
||||
//
|
||||
// if (!tag) {
|
||||
// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||
// tag_copy = yaml_strdup(tag)
|
||||
// if (!tag_copy) goto error
|
||||
//
|
||||
// if (length < 0) {
|
||||
// length = strlen((char *)value)
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(value, length)) goto error
|
||||
// value_copy = yaml_malloc(length+1)
|
||||
// if (!value_copy) goto error
|
||||
// memcpy(value_copy, value, length)
|
||||
// value_copy[length] = '\0'
|
||||
//
|
||||
// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
|
||||
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||
//
|
||||
// return document.nodes.top - document.nodes.start
|
||||
//
|
||||
//error:
|
||||
// yaml_free(tag_copy)
|
||||
// yaml_free(value_copy)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Add a sequence node to a document.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_add_sequence(document *yaml_document_t,
|
||||
// tag *yaml_char_t, style yaml_sequence_style_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// tag_copy *yaml_char_t = NULL
|
||||
// struct {
|
||||
// start *yaml_node_item_t
|
||||
// end *yaml_node_item_t
|
||||
// top *yaml_node_item_t
|
||||
// } items = { NULL, NULL, NULL }
|
||||
// node yaml_node_t
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (!tag) {
|
||||
// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||
// tag_copy = yaml_strdup(tag)
|
||||
// if (!tag_copy) goto error
|
||||
//
|
||||
// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
|
||||
//
|
||||
// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
|
||||
// style, mark, mark)
|
||||
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||
//
|
||||
// return document.nodes.top - document.nodes.start
|
||||
//
|
||||
//error:
|
||||
// STACK_DEL(&context, items)
|
||||
// yaml_free(tag_copy)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Add a mapping node to a document.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_add_mapping(document *yaml_document_t,
|
||||
// tag *yaml_char_t, style yaml_mapping_style_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// tag_copy *yaml_char_t = NULL
|
||||
// struct {
|
||||
// start *yaml_node_pair_t
|
||||
// end *yaml_node_pair_t
|
||||
// top *yaml_node_pair_t
|
||||
// } pairs = { NULL, NULL, NULL }
|
||||
// node yaml_node_t
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (!tag) {
|
||||
// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||
// tag_copy = yaml_strdup(tag)
|
||||
// if (!tag_copy) goto error
|
||||
//
|
||||
// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
|
||||
//
|
||||
// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
|
||||
// style, mark, mark)
|
||||
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||
//
|
||||
// return document.nodes.top - document.nodes.start
|
||||
//
|
||||
//error:
|
||||
// STACK_DEL(&context, pairs)
|
||||
// yaml_free(tag_copy)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Append an item to a sequence node.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_append_sequence_item(document *yaml_document_t,
|
||||
// sequence int, item int)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
//
|
||||
// assert(document) // Non-NULL document is required.
|
||||
// assert(sequence > 0
|
||||
// && document.nodes.start + sequence <= document.nodes.top)
|
||||
// // Valid sequence id is required.
|
||||
// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
|
||||
// // A sequence node is required.
|
||||
// assert(item > 0 && document.nodes.start + item <= document.nodes.top)
|
||||
// // Valid item id is required.
|
||||
//
|
||||
// if (!PUSH(&context,
|
||||
// document.nodes.start[sequence-1].data.sequence.items, item))
|
||||
// return 0
|
||||
//
|
||||
// return 1
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Append a pair of a key and a value to a mapping node.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_append_mapping_pair(document *yaml_document_t,
|
||||
// mapping int, key int, value int)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
//
|
||||
// pair yaml_node_pair_t
|
||||
//
|
||||
// assert(document) // Non-NULL document is required.
|
||||
// assert(mapping > 0
|
||||
// && document.nodes.start + mapping <= document.nodes.top)
|
||||
// // Valid mapping id is required.
|
||||
// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
|
||||
// // A mapping node is required.
|
||||
// assert(key > 0 && document.nodes.start + key <= document.nodes.top)
|
||||
// // Valid key id is required.
|
||||
// assert(value > 0 && document.nodes.start + value <= document.nodes.top)
|
||||
// // Valid value id is required.
|
||||
//
|
||||
// pair.key = key
|
||||
// pair.value = value
|
||||
//
|
||||
// if (!PUSH(&context,
|
||||
// document.nodes.start[mapping-1].data.mapping.pairs, pair))
|
||||
// return 0
|
||||
//
|
||||
// return 1
|
||||
//}
|
||||
//
|
||||
//
|
||||
685
vendor/gopkg.in/yaml.v2/decode.go
generated
vendored
Normal file
@@ -0,0 +1,685 @@
package yaml
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
documentNode = 1 << iota
|
||||
mappingNode
|
||||
sequenceNode
|
||||
scalarNode
|
||||
aliasNode
|
||||
)
|
||||
|
||||
type node struct {
|
||||
kind int
|
||||
line, column int
|
||||
tag string
|
||||
value string
|
||||
implicit bool
|
||||
children []*node
|
||||
anchors map[string]*node
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Parser, produces a node tree out of a libyaml event stream.
|
||||
|
||||
type parser struct {
|
||||
parser yaml_parser_t
|
||||
event yaml_event_t
|
||||
doc *node
|
||||
}
|
||||
|
||||
func newParser(b []byte) *parser {
|
||||
p := parser{}
|
||||
if !yaml_parser_initialize(&p.parser) {
|
||||
panic("failed to initialize YAML emitter")
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
b = []byte{'\n'}
|
||||
}
|
||||
|
||||
yaml_parser_set_input_string(&p.parser, b)
|
||||
|
||||
p.skip()
|
||||
if p.event.typ != yaml_STREAM_START_EVENT {
|
||||
panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
p.skip()
|
||||
return &p
|
||||
}
|
||||
|
||||
func (p *parser) destroy() {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
yaml_event_delete(&p.event)
|
||||
}
|
||||
yaml_parser_delete(&p.parser)
|
||||
}
|
||||
|
||||
func (p *parser) skip() {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
if p.event.typ == yaml_STREAM_END_EVENT {
|
||||
failf("attempted to go past the end of stream; corrupted value?")
|
||||
}
|
||||
yaml_event_delete(&p.event)
|
||||
}
|
||||
if !yaml_parser_parse(&p.parser, &p.event) {
|
||||
p.fail()
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) fail() {
|
||||
var where string
|
||||
var line int
|
||||
if p.parser.problem_mark.line != 0 {
|
||||
line = p.parser.problem_mark.line
|
||||
} else if p.parser.context_mark.line != 0 {
|
||||
line = p.parser.context_mark.line
|
||||
}
|
||||
if line != 0 {
|
||||
where = "line " + strconv.Itoa(line) + ": "
|
||||
}
|
||||
var msg string
|
||||
if len(p.parser.problem) > 0 {
|
||||
msg = p.parser.problem
|
||||
} else {
|
||||
msg = "unknown problem parsing YAML content"
|
||||
}
|
||||
failf("%s%s", where, msg)
|
||||
}
|
||||
|
||||
func (p *parser) anchor(n *node, anchor []byte) {
|
||||
if anchor != nil {
|
||||
p.doc.anchors[string(anchor)] = n
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parse() *node {
|
||||
switch p.event.typ {
|
||||
case yaml_SCALAR_EVENT:
|
||||
return p.scalar()
|
||||
case yaml_ALIAS_EVENT:
|
||||
return p.alias()
|
||||
case yaml_MAPPING_START_EVENT:
|
||||
return p.mapping()
|
||||
case yaml_SEQUENCE_START_EVENT:
|
||||
return p.sequence()
|
||||
case yaml_DOCUMENT_START_EVENT:
|
||||
return p.document()
|
||||
case yaml_STREAM_END_EVENT:
|
||||
// Happens when attempting to decode an empty buffer.
|
||||
return nil
|
||||
default:
|
||||
panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) node(kind int) *node {
|
||||
return &node{
|
||||
kind: kind,
|
||||
line: p.event.start_mark.line,
|
||||
column: p.event.start_mark.column,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) document() *node {
|
||||
n := p.node(documentNode)
|
||||
n.anchors = make(map[string]*node)
|
||||
p.doc = n
|
||||
p.skip()
|
||||
n.children = append(n.children, p.parse())
|
||||
if p.event.typ != yaml_DOCUMENT_END_EVENT {
|
||||
panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) alias() *node {
|
||||
n := p.node(aliasNode)
|
||||
n.value = string(p.event.anchor)
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) scalar() *node {
|
||||
n := p.node(scalarNode)
|
||||
n.value = string(p.event.value)
|
||||
n.tag = string(p.event.tag)
|
||||
n.implicit = p.event.implicit
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) sequence() *node {
|
||||
n := p.node(sequenceNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
for p.event.typ != yaml_SEQUENCE_END_EVENT {
|
||||
n.children = append(n.children, p.parse())
|
||||
}
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) mapping() *node {
|
||||
n := p.node(mappingNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
for p.event.typ != yaml_MAPPING_END_EVENT {
|
||||
n.children = append(n.children, p.parse(), p.parse())
|
||||
}
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Decoder, unmarshals a node into a provided value.
|
||||
|
||||
type decoder struct {
|
||||
doc *node
|
||||
aliases map[string]bool
|
||||
mapType reflect.Type
|
||||
terrors []string
|
||||
strict bool
|
||||
}
|
||||
|
||||
var (
|
||||
mapItemType = reflect.TypeOf(MapItem{})
|
||||
durationType = reflect.TypeOf(time.Duration(0))
|
||||
defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
|
||||
ifaceType = defaultMapType.Elem()
|
||||
)
|
||||
|
||||
func newDecoder(strict bool) *decoder {
|
||||
d := &decoder{mapType: defaultMapType, strict: strict}
|
||||
d.aliases = make(map[string]bool)
|
||||
return d
|
||||
}
|
||||
|
||||
func (d *decoder) terror(n *node, tag string, out reflect.Value) {
|
||||
if n.tag != "" {
|
||||
tag = n.tag
|
||||
}
|
||||
value := n.value
|
||||
if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
|
||||
if len(value) > 10 {
|
||||
value = " `" + value[:7] + "...`"
|
||||
} else {
|
||||
value = " `" + value + "`"
|
||||
}
|
||||
}
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
|
||||
}
|
||||
|
||||
func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
|
||||
terrlen := len(d.terrors)
|
||||
err := u.UnmarshalYAML(func(v interface{}) (err error) {
|
||||
defer handleErr(&err)
|
||||
d.unmarshal(n, reflect.ValueOf(v))
|
||||
if len(d.terrors) > terrlen {
|
||||
issues := d.terrors[terrlen:]
|
||||
d.terrors = d.terrors[:terrlen]
|
||||
return &TypeError{issues}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if e, ok := err.(*TypeError); ok {
|
||||
d.terrors = append(d.terrors, e.Errors...)
|
||||
return false
|
||||
}
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
|
||||
// if a value is found to implement it.
|
||||
// It returns the initialized and dereferenced out value, whether
|
||||
// unmarshalling was already done by UnmarshalYAML, and if so whether
|
||||
// its types unmarshalled appropriately.
|
||||
//
|
||||
// If n holds a null value, prepare returns before doing anything.
|
||||
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
|
||||
if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "" && n.implicit) {
|
||||
return out, false, false
|
||||
}
|
||||
again := true
|
||||
for again {
|
||||
again = false
|
||||
if out.Kind() == reflect.Ptr {
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.New(out.Type().Elem()))
|
||||
}
|
||||
out = out.Elem()
|
||||
again = true
|
||||
}
|
||||
if out.CanAddr() {
|
||||
if u, ok := out.Addr().Interface().(Unmarshaler); ok {
|
||||
good = d.callUnmarshaler(n, u)
|
||||
return out, true, good
|
||||
}
|
||||
}
|
||||
}
|
||||
return out, false, false
|
||||
}
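// Sketch (an assumption for illustration, not taken from this file): a type
// opts into the Unmarshaler path that prepare detects by implementing
// UnmarshalYAML, for example:
//
//	type Flag bool
//
//	func (f *Flag) UnmarshalYAML(unmarshal func(interface{}) error) error {
//		var s string
//		if err := unmarshal(&s); err != nil {
//			return err
//		}
//		*f = Flag(s == "on")
//		return nil
//	}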
|
||||
|
||||
func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
|
||||
switch n.kind {
|
||||
case documentNode:
|
||||
return d.document(n, out)
|
||||
case aliasNode:
|
||||
return d.alias(n, out)
|
||||
}
|
||||
out, unmarshaled, good := d.prepare(n, out)
|
||||
if unmarshaled {
|
||||
return good
|
||||
}
|
||||
switch n.kind {
|
||||
case scalarNode:
|
||||
good = d.scalar(n, out)
|
||||
case mappingNode:
|
||||
good = d.mapping(n, out)
|
||||
case sequenceNode:
|
||||
good = d.sequence(n, out)
|
||||
default:
|
||||
panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
|
||||
}
|
||||
return good
|
||||
}
|
||||
|
||||
func (d *decoder) document(n *node, out reflect.Value) (good bool) {
|
||||
if len(n.children) == 1 {
|
||||
d.doc = n
|
||||
d.unmarshal(n.children[0], out)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
|
||||
an, ok := d.doc.anchors[n.value]
|
||||
if !ok {
|
||||
failf("unknown anchor '%s' referenced", n.value)
|
||||
}
|
||||
if d.aliases[n.value] {
|
||||
failf("anchor '%s' value contains itself", n.value)
|
||||
}
|
||||
d.aliases[n.value] = true
|
||||
good = d.unmarshal(an, out)
|
||||
delete(d.aliases, n.value)
|
||||
return good
|
||||
}
|
||||
|
||||
var zeroValue reflect.Value
|
||||
|
||||
func resetMap(out reflect.Value) {
|
||||
for _, k := range out.MapKeys() {
|
||||
out.SetMapIndex(k, zeroValue)
|
||||
}
|
||||
}
|
||||
|
||||
func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
||||
var tag string
|
||||
var resolved interface{}
|
||||
if n.tag == "" && !n.implicit {
|
||||
tag = yaml_STR_TAG
|
||||
resolved = n.value
|
||||
} else {
|
||||
tag, resolved = resolve(n.tag, n.value)
|
||||
if tag == yaml_BINARY_TAG {
|
||||
data, err := base64.StdEncoding.DecodeString(resolved.(string))
|
||||
if err != nil {
|
||||
failf("!!binary value contains invalid base64 data")
|
||||
}
|
||||
resolved = string(data)
|
||||
}
|
||||
}
|
||||
if resolved == nil {
|
||||
if out.Kind() == reflect.Map && !out.CanAddr() {
|
||||
resetMap(out)
|
||||
} else {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
}
|
||||
return true
|
||||
}
|
||||
if s, ok := resolved.(string); ok && out.CanAddr() {
|
||||
if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
|
||||
err := u.UnmarshalText([]byte(s))
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
switch out.Kind() {
|
||||
case reflect.String:
|
||||
if tag == yaml_BINARY_TAG {
|
||||
out.SetString(resolved.(string))
|
||||
good = true
|
||||
} else if resolved != nil {
|
||||
out.SetString(n.value)
|
||||
good = true
|
||||
}
|
||||
case reflect.Interface:
|
||||
if resolved == nil {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
} else {
|
||||
out.Set(reflect.ValueOf(resolved))
|
||||
}
|
||||
good = true
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
if !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
}
|
||||
case int64:
|
||||
if !out.OverflowInt(resolved) {
|
||||
out.SetInt(resolved)
|
||||
good = true
|
||||
}
|
||||
case uint64:
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
}
|
||||
case float64:
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
}
|
||||
case string:
|
||||
if out.Type() == durationType {
|
||||
d, err := time.ParseDuration(resolved)
|
||||
if err == nil {
|
||||
out.SetInt(int64(d))
|
||||
good = true
|
||||
}
|
||||
}
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
case int64:
|
||||
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
case uint64:
|
||||
if !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
case float64:
|
||||
if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
}
|
||||
case reflect.Bool:
|
||||
switch resolved := resolved.(type) {
|
||||
case bool:
|
||||
out.SetBool(resolved)
|
||||
good = true
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
case int64:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
case uint64:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
case float64:
|
||||
out.SetFloat(resolved)
|
||||
good = true
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if out.Type().Elem() == reflect.TypeOf(resolved) {
|
||||
// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
|
||||
elem := reflect.New(out.Type().Elem())
|
||||
elem.Elem().Set(reflect.ValueOf(resolved))
|
||||
out.Set(elem)
|
||||
good = true
|
||||
}
|
||||
}
|
||||
if !good {
|
||||
d.terror(n, tag, out)
|
||||
}
|
||||
return good
|
||||
}
|
||||
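// Illustrative sketch (not from the upstream sources): the string case above is
// what lets plain scalars decode directly into time.Duration fields, e.g. with
// the public gopkg.in/yaml.v2 API:
//
//	var cfg struct {
//		Timeout time.Duration `yaml:"timeout"`
//	}
//	err := yaml.Unmarshal([]byte("timeout: 3s\n"), &cfg)
//	// err == nil, cfg.Timeout == 3*time.Second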
|
||||
func settableValueOf(i interface{}) reflect.Value {
|
||||
v := reflect.ValueOf(i)
|
||||
sv := reflect.New(v.Type()).Elem()
|
||||
sv.Set(v)
|
||||
return sv
|
||||
}
|
||||
|
||||
func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
|
||||
l := len(n.children)
|
||||
|
||||
var iface reflect.Value
|
||||
switch out.Kind() {
|
||||
case reflect.Slice:
|
||||
out.Set(reflect.MakeSlice(out.Type(), l, l))
|
||||
case reflect.Interface:
|
||||
// No type hints. Will have to use a generic sequence.
|
||||
iface = out
|
||||
out = settableValueOf(make([]interface{}, l))
|
||||
default:
|
||||
d.terror(n, yaml_SEQ_TAG, out)
|
||||
return false
|
||||
}
|
||||
et := out.Type().Elem()
|
||||
|
||||
j := 0
|
||||
for i := 0; i < l; i++ {
|
||||
e := reflect.New(et).Elem()
|
||||
if ok := d.unmarshal(n.children[i], e); ok {
|
||||
out.Index(j).Set(e)
|
||||
j++
|
||||
}
|
||||
}
|
||||
out.Set(out.Slice(0, j))
|
||||
if iface.IsValid() {
|
||||
iface.Set(out)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
|
||||
switch out.Kind() {
|
||||
case reflect.Struct:
|
||||
return d.mappingStruct(n, out)
|
||||
case reflect.Slice:
|
||||
return d.mappingSlice(n, out)
|
||||
case reflect.Map:
|
||||
// okay
|
||||
case reflect.Interface:
|
||||
if d.mapType.Kind() == reflect.Map {
|
||||
iface := out
|
||||
out = reflect.MakeMap(d.mapType)
|
||||
iface.Set(out)
|
||||
} else {
|
||||
slicev := reflect.New(d.mapType).Elem()
|
||||
if !d.mappingSlice(n, slicev) {
|
||||
return false
|
||||
}
|
||||
out.Set(slicev)
|
||||
return true
|
||||
}
|
||||
default:
|
||||
d.terror(n, yaml_MAP_TAG, out)
|
||||
return false
|
||||
}
|
||||
outt := out.Type()
|
||||
kt := outt.Key()
|
||||
et := outt.Elem()
|
||||
|
||||
mapType := d.mapType
|
||||
if outt.Key() == ifaceType && outt.Elem() == ifaceType {
|
||||
d.mapType = outt
|
||||
}
|
||||
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.MakeMap(outt))
|
||||
}
|
||||
l := len(n.children)
|
||||
for i := 0; i < l; i += 2 {
|
||||
if isMerge(n.children[i]) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
k := reflect.New(kt).Elem()
|
||||
if d.unmarshal(n.children[i], k) {
|
||||
kkind := k.Kind()
|
||||
if kkind == reflect.Interface {
|
||||
kkind = k.Elem().Kind()
|
||||
}
|
||||
if kkind == reflect.Map || kkind == reflect.Slice {
|
||||
failf("invalid map key: %#v", k.Interface())
|
||||
}
|
||||
e := reflect.New(et).Elem()
|
||||
if d.unmarshal(n.children[i+1], e) {
|
||||
out.SetMapIndex(k, e)
|
||||
}
|
||||
}
|
||||
}
|
||||
d.mapType = mapType
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
|
||||
outt := out.Type()
|
||||
if outt.Elem() != mapItemType {
|
||||
d.terror(n, yaml_MAP_TAG, out)
|
||||
return false
|
||||
}
|
||||
|
||||
mapType := d.mapType
|
||||
d.mapType = outt
|
||||
|
||||
var slice []MapItem
|
||||
var l = len(n.children)
|
||||
for i := 0; i < l; i += 2 {
|
||||
if isMerge(n.children[i]) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
item := MapItem{}
|
||||
k := reflect.ValueOf(&item.Key).Elem()
|
||||
if d.unmarshal(n.children[i], k) {
|
||||
v := reflect.ValueOf(&item.Value).Elem()
|
||||
if d.unmarshal(n.children[i+1], v) {
|
||||
slice = append(slice, item)
|
||||
}
|
||||
}
|
||||
}
|
||||
out.Set(reflect.ValueOf(slice))
|
||||
d.mapType = mapType
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
||||
sinfo, err := getStructInfo(out.Type())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
name := settableValueOf("")
|
||||
l := len(n.children)
|
||||
|
||||
var inlineMap reflect.Value
|
||||
var elemType reflect.Type
|
||||
if sinfo.InlineMap != -1 {
|
||||
inlineMap = out.Field(sinfo.InlineMap)
|
||||
inlineMap.Set(reflect.New(inlineMap.Type()).Elem())
|
||||
elemType = inlineMap.Type().Elem()
|
||||
}
|
||||
|
||||
for i := 0; i < l; i += 2 {
|
||||
ni := n.children[i]
|
||||
if isMerge(ni) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
if !d.unmarshal(ni, name) {
|
||||
continue
|
||||
}
|
||||
if info, ok := sinfo.FieldsMap[name.String()]; ok {
|
||||
var field reflect.Value
|
||||
if info.Inline == nil {
|
||||
field = out.Field(info.Num)
|
||||
} else {
|
||||
field = out.FieldByIndex(info.Inline)
|
||||
}
|
||||
d.unmarshal(n.children[i+1], field)
|
||||
} else if sinfo.InlineMap != -1 {
|
||||
if inlineMap.IsNil() {
|
||||
inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
|
||||
}
|
||||
value := reflect.New(elemType).Elem()
|
||||
d.unmarshal(n.children[i+1], value)
|
||||
inlineMap.SetMapIndex(name, value)
|
||||
} else if d.strict {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in struct %s", n.line+1, name.String(), out.Type()))
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func failWantMap() {
|
||||
failf("map merge requires map or sequence of maps as the value")
|
||||
}
|
||||
|
||||
func (d *decoder) merge(n *node, out reflect.Value) {
|
||||
switch n.kind {
|
||||
case mappingNode:
|
||||
d.unmarshal(n, out)
|
||||
case aliasNode:
|
||||
an, ok := d.doc.anchors[n.value]
|
||||
if ok && an.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
d.unmarshal(n, out)
|
||||
case sequenceNode:
|
||||
// Step backwards as earlier nodes take precedence.
|
||||
for i := len(n.children) - 1; i >= 0; i-- {
|
||||
ni := n.children[i]
|
||||
if ni.kind == aliasNode {
|
||||
an, ok := d.doc.anchors[ni.value]
|
||||
if ok && an.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
} else if ni.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
d.unmarshal(ni, out)
|
||||
}
|
||||
default:
|
||||
failWantMap()
|
||||
}
|
||||
}
|
||||
|
||||
func isMerge(n *node) bool {
|
||||
return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
|
||||
}
|
||||
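A brief usage sketch of the merge-key handling implemented by decoder.merge above; this is a hypothetical snippet against the public gopkg.in/yaml.v2 API, not part of the vendored sources. The "<<" key pulls the anchored mapping's entries into the target map.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

const doc = `
defaults: &defaults
  adapter: postgres
  host: localhost
development:
  <<: *defaults
  database: dev_db
`

func main() {
	var out map[string]map[string]string
	if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
		panic(err)
	}
	// "adapter" and "host" come from the merged anchor, "database" is set directly.
	fmt.Println(out["development"]["adapter"], out["development"]["database"])
}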
1017
vendor/gopkg.in/yaml.v2/decode_test.go
generated
vendored
Normal file
File diff suppressed because it is too large
1684
vendor/gopkg.in/yaml.v2/emitterc.go
generated
vendored
Normal file
File diff suppressed because it is too large
306
vendor/gopkg.in/yaml.v2/encode.go
generated
vendored
Normal file
@@ -0,0 +1,306 @@
package yaml
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type encoder struct {
|
||||
emitter yaml_emitter_t
|
||||
event yaml_event_t
|
||||
out []byte
|
||||
flow bool
|
||||
}
|
||||
|
||||
func newEncoder() (e *encoder) {
|
||||
e = &encoder{}
|
||||
e.must(yaml_emitter_initialize(&e.emitter))
|
||||
yaml_emitter_set_output_string(&e.emitter, &e.out)
|
||||
yaml_emitter_set_unicode(&e.emitter, true)
|
||||
e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
|
||||
e.emit()
|
||||
e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
|
||||
e.emit()
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *encoder) finish() {
|
||||
e.must(yaml_document_end_event_initialize(&e.event, true))
|
||||
e.emit()
|
||||
e.emitter.open_ended = false
|
||||
e.must(yaml_stream_end_event_initialize(&e.event))
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) destroy() {
|
||||
yaml_emitter_delete(&e.emitter)
|
||||
}
|
||||
|
||||
func (e *encoder) emit() {
|
||||
// This will internally delete the e.event value.
|
||||
if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
|
||||
e.must(false)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *encoder) must(ok bool) {
|
||||
if !ok {
|
||||
msg := e.emitter.problem
|
||||
if msg == "" {
|
||||
msg = "unknown problem generating YAML content"
|
||||
}
|
||||
failf("%s", msg)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||
if !in.IsValid() {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
iface := in.Interface()
|
||||
if m, ok := iface.(Marshaler); ok {
|
||||
v, err := m.MarshalYAML()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
if v == nil {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
in = reflect.ValueOf(v)
|
||||
} else if m, ok := iface.(encoding.TextMarshaler); ok {
|
||||
text, err := m.MarshalText()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
in = reflect.ValueOf(string(text))
|
||||
}
|
||||
switch in.Kind() {
|
||||
case reflect.Interface:
|
||||
if in.IsNil() {
|
||||
e.nilv()
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
case reflect.Map:
|
||||
e.mapv(tag, in)
|
||||
case reflect.Ptr:
|
||||
if in.IsNil() {
|
||||
e.nilv()
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
case reflect.Struct:
|
||||
e.structv(tag, in)
|
||||
case reflect.Slice:
|
||||
if in.Type().Elem() == mapItemType {
|
||||
e.itemsv(tag, in)
|
||||
} else {
|
||||
e.slicev(tag, in)
|
||||
}
|
||||
case reflect.String:
|
||||
e.stringv(tag, in)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
if in.Type() == durationType {
|
||||
e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
|
||||
} else {
|
||||
e.intv(tag, in)
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
e.uintv(tag, in)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
e.floatv(tag, in)
|
||||
case reflect.Bool:
|
||||
e.boolv(tag, in)
|
||||
default:
|
||||
panic("cannot marshal type: " + in.Type().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (e *encoder) mapv(tag string, in reflect.Value) {
|
||||
e.mappingv(tag, func() {
|
||||
keys := keyList(in.MapKeys())
|
||||
sort.Sort(keys)
|
||||
for _, k := range keys {
|
||||
e.marshal("", k)
|
||||
e.marshal("", in.MapIndex(k))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) itemsv(tag string, in reflect.Value) {
|
||||
e.mappingv(tag, func() {
|
||||
slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
|
||||
for _, item := range slice {
|
||||
e.marshal("", reflect.ValueOf(item.Key))
|
||||
e.marshal("", reflect.ValueOf(item.Value))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) structv(tag string, in reflect.Value) {
|
||||
sinfo, err := getStructInfo(in.Type())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
e.mappingv(tag, func() {
|
||||
for _, info := range sinfo.FieldsList {
|
||||
var value reflect.Value
|
||||
if info.Inline == nil {
|
||||
value = in.Field(info.Num)
|
||||
} else {
|
||||
value = in.FieldByIndex(info.Inline)
|
||||
}
|
||||
if info.OmitEmpty && isZero(value) {
|
||||
continue
|
||||
}
|
||||
e.marshal("", reflect.ValueOf(info.Key))
|
||||
e.flow = info.Flow
|
||||
e.marshal("", value)
|
||||
}
|
||||
if sinfo.InlineMap >= 0 {
|
||||
m := in.Field(sinfo.InlineMap)
|
||||
if m.Len() > 0 {
|
||||
e.flow = false
|
||||
keys := keyList(m.MapKeys())
|
||||
sort.Sort(keys)
|
||||
for _, k := range keys {
|
||||
if _, found := sinfo.FieldsMap[k.String()]; found {
|
||||
panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
|
||||
}
|
||||
e.marshal("", k)
|
||||
e.flow = false
|
||||
e.marshal("", m.MapIndex(k))
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) mappingv(tag string, f func()) {
|
||||
implicit := tag == ""
|
||||
style := yaml_BLOCK_MAPPING_STYLE
|
||||
if e.flow {
|
||||
e.flow = false
|
||||
style = yaml_FLOW_MAPPING_STYLE
|
||||
}
|
||||
e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
|
||||
e.emit()
|
||||
f()
|
||||
e.must(yaml_mapping_end_event_initialize(&e.event))
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) slicev(tag string, in reflect.Value) {
|
||||
implicit := tag == ""
|
||||
style := yaml_BLOCK_SEQUENCE_STYLE
|
||||
if e.flow {
|
||||
e.flow = false
|
||||
style = yaml_FLOW_SEQUENCE_STYLE
|
||||
}
|
||||
e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
|
||||
e.emit()
|
||||
n := in.Len()
|
||||
for i := 0; i < n; i++ {
|
||||
e.marshal("", in.Index(i))
|
||||
}
|
||||
e.must(yaml_sequence_end_event_initialize(&e.event))
|
||||
e.emit()
|
||||
}
|
||||
|
||||
// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1.
|
||||
//
|
||||
// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
|
||||
// in YAML 1.2 and by this package, but these should be marshalled quoted for
|
||||
// the time being for compatibility with other parsers.
|
||||
func isBase60Float(s string) (result bool) {
|
||||
// Fast path.
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
c := s[0]
|
||||
if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
|
||||
return false
|
||||
}
|
||||
// Do the full match.
|
||||
return base60float.MatchString(s)
|
||||
}
|
||||
|
||||
// From http://yaml.org/type/float.html, except the regular expression there
|
||||
// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
|
||||
var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
|
||||
|
||||
func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||
var style yaml_scalar_style_t
|
||||
s := in.String()
|
||||
rtag, rs := resolve("", s)
|
||||
if rtag == yaml_BINARY_TAG {
|
||||
if tag == "" || tag == yaml_STR_TAG {
|
||||
tag = rtag
|
||||
s = rs.(string)
|
||||
} else if tag == yaml_BINARY_TAG {
|
||||
failf("explicitly tagged !!binary data must be base64-encoded")
|
||||
} else {
|
||||
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
|
||||
}
|
||||
}
|
||||
if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
} else if strings.Contains(s, "\n") {
|
||||
style = yaml_LITERAL_SCALAR_STYLE
|
||||
} else {
|
||||
style = yaml_PLAIN_SCALAR_STYLE
|
||||
}
|
||||
e.emitScalar(s, "", tag, style)
|
||||
}
|
||||
|
||||
func (e *encoder) boolv(tag string, in reflect.Value) {
|
||||
var s string
|
||||
if in.Bool() {
|
||||
s = "true"
|
||||
} else {
|
||||
s = "false"
|
||||
}
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) intv(tag string, in reflect.Value) {
|
||||
s := strconv.FormatInt(in.Int(), 10)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) uintv(tag string, in reflect.Value) {
|
||||
s := strconv.FormatUint(in.Uint(), 10)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) floatv(tag string, in reflect.Value) {
|
||||
// FIXME: Handle 64 bits here.
|
||||
s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
|
||||
switch s {
|
||||
case "+Inf":
|
||||
s = ".inf"
|
||||
case "-Inf":
|
||||
s = "-.inf"
|
||||
case "NaN":
|
||||
s = ".nan"
|
||||
}
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) nilv() {
|
||||
e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
|
||||
implicit := tag == ""
|
||||
e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
|
||||
e.emit()
|
||||
}
|
||||
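A small, hypothetical usage sketch for the encoder above (public gopkg.in/yaml.v2 API only): field names are lowercased by default, and the ",flow" and ",omitempty" tag options exercised by the tests below select flow style and drop zero values.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type Config struct {
	Name  string `yaml:"name"`
	Ports []int  `yaml:"ports,flow"`
	Notes string `yaml:"notes,omitempty"` // omitted when empty
}

func main() {
	data, err := yaml.Marshal(Config{Name: "web", Ports: []int{80, 443}})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(data))
	// name: web
	// ports: [80, 443]
}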
501
vendor/gopkg.in/yaml.v2/encode_test.go
generated
vendored
Normal file
@@ -0,0 +1,501 @@
package yaml_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
. "gopkg.in/check.v1"
|
||||
"gopkg.in/yaml.v2"
|
||||
"net"
|
||||
"os"
|
||||
)
|
||||
|
||||
var marshalIntTest = 123
|
||||
|
||||
var marshalTests = []struct {
|
||||
value interface{}
|
||||
data string
|
||||
}{
|
||||
{
|
||||
nil,
|
||||
"null\n",
|
||||
}, {
|
||||
&struct{}{},
|
||||
"{}\n",
|
||||
}, {
|
||||
map[string]string{"v": "hi"},
|
||||
"v: hi\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": "hi"},
|
||||
"v: hi\n",
|
||||
}, {
|
||||
map[string]string{"v": "true"},
|
||||
"v: \"true\"\n",
|
||||
}, {
|
||||
map[string]string{"v": "false"},
|
||||
"v: \"false\"\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": true},
|
||||
"v: true\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": false},
|
||||
"v: false\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": 10},
|
||||
"v: 10\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": -10},
|
||||
"v: -10\n",
|
||||
}, {
|
||||
map[string]uint{"v": 42},
|
||||
"v: 42\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": int64(4294967296)},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]int64{"v": int64(4294967296)},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]uint64{"v": 4294967296},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": "10"},
|
||||
"v: \"10\"\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": 0.1},
|
||||
"v: 0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": float64(0.1)},
|
||||
"v: 0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": -0.1},
|
||||
"v: -0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.Inf(+1)},
|
||||
"v: .inf\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.Inf(-1)},
|
||||
"v: -.inf\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.NaN()},
|
||||
"v: .nan\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": nil},
|
||||
"v: null\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": ""},
|
||||
"v: \"\"\n",
|
||||
}, {
|
||||
map[string][]string{"v": []string{"A", "B"}},
|
||||
"v:\n- A\n- B\n",
|
||||
}, {
|
||||
map[string][]string{"v": []string{"A", "B\nC"}},
|
||||
"v:\n- A\n- |-\n B\n C\n",
|
||||
}, {
|
||||
map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}},
|
||||
"v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
|
||||
}, {
|
||||
map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
map[string]interface{}{"a": "-"},
|
||||
"a: '-'\n",
|
||||
},
|
||||
|
||||
// Simple values.
|
||||
{
|
||||
&marshalIntTest,
|
||||
"123\n",
|
||||
},
|
||||
|
||||
// Structures
|
||||
{
|
||||
&struct{ Hello string }{"world"},
|
||||
"hello: world\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct {
|
||||
B string
|
||||
}
|
||||
}{struct{ B string }{"c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct {
|
||||
B string
|
||||
}
|
||||
}{&struct{ B string }{"c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct {
|
||||
B string
|
||||
}
|
||||
}{},
|
||||
"a: null\n",
|
||||
}, {
|
||||
&struct{ A int }{1},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct{ A []int }{[]int{1, 2}},
|
||||
"a:\n- 1\n- 2\n",
|
||||
}, {
|
||||
&struct {
|
||||
B int "a"
|
||||
}{1},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct{ A bool }{true},
|
||||
"a: true\n",
|
||||
},
|
||||
|
||||
// Conditional flag
|
||||
{
|
||||
&struct {
|
||||
A int "a,omitempty"
|
||||
B int "b,omitempty"
|
||||
}{1, 0},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct {
|
||||
A int "a,omitempty"
|
||||
B int "b,omitempty"
|
||||
}{0, 0},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{&struct{ X, y int }{1, 2}},
|
||||
"a: {x: 1}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{nil},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{&struct{ X, y int }{}},
|
||||
"a: {x: 0}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct{ X, y int } "a,omitempty,flow"
|
||||
}{struct{ X, y int }{1, 2}},
|
||||
"a: {x: 1}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct{ X, y int } "a,omitempty,flow"
|
||||
}{struct{ X, y int }{0, 1}},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A float64 "a,omitempty"
|
||||
B float64 "b,omitempty"
|
||||
}{1, 0},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Flow flag
|
||||
{
|
||||
&struct {
|
||||
A []int "a,flow"
|
||||
}{[]int{1, 2}},
|
||||
"a: [1, 2]\n",
|
||||
}, {
|
||||
&struct {
|
||||
A map[string]string "a,flow"
|
||||
}{map[string]string{"b": "c", "d": "e"}},
|
||||
"a: {b: c, d: e}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct {
|
||||
B, D string
|
||||
} "a,flow"
|
||||
}{struct{ B, D string }{"c", "e"}},
|
||||
"a: {b: c, d: e}\n",
|
||||
},
|
||||
|
||||
// Unexported field
|
||||
{
|
||||
&struct {
|
||||
u int
|
||||
A int
|
||||
}{0, 1},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Ignored field
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
B int "-"
|
||||
}{1, 2},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Struct inlining
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
C inlineB `yaml:",inline"`
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
},
|
||||
|
||||
// Map inlining
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
C map[string]int `yaml:",inline"`
|
||||
}{1, map[string]int{"b": 2, "c": 3}},
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
},
|
||||
|
||||
// Duration
|
||||
{
|
||||
map[string]time.Duration{"a": 3 * time.Second},
|
||||
"a: 3s\n",
|
||||
},
|
||||
|
||||
// Issue #24: bug in map merging logic.
|
||||
{
|
||||
map[string]string{"a": "<foo>"},
|
||||
"a: <foo>\n",
|
||||
},
|
||||
|
||||
// Issue #34: marshal unsupported base 60 floats quoted for compatibility
|
||||
// with old YAML 1.1 parsers.
|
||||
{
|
||||
map[string]string{"a": "1:1"},
|
||||
"a: \"1:1\"\n",
|
||||
},
|
||||
|
||||
// Binary data.
|
||||
{
|
||||
map[string]string{"a": "\x00"},
|
||||
"a: \"\\0\"\n",
|
||||
}, {
|
||||
map[string]string{"a": "\x80\x81\x82"},
|
||||
"a: !!binary gIGC\n",
|
||||
}, {
|
||||
map[string]string{"a": strings.Repeat("\x90", 54)},
|
||||
"a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
|
||||
},
|
||||
|
||||
// Ordered maps.
|
||||
{
|
||||
&yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}},
|
||||
"b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n",
|
||||
},
|
||||
|
||||
// Encode unicode as utf-8 rather than in escaped form.
|
||||
{
|
||||
map[string]string{"a": "你好"},
|
||||
"a: 你好\n",
|
||||
},
|
||||
|
||||
// Support encoding.TextMarshaler.
|
||||
{
|
||||
map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)},
|
||||
"a: 1.2.3.4\n",
|
||||
},
|
||||
{
|
||||
map[string]time.Time{"a": time.Unix(1424801979, 0)},
|
||||
"a: 2015-02-24T18:19:39Z\n",
|
||||
},
|
||||
|
||||
// Ensure strings containing ": " are quoted (reported as PR #43, but not reproducible).
|
||||
{
|
||||
map[string]string{"a": "b: c"},
|
||||
"a: 'b: c'\n",
|
||||
},
|
||||
|
||||
// Containing hash mark ('#') in string should be quoted
|
||||
{
|
||||
map[string]string{"a": "Hello #comment"},
|
||||
"a: 'Hello #comment'\n",
|
||||
},
|
||||
{
|
||||
map[string]string{"a": "你好 #comment"},
|
||||
"a: '你好 #comment'\n",
|
||||
},
|
||||
}
|
||||
|
||||
func (s *S) TestMarshal(c *C) {
|
||||
defer os.Setenv("TZ", os.Getenv("TZ"))
|
||||
os.Setenv("TZ", "UTC")
|
||||
for _, item := range marshalTests {
|
||||
data, err := yaml.Marshal(item.value)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, item.data)
|
||||
}
|
||||
}
|
||||
|
||||
var marshalErrorTests = []struct {
|
||||
value interface{}
|
||||
error string
|
||||
panic string
|
||||
}{{
|
||||
value: &struct {
|
||||
B int
|
||||
inlineB ",inline"
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
panic: `Duplicated key 'b' in struct struct \{ B int; .*`,
|
||||
}, {
|
||||
value: &struct {
|
||||
A int
|
||||
B map[string]int ",inline"
|
||||
}{1, map[string]int{"a": 2}},
|
||||
panic: `Can't have key "a" in inlined map; conflicts with struct field`,
|
||||
}}
|
||||
|
||||
func (s *S) TestMarshalErrors(c *C) {
|
||||
for _, item := range marshalErrorTests {
|
||||
if item.panic != "" {
|
||||
c.Assert(func() { yaml.Marshal(item.value) }, PanicMatches, item.panic)
|
||||
} else {
|
||||
_, err := yaml.Marshal(item.value)
|
||||
c.Assert(err, ErrorMatches, item.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalTypeCache(c *C) {
|
||||
var data []byte
|
||||
var err error
|
||||
func() {
|
||||
type T struct{ A int }
|
||||
data, err = yaml.Marshal(&T{})
|
||||
c.Assert(err, IsNil)
|
||||
}()
|
||||
func() {
|
||||
type T struct{ B int }
|
||||
data, err = yaml.Marshal(&T{})
|
||||
c.Assert(err, IsNil)
|
||||
}()
|
||||
c.Assert(string(data), Equals, "b: 0\n")
|
||||
}
|
||||
|
||||
var marshalerTests = []struct {
|
||||
data string
|
||||
value interface{}
|
||||
}{
|
||||
{"_:\n hi: there\n", map[interface{}]interface{}{"hi": "there"}},
|
||||
{"_:\n- 1\n- A\n", []interface{}{1, "A"}},
|
||||
{"_: 10\n", 10},
|
||||
{"_: null\n", nil},
|
||||
{"_: BAR!\n", "BAR!"},
|
||||
}
|
||||
|
||||
type marshalerType struct {
|
||||
value interface{}
|
||||
}
|
||||
|
||||
func (o marshalerType) MarshalText() ([]byte, error) {
|
||||
panic("MarshalText called on type with MarshalYAML")
|
||||
}
|
||||
|
||||
func (o marshalerType) MarshalYAML() (interface{}, error) {
|
||||
return o.value, nil
|
||||
}
|
||||
|
||||
type marshalerValue struct {
|
||||
Field marshalerType "_"
|
||||
}
|
||||
|
||||
func (s *S) TestMarshaler(c *C) {
|
||||
for _, item := range marshalerTests {
|
||||
obj := &marshalerValue{}
|
||||
obj.Field.value = item.value
|
||||
data, err := yaml.Marshal(obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, string(item.data))
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalerWholeDocument(c *C) {
|
||||
obj := &marshalerType{}
|
||||
obj.value = map[string]string{"hello": "world!"}
|
||||
data, err := yaml.Marshal(obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, "hello: world!\n")
|
||||
}
|
||||
|
||||
type failingMarshaler struct{}
|
||||
|
||||
func (ft *failingMarshaler) MarshalYAML() (interface{}, error) {
|
||||
return nil, failingErr
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalerError(c *C) {
|
||||
_, err := yaml.Marshal(&failingMarshaler{})
|
||||
c.Assert(err, Equals, failingErr)
|
||||
}
|
||||
|
||||
func (s *S) TestSortedOutput(c *C) {
|
||||
order := []interface{}{
|
||||
false,
|
||||
true,
|
||||
1,
|
||||
uint(1),
|
||||
1.0,
|
||||
1.1,
|
||||
1.2,
|
||||
2,
|
||||
uint(2),
|
||||
2.0,
|
||||
2.1,
|
||||
"",
|
||||
".1",
|
||||
".2",
|
||||
".a",
|
||||
"1",
|
||||
"2",
|
||||
"a!10",
|
||||
"a/2",
|
||||
"a/10",
|
||||
"a~10",
|
||||
"ab/1",
|
||||
"b/1",
|
||||
"b/01",
|
||||
"b/2",
|
||||
"b/02",
|
||||
"b/3",
|
||||
"b/03",
|
||||
"b1",
|
||||
"b01",
|
||||
"b3",
|
||||
"c2.10",
|
||||
"c10.2",
|
||||
"d1",
|
||||
"d12",
|
||||
"d12a",
|
||||
}
|
||||
m := make(map[interface{}]int)
|
||||
for _, k := range order {
|
||||
m[k] = 1
|
||||
}
|
||||
data, err := yaml.Marshal(m)
|
||||
c.Assert(err, IsNil)
|
||||
out := "\n" + string(data)
|
||||
last := 0
|
||||
for i, k := range order {
|
||||
repr := fmt.Sprint(k)
|
||||
if s, ok := k.(string); ok {
|
||||
if _, err = strconv.ParseFloat(repr, 32); s == "" || err == nil {
|
||||
repr = `"` + repr + `"`
|
||||
}
|
||||
}
|
||||
index := strings.Index(out, "\n"+repr+":")
|
||||
if index == -1 {
|
||||
c.Fatalf("%#v is not in the output: %#v", k, out)
|
||||
}
|
||||
if index < last {
|
||||
c.Fatalf("%#v was generated before %#v: %q", k, order[i-1], out)
|
||||
}
|
||||
last = index
|
||||
}
|
||||
}
|
||||
41
vendor/gopkg.in/yaml.v2/example_embedded_test.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
package yaml_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// An example showing how to unmarshal embedded
|
||||
// structs from YAML.
|
||||
|
||||
type StructA struct {
|
||||
A string `yaml:"a"`
|
||||
}
|
||||
|
||||
type StructB struct {
|
||||
// Embedded structs are not treated as embedded in YAML by default. To do that,
|
||||
// add the ",inline" annotation below
|
||||
StructA `yaml:",inline"`
|
||||
B string `yaml:"b"`
|
||||
}
|
||||
|
||||
var data = `
|
||||
a: a string from struct A
|
||||
b: a string from struct B
|
||||
`
|
||||
|
||||
func ExampleUnmarshal_embedded() {
|
||||
var b StructB
|
||||
|
||||
err := yaml.Unmarshal([]byte(data), &b)
|
||||
if err != nil {
|
||||
log.Fatalf("cannot unmarshal data: %v", err)
|
||||
}
|
||||
fmt.Println(b.A)
|
||||
fmt.Println(b.B)
|
||||
// Output:
|
||||
// a string from struct A
|
||||
// a string from struct B
|
||||
}
|
||||
1095
vendor/gopkg.in/yaml.v2/parserc.go
generated
vendored
Normal file
File diff suppressed because it is too large
394
vendor/gopkg.in/yaml.v2/readerc.go
generated
vendored
Normal file
@@ -0,0 +1,394 @@
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// Set the reader error and return false.
|
||||
func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
|
||||
parser.error = yaml_READER_ERROR
|
||||
parser.problem = problem
|
||||
parser.problem_offset = offset
|
||||
parser.problem_value = value
|
||||
return false
|
||||
}
|
||||
|
||||
// Byte order marks.
|
||||
const (
|
||||
bom_UTF8 = "\xef\xbb\xbf"
|
||||
bom_UTF16LE = "\xff\xfe"
|
||||
bom_UTF16BE = "\xfe\xff"
|
||||
)
|
||||
|
||||
// Determine the input stream encoding by checking the BOM symbol. If no BOM is
|
||||
// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
|
||||
func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
|
||||
// Ensure that we had enough bytes in the raw buffer.
|
||||
for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
|
||||
if !yaml_parser_update_raw_buffer(parser) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the encoding.
|
||||
buf := parser.raw_buffer
|
||||
pos := parser.raw_buffer_pos
|
||||
avail := len(buf) - pos
|
||||
if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
|
||||
parser.encoding = yaml_UTF16LE_ENCODING
|
||||
parser.raw_buffer_pos += 2
|
||||
parser.offset += 2
|
||||
} else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
|
||||
parser.encoding = yaml_UTF16BE_ENCODING
|
||||
parser.raw_buffer_pos += 2
|
||||
parser.offset += 2
|
||||
} else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
|
||||
parser.encoding = yaml_UTF8_ENCODING
|
||||
parser.raw_buffer_pos += 3
|
||||
parser.offset += 3
|
||||
} else {
|
||||
parser.encoding = yaml_UTF8_ENCODING
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Update the raw buffer.
|
||||
func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
|
||||
size_read := 0
|
||||
|
||||
// Return if the raw buffer is full.
|
||||
if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Return on EOF.
|
||||
if parser.eof {
|
||||
return true
|
||||
}
|
||||
|
||||
// Move the remaining bytes in the raw buffer to the beginning.
|
||||
if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
|
||||
copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
|
||||
}
|
||||
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
|
||||
parser.raw_buffer_pos = 0
|
||||
|
||||
// Call the read handler to fill the buffer.
|
||||
size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
|
||||
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
|
||||
if err == io.EOF {
|
||||
parser.eof = true
|
||||
} else if err != nil {
|
||||
return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Ensure that the buffer contains at least `length` characters.
|
||||
// Return true on success, false on failure.
|
||||
//
|
||||
// The length is supposed to be significantly less than the buffer size.
|
||||
func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
|
||||
if parser.read_handler == nil {
|
||||
panic("read handler must be set")
|
||||
}
|
||||
|
||||
// If the EOF flag is set and the raw buffer is empty, do nothing.
|
||||
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Return if the buffer contains enough characters.
|
||||
if parser.unread >= length {
|
||||
return true
|
||||
}
|
||||
|
||||
// Determine the input encoding if it is not known yet.
|
||||
if parser.encoding == yaml_ANY_ENCODING {
|
||||
if !yaml_parser_determine_encoding(parser) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Move the unread characters to the beginning of the buffer.
|
||||
buffer_len := len(parser.buffer)
|
||||
if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
|
||||
copy(parser.buffer, parser.buffer[parser.buffer_pos:])
|
||||
buffer_len -= parser.buffer_pos
|
||||
parser.buffer_pos = 0
|
||||
} else if parser.buffer_pos == buffer_len {
|
||||
buffer_len = 0
|
||||
parser.buffer_pos = 0
|
||||
}
|
||||
|
||||
// Open the whole buffer for writing, and cut it before returning.
|
||||
parser.buffer = parser.buffer[:cap(parser.buffer)]
|
||||
|
||||
// Fill the buffer until it has enough characters.
|
||||
first := true
|
||||
for parser.unread < length {
|
||||
|
||||
// Fill the raw buffer if necessary.
|
||||
if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||
if !yaml_parser_update_raw_buffer(parser) {
|
||||
parser.buffer = parser.buffer[:buffer_len]
|
||||
return false
|
||||
}
|
||||
}
|
||||
first = false
|
||||
|
||||
// Decode the raw buffer.
|
||||
inner:
|
||||
for parser.raw_buffer_pos != len(parser.raw_buffer) {
|
||||
var value rune
|
||||
var width int
|
||||
|
||||
raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos
|
||||
|
||||
// Decode the next character.
|
||||
switch parser.encoding {
|
||||
case yaml_UTF8_ENCODING:
|
||||
// Decode a UTF-8 character. Check RFC 3629
|
||||
// (http://www.ietf.org/rfc/rfc3629.txt) for more details.
|
||||
//
|
||||
// The following table (taken from the RFC) is used for
|
||||
// decoding.
|
||||
//
|
||||
// Char. number range | UTF-8 octet sequence
|
||||
// (hexadecimal) | (binary)
|
||||
// --------------------+------------------------------------
|
||||
// 0000 0000-0000 007F | 0xxxxxxx
|
||||
// 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
|
||||
// 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
|
||||
// 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
//
|
||||
// Additionally, the characters in the range 0xD800-0xDFFF
|
||||
// are prohibited as they are reserved for use with UTF-16
|
||||
// surrogate pairs.
|
||||
|
||||
// Determine the length of the UTF-8 sequence.
|
||||
octet := parser.raw_buffer[parser.raw_buffer_pos]
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
width = 1
|
||||
case octet&0xE0 == 0xC0:
|
||||
width = 2
|
||||
case octet&0xF0 == 0xE0:
|
||||
width = 3
|
||||
case octet&0xF8 == 0xF0:
|
||||
width = 4
|
||||
default:
|
||||
// The leading octet is invalid.
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid leading UTF-8 octet",
|
||||
parser.offset, int(octet))
|
||||
}
|
||||
|
||||
// Check if the raw buffer contains an incomplete character.
|
||||
if width > raw_unread {
|
||||
if parser.eof {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"incomplete UTF-8 octet sequence",
|
||||
parser.offset, -1)
|
||||
}
|
||||
break inner
|
||||
}
|
||||
|
||||
// Decode the leading octet.
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
value = rune(octet & 0x7F)
|
||||
case octet&0xE0 == 0xC0:
|
||||
value = rune(octet & 0x1F)
|
||||
case octet&0xF0 == 0xE0:
|
||||
value = rune(octet & 0x0F)
|
||||
case octet&0xF8 == 0xF0:
|
||||
value = rune(octet & 0x07)
|
||||
default:
|
||||
value = 0
|
||||
}
|
||||
|
||||
// Check and decode the trailing octets.
|
||||
for k := 1; k < width; k++ {
|
||||
octet = parser.raw_buffer[parser.raw_buffer_pos+k]
|
||||
|
||||
// Check if the octet is valid.
|
||||
if (octet & 0xC0) != 0x80 {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid trailing UTF-8 octet",
|
||||
parser.offset+k, int(octet))
|
||||
}
|
||||
|
||||
// Decode the octet.
|
||||
value = (value << 6) + rune(octet&0x3F)
|
||||
}
|
||||
|
||||
// Check the length of the sequence against the value.
|
||||
switch {
|
||||
case width == 1:
|
||||
case width == 2 && value >= 0x80:
|
||||
case width == 3 && value >= 0x800:
|
||||
case width == 4 && value >= 0x10000:
|
||||
default:
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid length of a UTF-8 sequence",
|
||||
parser.offset, -1)
|
||||
}
|
||||
|
||||
// Check the range of the value.
|
||||
if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid Unicode character",
|
||||
parser.offset, int(value))
|
||||
}
|
||||
|
||||
case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
|
||||
var low, high int
|
||||
if parser.encoding == yaml_UTF16LE_ENCODING {
|
||||
low, high = 0, 1
|
||||
} else {
|
||||
low, high = 1, 0
|
||||
}
|
||||
|
||||
// The UTF-16 encoding is not as simple as one might
|
||||
// naively think. Check RFC 2781
|
||||
// (http://www.ietf.org/rfc/rfc2781.txt).
|
||||
//
|
||||
// Normally, two subsequent bytes describe a Unicode
|
||||
// character. However a special technique (called a
|
||||
// surrogate pair) is used for specifying character
|
||||
// values larger than 0xFFFF.
|
||||
//
|
||||
// A surrogate pair consists of two pseudo-characters:
|
||||
// high surrogate area (0xD800-0xDBFF)
|
||||
// low surrogate area (0xDC00-0xDFFF)
|
||||
//
|
||||
// The following formulas are used for decoding
|
||||
// and encoding characters using surrogate pairs:
|
||||
//
|
||||
// U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF)
|
||||
// U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF)
|
||||
// W1 = 110110yyyyyyyyyy
|
||||
// W2 = 110111xxxxxxxxxx
|
||||
//
|
||||
// where U is the character value, W1 is the high surrogate
|
||||
// area, W2 is the low surrogate area.
|
||||
|
||||
// Check for incomplete UTF-16 character.
|
||||
if raw_unread < 2 {
|
||||
if parser.eof {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"incomplete UTF-16 character",
|
||||
parser.offset, -1)
|
||||
}
|
||||
break inner
|
||||
}
|
||||
|
||||
// Get the character.
|
||||
value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
|
||||
(rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)
|
||||
|
||||
// Check for unexpected low surrogate area.
|
||||
if value&0xFC00 == 0xDC00 {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"unexpected low surrogate area",
|
||||
parser.offset, int(value))
|
||||
}
|
||||
|
||||
// Check for a high surrogate area.
|
||||
if value&0xFC00 == 0xD800 {
|
||||
width = 4
|
||||
|
||||
// Check for incomplete surrogate pair.
|
||||
if raw_unread < 4 {
|
||||
if parser.eof {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"incomplete UTF-16 surrogate pair",
|
||||
parser.offset, -1)
|
||||
}
|
||||
break inner
|
||||
}
|
||||
|
||||
// Get the next character.
|
||||
value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
|
||||
(rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)
|
||||
|
||||
// Check for a low surrogate area.
|
||||
if value2&0xFC00 != 0xDC00 {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"expected low surrogate area",
|
||||
parser.offset+2, int(value2))
|
||||
}
|
||||
|
||||
// Generate the value of the surrogate pair.
|
||||
value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
|
||||
} else {
|
||||
width = 2
|
||||
}
|
||||
|
||||
default:
|
||||
panic("impossible")
|
||||
}
|
||||
|
||||
// Check if the character is in the allowed range:
|
||||
// #x9 | #xA | #xD | [#x20-#x7E] (8 bit)
|
||||
// | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit)
|
||||
// | [#x10000-#x10FFFF] (32 bit)
|
||||
switch {
|
||||
case value == 0x09:
|
||||
case value == 0x0A:
|
||||
case value == 0x0D:
|
||||
case value >= 0x20 && value <= 0x7E:
|
||||
case value == 0x85:
|
||||
case value >= 0xA0 && value <= 0xD7FF:
|
||||
case value >= 0xE000 && value <= 0xFFFD:
|
||||
case value >= 0x10000 && value <= 0x10FFFF:
|
||||
default:
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"control characters are not allowed",
|
||||
parser.offset, int(value))
|
||||
}
|
||||
|
||||
// Move the raw pointers.
|
||||
parser.raw_buffer_pos += width
|
||||
parser.offset += width
|
||||
|
||||
// Finally put the character into the buffer.
|
||||
if value <= 0x7F {
|
||||
// 0000 0000-0000 007F . 0xxxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(value)
|
||||
buffer_len += 1
|
||||
} else if value <= 0x7FF {
|
||||
// 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
|
||||
parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
|
||||
buffer_len += 2
|
||||
} else if value <= 0xFFFF {
|
||||
// 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
|
||||
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
|
||||
parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
|
||||
buffer_len += 3
|
||||
} else {
|
||||
// 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
|
||||
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
|
||||
parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
|
||||
parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
|
||||
buffer_len += 4
|
||||
}
|
||||
|
||||
parser.unread++
|
||||
}
|
||||
|
||||
// On EOF, put NUL into the buffer and return.
|
||||
if parser.eof {
|
||||
parser.buffer[buffer_len] = 0
|
||||
buffer_len++
|
||||
parser.unread++
|
||||
break
|
||||
}
|
||||
}
|
||||
parser.buffer = parser.buffer[:buffer_len]
|
||||
return true
|
||||
}
|
||||
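A worked example of the surrogate-pair formula documented inside yaml_parser_update_buffer above; the sample code point is chosen purely for illustration and is not taken from the parser itself.

package main

import "fmt"

func main() {
	// U+1F600 is transmitted in UTF-16 as the surrogate pair W1=0xD83D, W2=0xDE00.
	// Applying U = 0x10000 + ((W1 & 0x3FF) << 10) + (W2 & 0x3FF) recovers the code point.
	w1, w2 := rune(0xD83D), rune(0xDE00)
	u := 0x10000 + ((w1 & 0x3FF) << 10) + (w2 & 0x3FF)
	fmt.Printf("U+%X\n", u) // U+1F600
}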
208
vendor/gopkg.in/yaml.v2/resolve.go
generated
vendored
Normal file
@@ -0,0 +1,208 @@
package yaml
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"math"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type resolveMapItem struct {
|
||||
value interface{}
|
||||
tag string
|
||||
}
|
||||
|
||||
var resolveTable = make([]byte, 256)
|
||||
var resolveMap = make(map[string]resolveMapItem)
|
||||
|
||||
func init() {
|
||||
t := resolveTable
|
||||
t[int('+')] = 'S' // Sign
|
||||
t[int('-')] = 'S'
|
||||
for _, c := range "0123456789" {
|
||||
t[int(c)] = 'D' // Digit
|
||||
}
|
||||
for _, c := range "yYnNtTfFoO~" {
|
||||
t[int(c)] = 'M' // In map
|
||||
}
|
||||
t[int('.')] = '.' // Float (potentially in map)
|
||||
|
||||
var resolveMapList = []struct {
|
||||
v interface{}
|
||||
tag string
|
||||
l []string
|
||||
}{
|
||||
{true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
|
||||
{true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
|
||||
{true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
|
||||
{false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
|
||||
{false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
|
||||
{false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
|
||||
{nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
|
||||
{math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
|
||||
{math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
|
||||
{math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
|
||||
{math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
|
||||
{"<<", yaml_MERGE_TAG, []string{"<<"}},
|
||||
}
|
||||
|
||||
m := resolveMap
|
||||
for _, item := range resolveMapList {
|
||||
for _, s := range item.l {
|
||||
m[s] = resolveMapItem{item.v, item.tag}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const longTagPrefix = "tag:yaml.org,2002:"
|
||||
|
||||
func shortTag(tag string) string {
|
||||
// TODO This can easily be made faster and produce less garbage.
|
||||
if strings.HasPrefix(tag, longTagPrefix) {
|
||||
return "!!" + tag[len(longTagPrefix):]
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
func longTag(tag string) string {
|
||||
if strings.HasPrefix(tag, "!!") {
|
||||
return longTagPrefix + tag[2:]
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
func resolvableTag(tag string) bool {
|
||||
switch tag {
|
||||
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var yamlStyleFloat = regexp.MustCompile(`^[-+]?[0-9]*\.?[0-9]+([eE][-+][0-9]+)?$`)
|
||||
|
||||
func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||
if !resolvableTag(tag) {
|
||||
return tag, in
|
||||
}
|
||||
|
||||
defer func() {
|
||||
switch tag {
|
||||
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
|
||||
return
|
||||
}
|
||||
failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
|
||||
}()
|
||||
|
||||
// Any data is accepted as a !!str or !!binary.
|
||||
// Otherwise, the prefix is enough of a hint about what it might be.
|
||||
hint := byte('N')
|
||||
if in != "" {
|
||||
hint = resolveTable[in[0]]
|
||||
}
|
||||
if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
|
||||
// Handle things we can lookup in a map.
|
||||
if item, ok := resolveMap[in]; ok {
|
||||
return item.tag, item.value
|
||||
}
|
||||
|
||||
// Base 60 floats are a bad idea, were dropped in YAML 1.2, and
|
||||
// are purposefully unsupported here. They're still quoted on
|
||||
// the way out for compatibility with other parsers, though.
|
||||
|
||||
switch hint {
|
||||
case 'M':
|
||||
// We've already checked the map above.
|
||||
|
||||
case '.':
|
||||
// Not in the map, so maybe a normal float.
|
||||
floatv, err := strconv.ParseFloat(in, 64)
|
||||
if err == nil {
|
||||
return yaml_FLOAT_TAG, floatv
|
||||
}
|
||||
|
||||
case 'D', 'S':
|
||||
// Int, float, or timestamp.
|
||||
plain := strings.Replace(in, "_", "", -1)
|
||||
intv, err := strconv.ParseInt(plain, 0, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain, 0, 64)
|
||||
if err == nil {
|
||||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
if yamlStyleFloat.MatchString(plain) {
|
||||
floatv, err := strconv.ParseFloat(plain, 64)
|
||||
if err == nil {
|
||||
return yaml_FLOAT_TAG, floatv
|
||||
}
|
||||
}
|
||||
if strings.HasPrefix(plain, "0b") {
|
||||
intv, err := strconv.ParseInt(plain[2:], 2, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain[2:], 2, 64)
|
||||
if err == nil {
|
||||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
} else if strings.HasPrefix(plain, "-0b") {
|
||||
intv, err := strconv.ParseInt(plain[3:], 2, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, -int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, -intv
|
||||
}
|
||||
}
|
||||
}
|
||||
// XXX Handle timestamps here.
|
||||
|
||||
default:
|
||||
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
|
||||
}
|
||||
}
|
||||
if tag == yaml_BINARY_TAG {
|
||||
return yaml_BINARY_TAG, in
|
||||
}
|
||||
if utf8.ValidString(in) {
|
||||
return yaml_STR_TAG, in
|
||||
}
|
||||
return yaml_BINARY_TAG, encodeBase64(in)
|
||||
}
|
||||
|
||||
// encodeBase64 encodes s as base64 that is broken up into multiple lines
|
||||
// as appropriate for the resulting length.
|
||||
func encodeBase64(s string) string {
|
||||
const lineLen = 70
|
||||
encLen := base64.StdEncoding.EncodedLen(len(s))
|
||||
lines := encLen/lineLen + 1
|
||||
buf := make([]byte, encLen*2+lines)
|
||||
in := buf[0:encLen]
|
||||
out := buf[encLen:]
|
||||
base64.StdEncoding.Encode(in, []byte(s))
|
||||
k := 0
|
||||
for i := 0; i < len(in); i += lineLen {
|
||||
j := i + lineLen
|
||||
if j > len(in) {
|
||||
j = len(in)
|
||||
}
|
||||
k += copy(out[k:], in[i:j])
|
||||
if lines > 1 {
|
||||
out[k] = '\n'
|
||||
k++
|
||||
}
|
||||
}
|
||||
return string(out[:k])
|
||||
}
|
||||
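A minimal sketch of the implicit resolution implemented by resolve above, again assuming only the public gopkg.in/yaml.v2 API: untagged plain scalars come back as bool, int, float64, or nil when decoded into an interface{}, while quoted scalars stay strings.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	var out map[string]interface{}
	doc := "a: yes\nb: 42\nc: 3.14\nd: ~\ne: \"42\"\n"
	if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
		panic(err)
	}
	// a is bool(true), b is int, c is float64, d is nil, e is the string "42".
	fmt.Printf("%T %T %T %v %T\n", out["a"], out["b"], out["c"], out["d"], out["e"])
}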
2711
vendor/gopkg.in/yaml.v2/scannerc.go
generated
vendored
Normal file
File diff suppressed because it is too large
104
vendor/gopkg.in/yaml.v2/sorter.go
generated
vendored
Normal file
@@ -0,0 +1,104 @@
package yaml
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type keyList []reflect.Value
|
||||
|
||||
func (l keyList) Len() int { return len(l) }
|
||||
func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||
func (l keyList) Less(i, j int) bool {
|
||||
a := l[i]
|
||||
b := l[j]
|
||||
ak := a.Kind()
|
||||
bk := b.Kind()
|
||||
for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
|
||||
a = a.Elem()
|
||||
ak = a.Kind()
|
||||
}
|
||||
for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
|
||||
b = b.Elem()
|
||||
bk = b.Kind()
|
||||
}
|
||||
af, aok := keyFloat(a)
|
||||
bf, bok := keyFloat(b)
|
||||
if aok && bok {
|
||||
if af != bf {
|
||||
return af < bf
|
||||
}
|
||||
if ak != bk {
|
||||
return ak < bk
|
||||
}
|
||||
return numLess(a, b)
|
||||
}
|
||||
if ak != reflect.String || bk != reflect.String {
|
||||
return ak < bk
|
||||
}
|
||||
ar, br := []rune(a.String()), []rune(b.String())
|
||||
for i := 0; i < len(ar) && i < len(br); i++ {
|
||||
if ar[i] == br[i] {
|
||||
continue
|
||||
}
|
||||
al := unicode.IsLetter(ar[i])
|
||||
bl := unicode.IsLetter(br[i])
|
||||
if al && bl {
|
||||
return ar[i] < br[i]
|
||||
}
|
||||
if al || bl {
|
||||
return bl
|
||||
}
|
||||
var ai, bi int
|
||||
var an, bn int64
|
||||
for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
|
||||
an = an*10 + int64(ar[ai]-'0')
|
||||
}
|
||||
for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
|
||||
bn = bn*10 + int64(br[bi]-'0')
|
||||
}
|
||||
if an != bn {
|
||||
return an < bn
|
||||
}
|
||||
if ai != bi {
|
||||
return ai < bi
|
||||
}
|
||||
return ar[i] < br[i]
|
||||
}
|
||||
return len(ar) < len(br)
|
||||
}
|
||||
|
||||
// keyFloat returns a float value for v if it is a number/bool
|
||||
// and whether it is a number/bool or not.
|
||||
func keyFloat(v reflect.Value) (f float64, ok bool) {
|
||||
switch v.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return float64(v.Int()), true
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float(), true
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return float64(v.Uint()), true
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return 1, true
|
||||
}
|
||||
return 0, true
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// numLess returns whether a < b.
|
||||
// a and b must necessarily have the same kind.
|
||||
func numLess(a, b reflect.Value) bool {
|
||||
switch a.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return a.Int() < b.Int()
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return a.Float() < b.Float()
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return a.Uint() < b.Uint()
|
||||
case reflect.Bool:
|
||||
return !a.Bool() && b.Bool()
|
||||
}
|
||||
panic("not a number")
|
||||
}
|
||||
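A short sketch of the key ordering that keyList gives marshalled maps, assuming only the public gopkg.in/yaml.v2 API: numeric keys sort numerically ahead of strings, and digit runs inside strings compare by value rather than lexically.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	m := map[interface{}]int{10: 0, 2: 0, "b10": 0, "b2": 0, "a": 0}
	data, err := yaml.Marshal(m)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(data))
	// 2: 0
	// 10: 0
	// a: 0
	// b2: 0
	// b10: 0
}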
12
vendor/gopkg.in/yaml.v2/suite_test.go
generated
vendored
Normal file
@@ -0,0 +1,12 @@
package yaml_test
|
||||
|
||||
import (
|
||||
. "gopkg.in/check.v1"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test(t *testing.T) { TestingT(t) }
|
||||
|
||||
type S struct{}
|
||||
|
||||
var _ = Suite(&S{})
|
||||
89
vendor/gopkg.in/yaml.v2/writerc.go
generated
vendored
Normal file
@@ -0,0 +1,89 @@
package yaml
|
||||
|
||||
// Set the writer error and return false.
|
||||
func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
|
||||
emitter.error = yaml_WRITER_ERROR
|
||||
emitter.problem = problem
|
||||
return false
|
||||
}
|
||||
|
||||
// Flush the output buffer.
|
||||
func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
|
||||
if emitter.write_handler == nil {
|
||||
panic("write handler not set")
|
||||
}
|
||||
|
||||
// Check if the buffer is empty.
|
||||
if emitter.buffer_pos == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
// If the output encoding is UTF-8, we don't need to recode the buffer.
|
||||
if emitter.encoding == yaml_UTF8_ENCODING {
|
||||
if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
|
||||
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||
}
|
||||
emitter.buffer_pos = 0
|
||||
return true
|
||||
}
|
||||
|
||||
// Recode the buffer into the raw buffer.
|
||||
var low, high int
|
||||
if emitter.encoding == yaml_UTF16LE_ENCODING {
|
||||
low, high = 0, 1
|
||||
} else {
|
||||
high, low = 1, 0
|
||||
}
|
||||
|
||||
pos := 0
|
||||
for pos < emitter.buffer_pos {
|
||||
// See the "reader.c" code for more details on UTF-8 encoding. Note
|
||||
// that we assume that the buffer contains a valid UTF-8 sequence.
|
||||
|
||||
// Read the next UTF-8 character.
|
||||
octet := emitter.buffer[pos]
|
||||
|
||||
var w int
|
||||
var value rune
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
w, value = 1, rune(octet&0x7F)
|
||||
case octet&0xE0 == 0xC0:
|
||||
w, value = 2, rune(octet&0x1F)
|
||||
case octet&0xF0 == 0xE0:
|
||||
w, value = 3, rune(octet&0x0F)
|
||||
case octet&0xF8 == 0xF0:
|
||||
w, value = 4, rune(octet&0x07)
|
||||
}
|
||||
for k := 1; k < w; k++ {
|
||||
octet = emitter.buffer[pos+k]
|
||||
value = (value << 6) + (rune(octet) & 0x3F)
|
||||
}
|
||||
pos += w
|
||||
|
||||
// Write the character.
|
||||
if value < 0x10000 {
|
||||
var b [2]byte
|
||||
b[high] = byte(value >> 8)
|
||||
b[low] = byte(value & 0xFF)
|
||||
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
|
||||
} else {
|
||||
// Write the character using a surrogate pair (check "reader.c").
|
||||
var b [4]byte
|
||||
value -= 0x10000
|
||||
b[high] = byte(0xD8 + (value >> 18))
|
||||
b[low] = byte((value >> 10) & 0xFF)
|
||||
b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
|
||||
b[low+2] = byte(value & 0xFF)
|
||||
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
|
||||
}
|
||||
}
|
||||
|
||||
// Write the raw buffer.
|
||||
if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
|
||||
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||
}
|
||||
emitter.buffer_pos = 0
|
||||
emitter.raw_buffer = emitter.raw_buffer[:0]
|
||||
return true
|
||||
}
|
||||
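Note on the recoding loop above: when the emitter's encoding is UTF-16, code points at or above U+10000 are written as a surrogate pair. The following standalone sketch is not part of the vendored file; it only shows, via the standard library, the surrogate pair that the flush loop targets, using U+1F600 as an arbitrary example rune.

package main

import (
    "fmt"
    "unicode/utf16"
)

func main() {
    // utf16.EncodeRune splits a supplementary-plane rune into its high and
    // low surrogates, the same pair the flush loop emits byte by byte.
    hi, lo := utf16.EncodeRune(0x1F600)
    fmt.Printf("%04X %04X\n", hi, lo) // D83D DE00
}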
357
vendor/gopkg.in/yaml.v2/yaml.go
generated
vendored
Normal file
|
|
@@ -0,0 +1,357 @@
|
|||
// Package yaml implements YAML support for the Go language.
|
||||
//
|
||||
// Source code and other details for the project are available at GitHub:
|
||||
//
|
||||
// https://github.com/go-yaml/yaml
|
||||
//
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// MapSlice encodes and decodes as a YAML map.
|
||||
// The order of keys is preserved when encoding and decoding.
|
||||
type MapSlice []MapItem
|
||||
|
||||
// MapItem is an item in a MapSlice.
|
||||
type MapItem struct {
|
||||
Key, Value interface{}
|
||||
}
|
||||
|
||||
// The Unmarshaler interface may be implemented by types to customize their
|
||||
// behavior when being unmarshaled from a YAML document. The UnmarshalYAML
|
||||
// method receives a function that may be called to unmarshal the original
|
||||
// YAML value into a field or variable. It is safe to call the unmarshal
|
||||
// function parameter more than once if necessary.
|
||||
type Unmarshaler interface {
|
||||
UnmarshalYAML(unmarshal func(interface{}) error) error
|
||||
}
|
||||
|
||||
// The Marshaler interface may be implemented by types to customize their
|
||||
// behavior when being marshaled into a YAML document. The returned value
|
||||
// is marshaled in place of the original value implementing Marshaler.
|
||||
//
|
||||
// If an error is returned by MarshalYAML, the marshaling procedure stops
|
||||
// and returns with the provided error.
|
||||
type Marshaler interface {
|
||||
MarshalYAML() (interface{}, error)
|
||||
}
|
||||
|
||||
// Unmarshal decodes the first document found within the in byte slice
|
||||
// and assigns decoded values into the out value.
|
||||
//
|
||||
// Maps and pointers (to a struct, string, int, etc) are accepted as out
|
||||
// values. If an internal pointer within a struct is not initialized,
|
||||
// the yaml package will initialize it if necessary for unmarshalling
|
||||
// the provided data. The out parameter must not be nil.
|
||||
//
|
||||
// The type of the decoded values should be compatible with the respective
|
||||
// values in out. If one or more values cannot be decoded due to type
|
||||
// mismatches, decoding continues partially until the end of the YAML
|
||||
// content, and a *yaml.TypeError is returned with details for all
|
||||
// missed values.
|
||||
//
|
||||
// Struct fields are only unmarshalled if they are exported (have an
|
||||
// upper case first letter), and are unmarshalled using the field name
|
||||
// lowercased as the default key. Custom keys may be defined via the
|
||||
// "yaml" name in the field tag: the content preceding the first comma
|
||||
// is used as the key, and the following comma-separated options are
|
||||
// used to tweak the marshalling process (see Marshal).
|
||||
// Conflicting names result in a runtime error.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int `yaml:"a,omitempty"`
|
||||
// B int
|
||||
// }
|
||||
// var t T
|
||||
// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
|
||||
//
|
||||
// See the documentation of Marshal for the format of tags and a list of
|
||||
// supported tag options.
|
||||
//
|
||||
func Unmarshal(in []byte, out interface{}) (err error) {
|
||||
return unmarshal(in, out, false)
|
||||
}
|
||||
|
||||
// UnmarshalStrict is like Unmarshal except that any fields that are found
|
||||
// in the data that do not have corresponding struct members will result in
|
||||
// an error.
|
||||
func UnmarshalStrict(in []byte, out interface{}) (err error) {
|
||||
return unmarshal(in, out, true)
|
||||
}
|
||||
|
||||
func unmarshal(in []byte, out interface{}, strict bool) (err error) {
|
||||
defer handleErr(&err)
|
||||
d := newDecoder(strict)
|
||||
p := newParser(in)
|
||||
defer p.destroy()
|
||||
node := p.parse()
|
||||
if node != nil {
|
||||
v := reflect.ValueOf(out)
|
||||
if v.Kind() == reflect.Ptr && !v.IsNil() {
|
||||
v = v.Elem()
|
||||
}
|
||||
d.unmarshal(node, v)
|
||||
}
|
||||
if len(d.terrors) > 0 {
|
||||
return &TypeError{d.terrors}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Marshal serializes the value provided into a YAML document. The structure
|
||||
// of the generated document will reflect the structure of the value itself.
|
||||
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
|
||||
//
|
||||
// Struct fields are only unmarshalled if they are exported (have an upper case
|
||||
// first letter), and are unmarshalled using the field name lowercased as the
|
||||
// default key. Custom keys may be defined via the "yaml" name in the field
|
||||
// tag: the content preceding the first comma is used as the key, and the
|
||||
// following comma-separated options are used to tweak the marshalling process.
|
||||
// Conflicting names result in a runtime error.
|
||||
//
|
||||
// The field tag format accepted is:
|
||||
//
|
||||
// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
|
||||
//
|
||||
// The following flags are currently supported:
|
||||
//
|
||||
// omitempty Only include the field if it's not set to the zero
|
||||
// value for the type or to empty slices or maps.
|
||||
// Does not apply to zero valued structs.
|
||||
//
|
||||
// flow Marshal using a flow style (useful for structs,
|
||||
// sequences and maps).
|
||||
//
|
||||
// inline Inline the field, which must be a struct or a map,
|
||||
// causing all of its fields or keys to be processed as if
|
||||
// they were part of the outer struct. For maps, keys must
|
||||
// not conflict with the yaml keys of other struct fields.
|
||||
//
|
||||
// In addition, if the key is "-", the field is ignored.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int "a,omitempty"
|
||||
// B int
|
||||
// }
|
||||
// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
|
||||
// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
|
||||
//
|
||||
func Marshal(in interface{}) (out []byte, err error) {
|
||||
defer handleErr(&err)
|
||||
e := newEncoder()
|
||||
defer e.destroy()
|
||||
e.marshal("", reflect.ValueOf(in))
|
||||
e.finish()
|
||||
out = e.out
|
||||
return
|
||||
}
|
||||
|
||||
func handleErr(err *error) {
|
||||
if v := recover(); v != nil {
|
||||
if e, ok := v.(yamlError); ok {
|
||||
*err = e.err
|
||||
} else {
|
||||
panic(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type yamlError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func fail(err error) {
|
||||
panic(yamlError{err})
|
||||
}
|
||||
|
||||
func failf(format string, args ...interface{}) {
|
||||
panic(yamlError{fmt.Errorf("yaml: "+format, args...)})
|
||||
}
|
||||
|
||||
// A TypeError is returned by Unmarshal when one or more fields in
|
||||
// the YAML document cannot be properly decoded into the requested
|
||||
// types. When this error is returned, the value is still
|
||||
// unmarshaled partially.
|
||||
type TypeError struct {
|
||||
Errors []string
|
||||
}
|
||||
|
||||
func (e *TypeError) Error() string {
|
||||
return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n "))
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
// Maintain a mapping of keys to structure field indexes
|
||||
|
||||
// The code in this section was copied from mgo/bson.
|
||||
|
||||
// structInfo holds details for the serialization of fields of
|
||||
// a given struct.
|
||||
type structInfo struct {
|
||||
FieldsMap map[string]fieldInfo
|
||||
FieldsList []fieldInfo
|
||||
|
||||
// InlineMap is the number of the field in the struct that
|
||||
// contains an ,inline map, or -1 if there's none.
|
||||
InlineMap int
|
||||
}
|
||||
|
||||
type fieldInfo struct {
|
||||
Key string
|
||||
Num int
|
||||
OmitEmpty bool
|
||||
Flow bool
|
||||
|
||||
// Inline holds the field index if the field is part of an inlined struct.
|
||||
Inline []int
|
||||
}
|
||||
|
||||
var structMap = make(map[reflect.Type]*structInfo)
|
||||
var fieldMapMutex sync.RWMutex
|
||||
|
||||
func getStructInfo(st reflect.Type) (*structInfo, error) {
|
||||
fieldMapMutex.RLock()
|
||||
sinfo, found := structMap[st]
|
||||
fieldMapMutex.RUnlock()
|
||||
if found {
|
||||
return sinfo, nil
|
||||
}
|
||||
|
||||
n := st.NumField()
|
||||
fieldsMap := make(map[string]fieldInfo)
|
||||
fieldsList := make([]fieldInfo, 0, n)
|
||||
inlineMap := -1
|
||||
for i := 0; i != n; i++ {
|
||||
field := st.Field(i)
|
||||
if field.PkgPath != "" && !field.Anonymous {
|
||||
continue // Private field
|
||||
}
|
||||
|
||||
info := fieldInfo{Num: i}
|
||||
|
||||
tag := field.Tag.Get("yaml")
|
||||
if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
|
||||
tag = string(field.Tag)
|
||||
}
|
||||
if tag == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
inline := false
|
||||
fields := strings.Split(tag, ",")
|
||||
if len(fields) > 1 {
|
||||
for _, flag := range fields[1:] {
|
||||
switch flag {
|
||||
case "omitempty":
|
||||
info.OmitEmpty = true
|
||||
case "flow":
|
||||
info.Flow = true
|
||||
case "inline":
|
||||
inline = true
|
||||
default:
|
||||
return nil, fmt.Errorf("Unsupported flag %q in tag %q of type %s", flag, tag, st)
|
||||
}
|
||||
}
|
||||
tag = fields[0]
|
||||
}
|
||||
|
||||
if inline {
|
||||
switch field.Type.Kind() {
|
||||
case reflect.Map:
|
||||
if inlineMap >= 0 {
|
||||
return nil, errors.New("Multiple ,inline maps in struct " + st.String())
|
||||
}
|
||||
if field.Type.Key() != reflect.TypeOf("") {
|
||||
return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
|
||||
}
|
||||
inlineMap = info.Num
|
||||
case reflect.Struct:
|
||||
sinfo, err := getStructInfo(field.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, finfo := range sinfo.FieldsList {
|
||||
if _, found := fieldsMap[finfo.Key]; found {
|
||||
msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
if finfo.Inline == nil {
|
||||
finfo.Inline = []int{i, finfo.Num}
|
||||
} else {
|
||||
finfo.Inline = append([]int{i}, finfo.Inline...)
|
||||
}
|
||||
fieldsMap[finfo.Key] = finfo
|
||||
fieldsList = append(fieldsList, finfo)
|
||||
}
|
||||
default:
|
||||
//return nil, errors.New("Option ,inline needs a struct value or map field")
|
||||
return nil, errors.New("Option ,inline needs a struct value field")
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if tag != "" {
|
||||
info.Key = tag
|
||||
} else {
|
||||
info.Key = strings.ToLower(field.Name)
|
||||
}
|
||||
|
||||
if _, found = fieldsMap[info.Key]; found {
|
||||
msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
|
||||
fieldsList = append(fieldsList, info)
|
||||
fieldsMap[info.Key] = info
|
||||
}
|
||||
|
||||
sinfo = &structInfo{fieldsMap, fieldsList, inlineMap}
|
||||
|
||||
fieldMapMutex.Lock()
|
||||
structMap[st] = sinfo
|
||||
fieldMapMutex.Unlock()
|
||||
return sinfo, nil
|
||||
}
|
||||
|
||||
func isZero(v reflect.Value) bool {
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
return len(v.String()) == 0
|
||||
case reflect.Interface, reflect.Ptr:
|
||||
return v.IsNil()
|
||||
case reflect.Slice:
|
||||
return v.Len() == 0
|
||||
case reflect.Map:
|
||||
return v.Len() == 0
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v.Int() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v.Uint() == 0
|
||||
case reflect.Bool:
|
||||
return !v.Bool()
|
||||
case reflect.Struct:
|
||||
vt := v.Type()
|
||||
for i := v.NumField() - 1; i >= 0; i-- {
|
||||
if vt.Field(i).PkgPath != "" {
|
||||
continue // Private field
|
||||
}
|
||||
if !isZero(v.Field(i)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
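For orientation only, and not part of the vendored files: a minimal sketch that exercises the Marshal and Unmarshal examples given in the doc comments above, assuming gopkg.in/yaml.v2 resolves to this vendored package.

package main

import (
    "fmt"

    yaml "gopkg.in/yaml.v2"
)

// T mirrors the example type used in the Unmarshal and Marshal doc comments.
type T struct {
    F int `yaml:"a,omitempty"`
    B int
}

func main() {
    out, _ := yaml.Marshal(&T{B: 2})
    fmt.Printf("%s", out) // "b: 2\n", as documented above

    var t T
    if err := yaml.Unmarshal([]byte("a: 1\nb: 2"), &t); err != nil {
        panic(err)
    }
    fmt.Println(t.F, t.B) // 1 2
}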
716
vendor/gopkg.in/yaml.v2/yamlh.go
generated
vendored
Normal file
|
|
@@ -0,0 +1,716 @@
|
|||
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// The version directive data.
|
||||
type yaml_version_directive_t struct {
|
||||
major int8 // The major version number.
|
||||
minor int8 // The minor version number.
|
||||
}
|
||||
|
||||
// The tag directive data.
|
||||
type yaml_tag_directive_t struct {
|
||||
handle []byte // The tag handle.
|
||||
prefix []byte // The tag prefix.
|
||||
}
|
||||
|
||||
type yaml_encoding_t int
|
||||
|
||||
// The stream encoding.
|
||||
const (
|
||||
// Let the parser choose the encoding.
|
||||
yaml_ANY_ENCODING yaml_encoding_t = iota
|
||||
|
||||
yaml_UTF8_ENCODING // The default UTF-8 encoding.
|
||||
yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
|
||||
yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
|
||||
)
|
||||
|
||||
type yaml_break_t int
|
||||
|
||||
// Line break types.
|
||||
const (
|
||||
// Let the parser choose the break type.
|
||||
yaml_ANY_BREAK yaml_break_t = iota
|
||||
|
||||
yaml_CR_BREAK // Use CR for line breaks (Mac style).
|
||||
yaml_LN_BREAK // Use LN for line breaks (Unix style).
|
||||
yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
|
||||
)
|
||||
|
||||
type yaml_error_type_t int
|
||||
|
||||
// Many bad things could happen with the parser and emitter.
|
||||
const (
|
||||
// No error is produced.
|
||||
yaml_NO_ERROR yaml_error_type_t = iota
|
||||
|
||||
yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory.
|
||||
yaml_READER_ERROR // Cannot read or decode the input stream.
|
||||
yaml_SCANNER_ERROR // Cannot scan the input stream.
|
||||
yaml_PARSER_ERROR // Cannot parse the input stream.
|
||||
yaml_COMPOSER_ERROR // Cannot compose a YAML document.
|
||||
yaml_WRITER_ERROR // Cannot write to the output stream.
|
||||
yaml_EMITTER_ERROR // Cannot emit a YAML stream.
|
||||
)
|
||||
|
||||
// The pointer position.
|
||||
type yaml_mark_t struct {
|
||||
index int // The position index.
|
||||
line int // The position line.
|
||||
column int // The position column.
|
||||
}
|
||||
|
||||
// Node Styles
|
||||
|
||||
type yaml_style_t int8
|
||||
|
||||
type yaml_scalar_style_t yaml_style_t
|
||||
|
||||
// Scalar styles.
|
||||
const (
|
||||
// Let the emitter choose the style.
|
||||
yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota
|
||||
|
||||
yaml_PLAIN_SCALAR_STYLE // The plain scalar style.
|
||||
yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
|
||||
yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
|
||||
yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
|
||||
yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
|
||||
)
|
||||
|
||||
type yaml_sequence_style_t yaml_style_t
|
||||
|
||||
// Sequence styles.
|
||||
const (
|
||||
// Let the emitter choose the style.
|
||||
yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota
|
||||
|
||||
yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
|
||||
yaml_FLOW_SEQUENCE_STYLE // The flow sequence style.
|
||||
)
|
||||
|
||||
type yaml_mapping_style_t yaml_style_t
|
||||
|
||||
// Mapping styles.
|
||||
const (
|
||||
// Let the emitter choose the style.
|
||||
yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota
|
||||
|
||||
yaml_BLOCK_MAPPING_STYLE // The block mapping style.
|
||||
yaml_FLOW_MAPPING_STYLE // The flow mapping style.
|
||||
)
|
||||
|
||||
// Tokens
|
||||
|
||||
type yaml_token_type_t int
|
||||
|
||||
// Token types.
|
||||
const (
|
||||
// An empty token.
|
||||
yaml_NO_TOKEN yaml_token_type_t = iota
|
||||
|
||||
yaml_STREAM_START_TOKEN // A STREAM-START token.
|
||||
yaml_STREAM_END_TOKEN // A STREAM-END token.
|
||||
|
||||
yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
|
||||
yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token.
|
||||
yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token.
|
||||
yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token.
|
||||
|
||||
yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
|
||||
yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-MAPPING-START token.
|
||||
yaml_BLOCK_END_TOKEN // A BLOCK-END token.
|
||||
|
||||
yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
|
||||
yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token.
|
||||
yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token.
|
||||
yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token.
|
||||
|
||||
yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
|
||||
yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token.
|
||||
yaml_KEY_TOKEN // A KEY token.
|
||||
yaml_VALUE_TOKEN // A VALUE token.
|
||||
|
||||
yaml_ALIAS_TOKEN // An ALIAS token.
|
||||
yaml_ANCHOR_TOKEN // An ANCHOR token.
|
||||
yaml_TAG_TOKEN // A TAG token.
|
||||
yaml_SCALAR_TOKEN // A SCALAR token.
|
||||
)
|
||||
|
||||
func (tt yaml_token_type_t) String() string {
|
||||
switch tt {
|
||||
case yaml_NO_TOKEN:
|
||||
return "yaml_NO_TOKEN"
|
||||
case yaml_STREAM_START_TOKEN:
|
||||
return "yaml_STREAM_START_TOKEN"
|
||||
case yaml_STREAM_END_TOKEN:
|
||||
return "yaml_STREAM_END_TOKEN"
|
||||
case yaml_VERSION_DIRECTIVE_TOKEN:
|
||||
return "yaml_VERSION_DIRECTIVE_TOKEN"
|
||||
case yaml_TAG_DIRECTIVE_TOKEN:
|
||||
return "yaml_TAG_DIRECTIVE_TOKEN"
|
||||
case yaml_DOCUMENT_START_TOKEN:
|
||||
return "yaml_DOCUMENT_START_TOKEN"
|
||||
case yaml_DOCUMENT_END_TOKEN:
|
||||
return "yaml_DOCUMENT_END_TOKEN"
|
||||
case yaml_BLOCK_SEQUENCE_START_TOKEN:
|
||||
return "yaml_BLOCK_SEQUENCE_START_TOKEN"
|
||||
case yaml_BLOCK_MAPPING_START_TOKEN:
|
||||
return "yaml_BLOCK_MAPPING_START_TOKEN"
|
||||
case yaml_BLOCK_END_TOKEN:
|
||||
return "yaml_BLOCK_END_TOKEN"
|
||||
case yaml_FLOW_SEQUENCE_START_TOKEN:
|
||||
return "yaml_FLOW_SEQUENCE_START_TOKEN"
|
||||
case yaml_FLOW_SEQUENCE_END_TOKEN:
|
||||
return "yaml_FLOW_SEQUENCE_END_TOKEN"
|
||||
case yaml_FLOW_MAPPING_START_TOKEN:
|
||||
return "yaml_FLOW_MAPPING_START_TOKEN"
|
||||
case yaml_FLOW_MAPPING_END_TOKEN:
|
||||
return "yaml_FLOW_MAPPING_END_TOKEN"
|
||||
case yaml_BLOCK_ENTRY_TOKEN:
|
||||
return "yaml_BLOCK_ENTRY_TOKEN"
|
||||
case yaml_FLOW_ENTRY_TOKEN:
|
||||
return "yaml_FLOW_ENTRY_TOKEN"
|
||||
case yaml_KEY_TOKEN:
|
||||
return "yaml_KEY_TOKEN"
|
||||
case yaml_VALUE_TOKEN:
|
||||
return "yaml_VALUE_TOKEN"
|
||||
case yaml_ALIAS_TOKEN:
|
||||
return "yaml_ALIAS_TOKEN"
|
||||
case yaml_ANCHOR_TOKEN:
|
||||
return "yaml_ANCHOR_TOKEN"
|
||||
case yaml_TAG_TOKEN:
|
||||
return "yaml_TAG_TOKEN"
|
||||
case yaml_SCALAR_TOKEN:
|
||||
return "yaml_SCALAR_TOKEN"
|
||||
}
|
||||
return "<unknown token>"
|
||||
}
|
||||
|
||||
// The token structure.
|
||||
type yaml_token_t struct {
|
||||
// The token type.
|
||||
typ yaml_token_type_t
|
||||
|
||||
// The start/end of the token.
|
||||
start_mark, end_mark yaml_mark_t
|
||||
|
||||
// The stream encoding (for yaml_STREAM_START_TOKEN).
|
||||
encoding yaml_encoding_t
|
||||
|
||||
// The alias/anchor/scalar value or tag/tag directive handle
|
||||
// (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
|
||||
value []byte
|
||||
|
||||
// The tag suffix (for yaml_TAG_TOKEN).
|
||||
suffix []byte
|
||||
|
||||
// The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
|
||||
prefix []byte
|
||||
|
||||
// The scalar style (for yaml_SCALAR_TOKEN).
|
||||
style yaml_scalar_style_t
|
||||
|
||||
// The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
|
||||
major, minor int8
|
||||
}
|
||||
|
||||
// Events
|
||||
|
||||
type yaml_event_type_t int8
|
||||
|
||||
// Event types.
|
||||
const (
|
||||
// An empty event.
|
||||
yaml_NO_EVENT yaml_event_type_t = iota
|
||||
|
||||
yaml_STREAM_START_EVENT // A STREAM-START event.
|
||||
yaml_STREAM_END_EVENT // A STREAM-END event.
|
||||
yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
|
||||
yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event.
|
||||
yaml_ALIAS_EVENT // An ALIAS event.
|
||||
yaml_SCALAR_EVENT // A SCALAR event.
|
||||
yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
|
||||
yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event.
|
||||
yaml_MAPPING_START_EVENT // A MAPPING-START event.
|
||||
yaml_MAPPING_END_EVENT // A MAPPING-END event.
|
||||
)
|
||||
|
||||
// The event structure.
|
||||
type yaml_event_t struct {
|
||||
|
||||
// The event type.
|
||||
typ yaml_event_type_t
|
||||
|
||||
// The start and end of the event.
|
||||
start_mark, end_mark yaml_mark_t
|
||||
|
||||
// The document encoding (for yaml_STREAM_START_EVENT).
|
||||
encoding yaml_encoding_t
|
||||
|
||||
// The version directive (for yaml_DOCUMENT_START_EVENT).
|
||||
version_directive *yaml_version_directive_t
|
||||
|
||||
// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
|
||||
tag_directives []yaml_tag_directive_t
|
||||
|
||||
// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
|
||||
anchor []byte
|
||||
|
||||
// The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
|
||||
tag []byte
|
||||
|
||||
// The scalar value (for yaml_SCALAR_EVENT).
|
||||
value []byte
|
||||
|
||||
// Is the document start/end indicator implicit, or the tag optional?
|
||||
// (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
|
||||
implicit bool
|
||||
|
||||
// Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
|
||||
quoted_implicit bool
|
||||
|
||||
// The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
|
||||
style yaml_style_t
|
||||
}
|
||||
|
||||
func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) }
|
||||
func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
|
||||
func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) }
|
||||
|
||||
// Nodes
|
||||
|
||||
const (
|
||||
yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null.
|
||||
yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false.
|
||||
yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values.
|
||||
yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values.
|
||||
yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values.
|
||||
yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.
|
||||
|
||||
yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
|
||||
yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mappings.
|
||||
|
||||
// Not in original libyaml.
|
||||
yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
|
||||
yaml_MERGE_TAG = "tag:yaml.org,2002:merge"
|
||||
|
||||
yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str.
|
||||
yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
|
||||
yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map.
|
||||
)
|
||||
|
||||
type yaml_node_type_t int
|
||||
|
||||
// Node types.
|
||||
const (
|
||||
// An empty node.
|
||||
yaml_NO_NODE yaml_node_type_t = iota
|
||||
|
||||
yaml_SCALAR_NODE // A scalar node.
|
||||
yaml_SEQUENCE_NODE // A sequence node.
|
||||
yaml_MAPPING_NODE // A mapping node.
|
||||
)
|
||||
|
||||
// An element of a sequence node.
|
||||
type yaml_node_item_t int
|
||||
|
||||
// An element of a mapping node.
|
||||
type yaml_node_pair_t struct {
|
||||
key int // The key of the element.
|
||||
value int // The value of the element.
|
||||
}
|
||||
|
||||
// The node structure.
|
||||
type yaml_node_t struct {
|
||||
typ yaml_node_type_t // The node type.
|
||||
tag []byte // The node tag.
|
||||
|
||||
// The node data.
|
||||
|
||||
// The scalar parameters (for yaml_SCALAR_NODE).
|
||||
scalar struct {
|
||||
value []byte // The scalar value.
|
||||
length int // The length of the scalar value.
|
||||
style yaml_scalar_style_t // The scalar style.
|
||||
}
|
||||
|
||||
// The sequence parameters (for YAML_SEQUENCE_NODE).
|
||||
sequence struct {
|
||||
items_data []yaml_node_item_t // The stack of sequence items.
|
||||
style yaml_sequence_style_t // The sequence style.
|
||||
}
|
||||
|
||||
// The mapping parameters (for yaml_MAPPING_NODE).
|
||||
mapping struct {
|
||||
pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value).
|
||||
pairs_start *yaml_node_pair_t // The beginning of the stack.
|
||||
pairs_end *yaml_node_pair_t // The end of the stack.
|
||||
pairs_top *yaml_node_pair_t // The top of the stack.
|
||||
style yaml_mapping_style_t // The mapping style.
|
||||
}
|
||||
|
||||
start_mark yaml_mark_t // The beginning of the node.
|
||||
end_mark yaml_mark_t // The end of the node.
|
||||
|
||||
}
|
||||
|
||||
// The document structure.
|
||||
type yaml_document_t struct {
|
||||
|
||||
// The document nodes.
|
||||
nodes []yaml_node_t
|
||||
|
||||
// The version directive.
|
||||
version_directive *yaml_version_directive_t
|
||||
|
||||
// The list of tag directives.
|
||||
tag_directives_data []yaml_tag_directive_t
|
||||
tag_directives_start int // The beginning of the tag directives list.
|
||||
tag_directives_end int // The end of the tag directives list.
|
||||
|
||||
start_implicit int // Is the document start indicator implicit?
|
||||
end_implicit int // Is the document end indicator implicit?
|
||||
|
||||
// The start/end of the document.
|
||||
start_mark, end_mark yaml_mark_t
|
||||
}
|
||||
|
||||
// The prototype of a read handler.
|
||||
//
|
||||
// The read handler is called when the parser needs to read more bytes from the
|
||||
// source. The handler should write not more than size bytes to the buffer.
|
||||
// The number of written bytes should be set to the size_read variable.
|
||||
//
|
||||
// [in,out] data A pointer to an application data specified by
|
||||
// yaml_parser_set_input().
|
||||
// [out] buffer The buffer to write the data from the source.
|
||||
// [in] size The size of the buffer.
|
||||
// [out] size_read The actual number of bytes read from the source.
|
||||
//
|
||||
// On success, the handler should return 1. If the handler failed,
|
||||
// the returned value should be 0. On EOF, the handler should set the
|
||||
// size_read to 0 and return 1.
|
||||
type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
|
||||
|
||||
// This structure holds information about a potential simple key.
|
||||
type yaml_simple_key_t struct {
|
||||
possible bool // Is a simple key possible?
|
||||
required bool // Is a simple key required?
|
||||
token_number int // The number of the token.
|
||||
mark yaml_mark_t // The position mark.
|
||||
}
|
||||
|
||||
// The states of the parser.
|
||||
type yaml_parser_state_t int
|
||||
|
||||
const (
|
||||
yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
|
||||
|
||||
yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document.
|
||||
yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START.
|
||||
yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document.
|
||||
yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END.
|
||||
yaml_PARSE_BLOCK_NODE_STATE // Expect a block node.
|
||||
yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
|
||||
yaml_PARSE_FLOW_NODE_STATE // Expect a flow node.
|
||||
yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence.
|
||||
yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence.
|
||||
yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence.
|
||||
yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
|
||||
yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key.
|
||||
yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value.
|
||||
yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the end of an ordered mapping entry.
|
||||
yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
|
||||
yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
|
||||
yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
|
||||
yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping.
|
||||
yaml_PARSE_END_STATE // Expect nothing.
|
||||
)
|
||||
|
||||
func (ps yaml_parser_state_t) String() string {
|
||||
switch ps {
|
||||
case yaml_PARSE_STREAM_START_STATE:
|
||||
return "yaml_PARSE_STREAM_START_STATE"
|
||||
case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
|
||||
return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
|
||||
case yaml_PARSE_DOCUMENT_START_STATE:
|
||||
return "yaml_PARSE_DOCUMENT_START_STATE"
|
||||
case yaml_PARSE_DOCUMENT_CONTENT_STATE:
|
||||
return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
|
||||
case yaml_PARSE_DOCUMENT_END_STATE:
|
||||
return "yaml_PARSE_DOCUMENT_END_STATE"
|
||||
case yaml_PARSE_BLOCK_NODE_STATE:
|
||||
return "yaml_PARSE_BLOCK_NODE_STATE"
|
||||
case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
|
||||
return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
|
||||
case yaml_PARSE_FLOW_NODE_STATE:
|
||||
return "yaml_PARSE_FLOW_NODE_STATE"
|
||||
case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
|
||||
return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
|
||||
case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
|
||||
return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
|
||||
case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
|
||||
return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
|
||||
case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
|
||||
return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
|
||||
case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
|
||||
return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
|
||||
case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
|
||||
return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
|
||||
case yaml_PARSE_END_STATE:
|
||||
return "yaml_PARSE_END_STATE"
|
||||
}
|
||||
return "<unknown parser state>"
|
||||
}
|
||||
|
||||
// This structure holds aliases data.
|
||||
type yaml_alias_data_t struct {
|
||||
anchor []byte // The anchor.
|
||||
index int // The node id.
|
||||
mark yaml_mark_t // The anchor mark.
|
||||
}
|
||||
|
||||
// The parser structure.
|
||||
//
|
||||
// All members are internal. Manage the structure using the
|
||||
// yaml_parser_ family of functions.
|
||||
type yaml_parser_t struct {
|
||||
|
||||
// Error handling
|
||||
|
||||
error yaml_error_type_t // Error type.
|
||||
|
||||
problem string // Error description.
|
||||
|
||||
// The byte about which the problem occurred.
|
||||
problem_offset int
|
||||
problem_value int
|
||||
problem_mark yaml_mark_t
|
||||
|
||||
// The error context.
|
||||
context string
|
||||
context_mark yaml_mark_t
|
||||
|
||||
// Reader stuff
|
||||
|
||||
read_handler yaml_read_handler_t // Read handler.
|
||||
|
||||
input_file io.Reader // File input data.
|
||||
input []byte // String input data.
|
||||
input_pos int
|
||||
|
||||
eof bool // EOF flag
|
||||
|
||||
buffer []byte // The working buffer.
|
||||
buffer_pos int // The current position of the buffer.
|
||||
|
||||
unread int // The number of unread characters in the buffer.
|
||||
|
||||
raw_buffer []byte // The raw buffer.
|
||||
raw_buffer_pos int // The current position of the buffer.
|
||||
|
||||
encoding yaml_encoding_t // The input encoding.
|
||||
|
||||
offset int // The offset of the current position (in bytes).
|
||||
mark yaml_mark_t // The mark of the current position.
|
||||
|
||||
// Scanner stuff
|
||||
|
||||
stream_start_produced bool // Have we started to scan the input stream?
|
||||
stream_end_produced bool // Have we reached the end of the input stream?
|
||||
|
||||
flow_level int // The number of unclosed '[' and '{' indicators.
|
||||
|
||||
tokens []yaml_token_t // The tokens queue.
|
||||
tokens_head int // The head of the tokens queue.
|
||||
tokens_parsed int // The number of tokens fetched from the queue.
|
||||
token_available bool // Does the tokens queue contain a token ready for dequeueing.
|
||||
|
||||
indent int // The current indentation level.
|
||||
indents []int // The indentation levels stack.
|
||||
|
||||
simple_key_allowed bool // May a simple key occur at the current position?
|
||||
simple_keys []yaml_simple_key_t // The stack of simple keys.
|
||||
|
||||
// Parser stuff
|
||||
|
||||
state yaml_parser_state_t // The current parser state.
|
||||
states []yaml_parser_state_t // The parser states stack.
|
||||
marks []yaml_mark_t // The stack of marks.
|
||||
tag_directives []yaml_tag_directive_t // The list of TAG directives.
|
||||
|
||||
// Dumper stuff
|
||||
|
||||
aliases []yaml_alias_data_t // The alias data.
|
||||
|
||||
document *yaml_document_t // The currently parsed document.
|
||||
}
|
||||
|
||||
// Emitter Definitions
|
||||
|
||||
// The prototype of a write handler.
|
||||
//
|
||||
// The write handler is called when the emitter needs to flush the accumulated
|
||||
// characters to the output. The handler should write @a size bytes of the
|
||||
// @a buffer to the output.
|
||||
//
|
||||
// @param[in,out] data A pointer to an application data specified by
|
||||
// yaml_emitter_set_output().
|
||||
// @param[in] buffer The buffer with bytes to be written.
|
||||
// @param[in] size The size of the buffer.
|
||||
//
|
||||
// @returns On success, the handler should return @c 1. If the handler failed,
|
||||
// the returned value should be @c 0.
|
||||
//
|
||||
type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error
|
||||
|
||||
type yaml_emitter_state_t int
|
||||
|
||||
// The emitter states.
|
||||
const (
|
||||
// Expect STREAM-START.
|
||||
yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota
|
||||
|
||||
yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END.
|
||||
yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END.
|
||||
yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document.
|
||||
yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END.
|
||||
yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence.
|
||||
yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence.
|
||||
yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
|
||||
yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
|
||||
yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping.
|
||||
yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
|
||||
yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence.
|
||||
yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence.
|
||||
yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
|
||||
yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping.
|
||||
yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
|
||||
yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping.
|
||||
yaml_EMIT_END_STATE // Expect nothing.
|
||||
)
|
||||
|
||||
// The emitter structure.
|
||||
//
|
||||
// All members are internal. Manage the structure using the @c yaml_emitter_
|
||||
// family of functions.
|
||||
type yaml_emitter_t struct {
|
||||
|
||||
// Error handling
|
||||
|
||||
error yaml_error_type_t // Error type.
|
||||
problem string // Error description.
|
||||
|
||||
// Writer stuff
|
||||
|
||||
write_handler yaml_write_handler_t // Write handler.
|
||||
|
||||
output_buffer *[]byte // String output data.
|
||||
output_file io.Writer // File output data.
|
||||
|
||||
buffer []byte // The working buffer.
|
||||
buffer_pos int // The current position of the buffer.
|
||||
|
||||
raw_buffer []byte // The raw buffer.
|
||||
raw_buffer_pos int // The current position of the buffer.
|
||||
|
||||
encoding yaml_encoding_t // The stream encoding.
|
||||
|
||||
// Emitter stuff
|
||||
|
||||
canonical bool // If the output is in the canonical style?
|
||||
best_indent int // The number of indentation spaces.
|
||||
best_width int // The preferred width of the output lines.
|
||||
unicode bool // Allow unescaped non-ASCII characters?
|
||||
line_break yaml_break_t // The preferred line break.
|
||||
|
||||
state yaml_emitter_state_t // The current emitter state.
|
||||
states []yaml_emitter_state_t // The stack of states.
|
||||
|
||||
events []yaml_event_t // The event queue.
|
||||
events_head int // The head of the event queue.
|
||||
|
||||
indents []int // The stack of indentation levels.
|
||||
|
||||
tag_directives []yaml_tag_directive_t // The list of tag directives.
|
||||
|
||||
indent int // The current indentation level.
|
||||
|
||||
flow_level int // The current flow level.
|
||||
|
||||
root_context bool // Is it the document root context?
|
||||
sequence_context bool // Is it a sequence context?
|
||||
mapping_context bool // Is it a mapping context?
|
||||
simple_key_context bool // Is it a simple mapping key context?
|
||||
|
||||
line int // The current line.
|
||||
column int // The current column.
|
||||
whitespace bool // If the last character was a whitespace?
|
||||
indention bool // If the last character was an indentation character (' ', '-', '?', ':')?
|
||||
open_ended bool // If an explicit document end is required?
|
||||
|
||||
// Anchor analysis.
|
||||
anchor_data struct {
|
||||
anchor []byte // The anchor value.
|
||||
alias bool // Is it an alias?
|
||||
}
|
||||
|
||||
// Tag analysis.
|
||||
tag_data struct {
|
||||
handle []byte // The tag handle.
|
||||
suffix []byte // The tag suffix.
|
||||
}
|
||||
|
||||
// Scalar analysis.
|
||||
scalar_data struct {
|
||||
value []byte // The scalar value.
|
||||
multiline bool // Does the scalar contain line breaks?
|
||||
flow_plain_allowed bool // Can the scalar be expressed in the flow plain style?
|
||||
block_plain_allowed bool // Can the scalar be expressed in the block plain style?
|
||||
single_quoted_allowed bool // Can the scalar be expressed in the single quoted style?
|
||||
block_allowed bool // Can the scalar be expressed in the literal or folded styles?
|
||||
style yaml_scalar_style_t // The output style.
|
||||
}
|
||||
|
||||
// Dumper stuff
|
||||
|
||||
opened bool // If the stream was already opened?
|
||||
closed bool // If the stream was already closed?
|
||||
|
||||
// The information associated with the document nodes.
|
||||
anchors *struct {
|
||||
references int // The number of references.
|
||||
anchor int // The anchor id.
|
||||
serialized bool // If the node has been emitted?
|
||||
}
|
||||
|
||||
last_anchor_id int // The last assigned anchor id.
|
||||
|
||||
document *yaml_document_t // The currently emitted document.
|
||||
}
|
||||
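As a hedged illustration of the write-handler prototype defined above (a sketch, not the handler the package actually installs): an in-package function matching yaml_write_handler_t that appends the emitted bytes to the emitter's string output buffer.

// exampleStringWriteHandler is a hypothetical yaml_write_handler_t: it
// appends the emitted bytes to output_buffer and reports no error.
func exampleStringWriteHandler(emitter *yaml_emitter_t, buffer []byte) error {
    *emitter.output_buffer = append(*emitter.output_buffer, buffer...)
    return nil
}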
173
vendor/gopkg.in/yaml.v2/yamlprivateh.go
generated
vendored
Normal file
|
|
@@ -0,0 +1,173 @@
|
|||
package yaml
|
||||
|
||||
const (
|
||||
// The size of the input raw buffer.
|
||||
input_raw_buffer_size = 512
|
||||
|
||||
// The size of the input buffer.
|
||||
// It should be possible to decode the whole raw buffer.
|
||||
input_buffer_size = input_raw_buffer_size * 3
|
||||
|
||||
// The size of the output buffer.
|
||||
output_buffer_size = 128
|
||||
|
||||
// The size of the output raw buffer.
|
||||
// It should be possible to encode the whole output buffer.
|
||||
output_raw_buffer_size = (output_buffer_size*2 + 2)
|
||||
|
||||
// The size of other stacks and queues.
|
||||
initial_stack_size = 16
|
||||
initial_queue_size = 16
|
||||
initial_string_size = 16
|
||||
)
|
||||
|
||||
// Check if the character at the specified position is an alphabetical
|
||||
// character, a digit, '_', or '-'.
|
||||
func is_alpha(b []byte, i int) bool {
|
||||
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is a digit.
|
||||
func is_digit(b []byte, i int) bool {
|
||||
return b[i] >= '0' && b[i] <= '9'
|
||||
}
|
||||
|
||||
// Get the value of a digit.
|
||||
func as_digit(b []byte, i int) int {
|
||||
return int(b[i]) - '0'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is a hex-digit.
|
||||
func is_hex(b []byte, i int) bool {
|
||||
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f'
|
||||
}
|
||||
|
||||
// Get the value of a hex-digit.
|
||||
func as_hex(b []byte, i int) int {
|
||||
bi := b[i]
|
||||
if bi >= 'A' && bi <= 'F' {
|
||||
return int(bi) - 'A' + 10
|
||||
}
|
||||
if bi >= 'a' && bi <= 'f' {
|
||||
return int(bi) - 'a' + 10
|
||||
}
|
||||
return int(bi) - '0'
|
||||
}
|
||||
|
||||
// Check if the character is ASCII.
|
||||
func is_ascii(b []byte, i int) bool {
|
||||
return b[i] <= 0x7F
|
||||
}
|
||||
|
||||
// Check if the character at the start of the buffer can be printed unescaped.
|
||||
func is_printable(b []byte, i int) bool {
|
||||
return ((b[i] == 0x0A) || // . == #x0A
|
||||
(b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
|
||||
(b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF
|
||||
(b[i] > 0xC2 && b[i] < 0xED) ||
|
||||
(b[i] == 0xED && b[i+1] < 0xA0) ||
|
||||
(b[i] == 0xEE) ||
|
||||
(b[i] == 0xEF && // #xE000 <= . <= #xFFFD
|
||||
!(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
|
||||
!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is NUL.
|
||||
func is_z(b []byte, i int) bool {
|
||||
return b[i] == 0x00
|
||||
}
|
||||
|
||||
// Check if the beginning of the buffer is a BOM.
|
||||
func is_bom(b []byte, i int) bool {
|
||||
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is space.
|
||||
func is_space(b []byte, i int) bool {
|
||||
return b[i] == ' '
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is tab.
|
||||
func is_tab(b []byte, i int) bool {
|
||||
return b[i] == '\t'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is blank (space or tab).
|
||||
func is_blank(b []byte, i int) bool {
|
||||
//return is_space(b, i) || is_tab(b, i)
|
||||
return b[i] == ' ' || b[i] == '\t'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is a line break.
|
||||
func is_break(b []byte, i int) bool {
|
||||
return (b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029)
|
||||
}
|
||||
|
||||
func is_crlf(b []byte, i int) bool {
|
||||
return b[i] == '\r' && b[i+1] == '\n'
|
||||
}
|
||||
|
||||
// Check if the character is a line break or NUL.
|
||||
func is_breakz(b []byte, i int) bool {
|
||||
//return is_break(b, i) || is_z(b, i)
|
||||
return ( // is_break:
|
||||
b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||
// is_z:
|
||||
b[i] == 0)
|
||||
}
|
||||
|
||||
// Check if the character is a line break, space, or NUL.
|
||||
func is_spacez(b []byte, i int) bool {
|
||||
//return is_space(b, i) || is_breakz(b, i)
|
||||
return ( // is_space:
|
||||
b[i] == ' ' ||
|
||||
// is_breakz:
|
||||
b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||
b[i] == 0)
|
||||
}
|
||||
|
||||
// Check if the character is a line break, space, tab, or NUL.
|
||||
func is_blankz(b []byte, i int) bool {
|
||||
//return is_blank(b, i) || is_breakz(b, i)
|
||||
return ( // is_blank:
|
||||
b[i] == ' ' || b[i] == '\t' ||
|
||||
// is_breakz:
|
||||
b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||
b[i] == 0)
|
||||
}
|
||||
|
||||
// Determine the width of the character.
|
||||
func width(b byte) int {
|
||||
// Don't replace these by a switch without first
|
||||
// confirming that it is being inlined.
|
||||
if b&0x80 == 0x00 {
|
||||
return 1
|
||||
}
|
||||
if b&0xE0 == 0xC0 {
|
||||
return 2
|
||||
}
|
||||
if b&0xF0 == 0xE0 {
|
||||
return 3
|
||||
}
|
||||
if b&0xF8 == 0xF0 {
|
||||
return 4
|
||||
}
|
||||
return 0
|
||||
|
||||
}
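To make the leading-byte arithmetic above concrete, here is a hypothetical in-package sketch (not part of the vendored file); the sample bytes are ordinary UTF-8 leading bytes chosen for illustration, not values taken from this diff.

func widthExamples() {
    _ = width('a')  // 1: ASCII byte
    _ = width(0xC3) // 2: leading byte of a 2-byte sequence such as 'é'
    _ = width(0xE2) // 3: leading byte of a 3-byte sequence such as '€'
    _ = width(0xF0) // 4: leading byte of a 4-byte sequence
    _ = width(0x80) // 0: continuation byte, never a valid leading byte
}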