mirror of https://github.com/StackExchange/dnscontrol.git synced 2024-05-11 05:55:12 +00:00

Switch from govendor to go modules. (#587)

Thanks to @BenoitKnecht for leading the way on this.
Tom Limoncelli
2020-01-18 14:40:28 -05:00
committed by GitHub
parent 31188c3a70
commit 16d0043cce
1554 changed files with 400867 additions and 98222 deletions
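
For readers making the same switch: moving a repository from govendor to Go modules generally comes down to running go mod init and go mod tidy at the repository root (and go mod vendor if a checked-in vendor/ tree is still wanted), which replaces govendor's vendor/vendor.json manifest with go.mod and go.sum. A minimal sketch of the resulting go.mod follows; the module path, Go version, and pinned dependency are placeholders for illustration, not the values added by this commit:

module github.com/example/yourproject

go 1.13

require github.com/miekg/dns v1.1.27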


@@ -1,6 +1,7 @@
 package dns
 
 import (
+	"bufio"
 	"crypto"
 	"crypto/dsa"
 	"crypto/ecdsa"
@@ -9,6 +10,8 @@ import (
 	"math/big"
 	"strconv"
 	"strings"
+
+	"golang.org/x/crypto/ed25519"
 )
 
 // NewPrivateKey returns a PrivateKey by parsing the string s.
@@ -86,6 +89,8 @@ func (k *DNSKEY) ReadPrivateKey(q io.Reader, file string) (crypto.PrivateKey, er
 		}
 		priv.PublicKey = *pub
 		return priv, nil
+	case ED25519:
+		return readPrivateKeyED25519(m)
 	default:
 		return nil, ErrPrivKey
 	}
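
The new ED25519 case above hands the parsed key map to readPrivateKeyED25519 (added in the next hunk), which treats the PrivateKey field as a base64-encoded 32-byte seed. Below is a minimal standalone sketch of that conversion, using the standard library's crypto/ed25519 (API-compatible with the golang.org/x/crypto/ed25519 package imported here); the seed value is a made-up placeholder, not a real key:

package main

import (
	"crypto/ed25519"
	"encoding/base64"
	"fmt"
	"log"
)

func main() {
	// Base64 text as it would appear on the PrivateKey: line of a key file.
	// Placeholder value; any 32 bytes form a valid seed.
	field := "ODIyNjAzODQ2MjgwODAxMjI2NDUxOTAyMDQxNDIyNjI="

	seed, err := base64.StdEncoding.DecodeString(field)
	if err != nil {
		log.Fatal(err)
	}
	if len(seed) != ed25519.SeedSize { // ed25519.SeedSize == 32
		log.Fatal("ED25519 private key is not a 32-byte seed")
	}

	// The 64-byte private key is derived deterministically from the seed.
	priv := ed25519.NewKeyFromSeed(seed)
	fmt.Printf("private: %d bytes, public: %d bytes\n",
		len(priv), len(priv.Public().(ed25519.PublicKey)))
}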
@@ -166,16 +171,36 @@ func readPrivateKeyECDSA(m map[string]string) (*ecdsa.PrivateKey, error) {
 	return p, nil
 }
 
+func readPrivateKeyED25519(m map[string]string) (ed25519.PrivateKey, error) {
+	var p ed25519.PrivateKey
+	// TODO: validate that the required flags are present
+	for k, v := range m {
+		switch k {
+		case "privatekey":
+			p1, err := fromBase64([]byte(v))
+			if err != nil {
+				return nil, err
+			}
+			if len(p1) != ed25519.SeedSize {
+				return nil, ErrPrivKey
+			}
+			p = ed25519.NewKeyFromSeed(p1)
+		case "created", "publish", "activate":
+			/* not used in Go (yet) */
+		}
+	}
+	return p, nil
+}
+
 // parseKey reads a private key from r. It returns a map[string]string,
 // with the key-value pairs, or an error when the file is not correct.
 func parseKey(r io.Reader, file string) (map[string]string, error) {
-	s := scanInit(r)
 	m := make(map[string]string)
-	c := make(chan lex)
-	k := ""
-	// Start the lexer
-	go klexer(s, c)
-	for l := range c {
+	var k string
+
+	c := newKLexer(r)
+
+	for l, ok := c.Next(); ok; l, ok = c.Next() {
 		// It should alternate
 		switch l.value {
 		case zKey:
@@ -184,41 +209,111 @@ func parseKey(r io.Reader, file string) (map[string]string, error) {
 			if k == "" {
 				return nil, &ParseError{file, "no private key seen", l}
 			}
-			//println("Setting", strings.ToLower(k), "to", l.token, "b")
+
 			m[strings.ToLower(k)] = l.token
 			k = ""
 		}
 	}
+
+	// Surface any read errors from r.
+	if err := c.Err(); err != nil {
+		return nil, &ParseError{file: file, err: err.Error()}
+	}
+
 	return m, nil
 }
 
-// klexer scans the sourcefile and returns tokens on the channel c.
-func klexer(s *scan, c chan lex) {
-	var l lex
-	str := "" // Hold the current read text
-	commt := false
-	key := true
-	x, err := s.tokenText()
-	defer close(c)
-	for err == nil {
-		l.column = s.position.Column
-		l.line = s.position.Line
+type klexer struct {
+	br io.ByteReader
+
+	readErr error
+
+	line   int
+	column int
+
+	key bool
+
+	eol bool // end-of-line
+}
+
+func newKLexer(r io.Reader) *klexer {
+	br, ok := r.(io.ByteReader)
+	if !ok {
+		br = bufio.NewReaderSize(r, 1024)
+	}
+
+	return &klexer{
+		br: br,
+
+		line: 1,
+
+		key: true,
+	}
+}
+
+func (kl *klexer) Err() error {
+	if kl.readErr == io.EOF {
+		return nil
+	}
+
+	return kl.readErr
+}
+
+// readByte returns the next byte from the input
+func (kl *klexer) readByte() (byte, bool) {
+	if kl.readErr != nil {
+		return 0, false
+	}
+
+	c, err := kl.br.ReadByte()
+	if err != nil {
+		kl.readErr = err
+		return 0, false
+	}
+
+	// delay the newline handling until the next token is delivered,
+	// fixes off-by-one errors when reporting a parse error.
+	if kl.eol {
+		kl.line++
+		kl.column = 0
+		kl.eol = false
+	}
+
+	if c == '\n' {
+		kl.eol = true
+	} else {
+		kl.column++
+	}
+
+	return c, true
+}
+
+func (kl *klexer) Next() (lex, bool) {
+	var (
+		l lex
+
+		str strings.Builder
+
+		commt bool
+	)
+
+	for x, ok := kl.readByte(); ok; x, ok = kl.readByte() {
+		l.line, l.column = kl.line, kl.column
+
 		switch x {
 		case ':':
-			if commt {
+			if commt || !kl.key {
 				break
 			}
-			l.token = str
-			if key {
-				l.value = zKey
-				c <- l
-				// Next token is a space, eat it
-				s.tokenText()
-				key = false
-				str = ""
-			} else {
-				l.value = zValue
-			}
+
+			kl.key = false
+
+			// Next token is a space, eat it
+			kl.readByte()
+
+			l.value = zKey
+			l.token = str.String()
+			return l, true
 		case ';':
 			commt = true
 		case '\n':
@@ -226,24 +321,32 @@ func klexer(s *scan, c chan lex) {
 				// Reset a comment
 				commt = false
 			}
+
+			kl.key = true
+
 			l.value = zValue
-			l.token = str
-			c <- l
-			str = ""
-			commt = false
-			key = true
+			l.token = str.String()
+			return l, true
 		default:
 			if commt {
 				break
 			}
-			str += string(x)
+
+			str.WriteByte(x)
 		}
-		x, err = s.tokenText()
 	}
-	if len(str) > 0 {
+
+	if kl.readErr != nil && kl.readErr != io.EOF {
+		// Don't return any tokens after a read error occurs.
+		return lex{value: zEOF}, false
+	}
+
+	if str.Len() > 0 {
 		// Send remainder
-		l.token = str
 		l.value = zValue
-		c <- l
+		l.token = str.String()
+		return l, true
 	}
+
+	return lex{value: zEOF}, false
 }
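
The rewritten lexer above replaces the old goroutine-plus-channel klexer with a pull-style Next/Err scanner, which is what lets parseKey surface read errors from r instead of silently stopping at the first failure. For context on the input it tokenizes: parseKey reads a BIND-style private key file and stores each field under its lowercased name. Below is a rough, self-contained illustration of that format and the resulting map, using a naive line split in place of the package's unexported lexer (all field values are made up):

package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// Shape of a BIND-format private key file as consumed by parseKey.
	// The base64 value is a placeholder, not a real key.
	input := `Private-key-format: v1.3
Algorithm: 15 (ED25519)
PrivateKey: ODIyNjAzODQ2MjgwODAxMjI2NDUxOTAyMDQxNDIyNjI=
Created: 20200118000000
Publish: 20200118000000
Activate: 20200118000000
`

	// Crude stand-in for klexer/parseKey: split each line on the first
	// ": " and lowercase the field name, as parseKey does.
	m := make(map[string]string)
	sc := bufio.NewScanner(strings.NewReader(input))
	for sc.Scan() {
		if parts := strings.SplitN(sc.Text(), ": ", 2); len(parts) == 2 {
			m[strings.ToLower(parts[0])] = parts[1]
		}
	}

	fmt.Println(m["algorithm"])  // 15 (ED25519)
	fmt.Println(m["privatekey"]) // the base64-encoded seed
}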