1
0
mirror of https://github.com/StackExchange/dnscontrol.git synced 2024-05-11 05:55:12 +00:00

Merge branch 'master' into tlim_cfaliases

This commit is contained in:
Tom Limoncelli
2017-05-25 20:38:48 -04:00
61 changed files with 985 additions and 171 deletions

View File

@ -5,14 +5,13 @@ import (
) )
func main() { func main() {
//go:generate esc -modtime 0 -o js/static.go -pkg js -include helpers\.js -ignore go -prefix js js
conf := &embed.Config{ conf := &embed.Config{
ModTime: "0", ModTime: "0",
OutputFile: "js/static.go", OutputFile: "pkg/js/static.go",
Package: "js", Package: "js",
Prefix: "js", Prefix: "pkg/js",
Private: true, Private: true,
Files: []string{`js/helpers.js`}, Files: []string{`pkg/js/helpers.js`},
} }
embed.Run(conf) embed.Run(conf)
} }

66
cmd/spftest/main.go Normal file
View File

@ -0,0 +1,66 @@
package main
import (
"fmt"
"strings"
"github.com/StackExchange/dnscontrol/pkg/dnsresolver"
"github.com/StackExchange/dnscontrol/pkg/spflib"
)
// main is a manual smoke test for the dnsresolver and spflib packages:
// it records live DNS answers to a JSON file, replays them through the
// preloaded resolver, then parses and dumps SPF records from canned data.
func main() {
// Phase 1: live lookups, recorded to spf-store.json when Close is called.
h := dnsresolver.NewResolverLive("spf-store.json")
fmt.Println(h.GetTxt("_spf.google.com"))
fmt.Println(h.GetTxt("spf-basic.fogcreek.com"))
h.Close()
// Phase 2: replay the same lookups from the file just written; the
// "wontbefound" query demonstrates the miss behavior of the preloaded resolver.
i, err := dnsresolver.NewResolverPreloaded("spf-store.json")
if err != nil {
panic(err)
}
fmt.Println(i.GetTxt("_spf.google.com"))
fmt.Println(i.GetTxt("spf-basic.fogcreek.com"))
fmt.Println(i.GetTxt("wontbefound"))
fmt.Println()
fmt.Println("---------------------")
fmt.Println()
// Phase 3: parse a hand-built SPF record, resolving include: targets
// via the canned answers in spf-store2.json.
// Swap in the live resolver (and res.Close() below) to re-record the data.
//res := dnsresolver.NewResolverLive("spf-store2.json")
res, err := dnsresolver.NewResolverPreloaded("spf-store2.json")
if err != nil {
panic(err)
}
rec, err := spflib.Parse(strings.Join([]string{"v=spf1",
"ip4:198.252.206.0/24",
"ip4:192.111.0.0/24",
"include:_spf.google.com",
"include:mailgun.org",
"include:spf-basic.fogcreek.com",
"include:mail.zendesk.com",
"include:servers.mcsv.net",
"include:sendgrid.net",
"include:spf.mtasv.net",
"~all"}, " "), res)
if err != nil {
panic(err)
}
spflib.DumpSPF(rec, "")
fmt.Println()
fmt.Println("---------------------")
fmt.Println()
// Phase 4: look up a real domain's SPF record (from the canned data)
// and parse that too.
var spf string
spf, err = spflib.Lookup("whatexit.org", res)
if err != nil {
panic(err)
}
rec, err = spflib.Parse(spf, res)
if err != nil {
panic(err)
}
spflib.DumpSPF(rec, "")
//res.Close()
}

View File

@ -0,0 +1,12 @@
{
"_spf.google.com": {
"txt": [
"v=spf1 include:_netblocks.google.com include:_netblocks2.google.com include:_netblocks3.google.com ~all"
]
},
"spf-basic.fogcreek.com": {
"txt": [
"v=spf1 ip4:64.34.80.172 -all"
]
}
}

View File

@ -0,0 +1,69 @@
{
"_netblocks.google.com": {
"txt": [
"v=spf1 ip4:64.18.0.0/20 ip4:64.233.160.0/19 ip4:66.102.0.0/20 ip4:66.249.80.0/20 ip4:72.14.192.0/18 ip4:74.125.0.0/16 ip4:108.177.8.0/21 ip4:173.194.0.0/16 ip4:207.126.144.0/20 ip4:209.85.128.0/17 ip4:216.58.192.0/19 ip4:216.239.32.0/19 ~all"
]
},
"_netblocks2.google.com": {
"txt": [
"v=spf1 ip6:2001:4860:4000::/36 ip6:2404:6800:4000::/36 ip6:2607:f8b0:4000::/36 ip6:2800:3f0:4000::/36 ip6:2a00:1450:4000::/36 ip6:2c0f:fb50:4000::/36 ~all"
]
},
"_netblocks3.google.com": {
"txt": [
"v=spf1 ip4:172.217.0.0/19 ip4:108.177.96.0/19 ~all"
]
},
"_spf.google.com": {
"txt": [
"v=spf1 include:_netblocks.google.com include:_netblocks2.google.com include:_netblocks3.google.com ~all"
]
},
"mail.zendesk.com": {
"txt": [
"v=spf1 ip4:192.161.144.0/20 ip4:185.12.80.0/22 ip4:96.46.150.192/27 ip4:174.137.46.0/24 ip4:188.172.128.0/20 ip4:216.198.0.0/18 ~all"
]
},
"mailgun.org": {
"txt": [
"google-site-verification=FIGVOKZm6lQFDBJaiC2DdwvBy8TInunoGCt-1gnL4PA",
"v=spf1 include:spf1.mailgun.org include:spf2.mailgun.org ~all"
]
},
"sendgrid.net": {
"txt": [
"v=spf1 ip4:167.89.0.0/17 ip4:208.117.48.0/20 ip4:50.31.32.0/19 ip4:198.37.144.0/20 ip4:198.21.0.0/21 ip4:192.254.112.0/20 ip4:168.245.0.0/17 ~all",
"google-site-verification=NxyooVvVaIgddVa23KTlOEuVPuhffcDqJFV8RzWrAys"
]
},
"servers.mcsv.net": {
"txt": [
"v=spf1 ip4:205.201.128.0/20 ip4:198.2.128.0/18 ip4:148.105.8.0/21 ?all"
]
},
"spf-basic.fogcreek.com": {
"txt": [
"v=spf1 ip4:64.34.80.172 -all"
]
},
"spf.mtasv.net": {
"txt": [
"v=spf1 ip4:50.31.156.96/27 ip4:104.245.209.192/26 ~all"
]
},
"spf1.mailgun.org": {
"txt": [
"v=spf1 ip4:173.193.210.32/27 ip4:50.23.218.192/27 ip4:174.37.226.64/27 ip4:208.43.239.136/30 ip4:184.173.105.0/24 ip4:184.173.153.0/24 ip4:104.130.122.0/23 ip4:146.20.112.0/26 ~all"
]
},
"spf2.mailgun.org": {
"txt": [
"v=spf1 ip4:209.61.151.0/24 ip4:166.78.68.0/22 ip4:198.61.254.0/23 ip4:192.237.158.0/23 ip4:23.253.182.0/23 ip4:104.130.96.0/28 ip4:146.20.113.0/24 ip4:146.20.191.0/24 ~all"
]
},
"whatexit.org": {
"txt": [
"v=spf1 ip6:2607:f2f8:a9c0::3 ip4:174.136.107.195 include:servers.mcsv.net include:_spf.google.com mx:evite.com -all"
]
}
}

View File

@ -22,13 +22,14 @@ username and access token:
## Metadata ## Metadata
Record level metadata available: Record level metadata available:
* cloudflare_proxy ('on', 'off', or 'full') * `cloudflare_proxy` ("on", "off", or "full")
Domain level metadata available: Domain level metadata available:
* cloudflare_proxy_default ('on', 'off', or 'full') * `cloudflare_proxy_default` ("on", "off", or "full")
Provider level metadata available: Provider level metadata available:
* ip_conversions * `ip_conversions`
* `manage_redirects`: set to `true` to manage page-rule based redirects
What does on/off/full mean? What does on/off/full mean?
@ -100,3 +101,31 @@ DNSControl depends on a Cloudflare Global API Key that's available under "My Set
If a domain does not exist in your CloudFlare account, DNSControl If a domain does not exist in your CloudFlare account, DNSControl
will *not* automatically add it. You'll need to do that via the will *not* automatically add it. You'll need to do that via the
control panel manually or via the `dnscontrol create-domains` command. control panel manually or via the `dnscontrol create-domains` command.
## Redirects
The cloudflare provider can manage Page-Rule based redirects for your domains. Simply use the `CF_REDIRECT` and `CF_TEMP_REDIRECT` functions to make redirects:
{% highlight js %}
// chiphacker.com is an alias for electronics.stackexchange.com
D("chiphacker.com", REG_NAMECOM, DnsProvider(CFLARE),
// must have A records with orange cloud on. Otherwise page rule will never run.
A("@","1.2.3.4", CF_PROXY_ON),
A("www", "1.2.3.4", CF_PROXY_ON),
A("meta", "1.2.3.4", CF_PROXY_ON),
// 302 for meta subdomain
CF_TEMP_REDIRECT("meta.chiphacker.com/*", "https://electronics.meta.stackexchange.com/$1"),
// 301 all subdomains and preserve path
CF_REDIRECT("*chiphacker.com/*", "https://electronics.stackexchange.com/$2"),
);
{%endhighlight%}
Notice a few details:
1. We need an A record with cloudflare proxy on, or the page rule will never run.
2. The IP address in those A records may be mostly irrelevant, as cloudflare should handle all requests (assuming some page rule matches).
3. Ordering matters for priority. CF_REDIRECT records will be added in the order they appear in your js. So put catch-alls at the bottom.

View File

@ -10,7 +10,7 @@ import (
"strings" "strings"
"github.com/StackExchange/dnscontrol/models" "github.com/StackExchange/dnscontrol/models"
"github.com/StackExchange/dnscontrol/nameservers" "github.com/StackExchange/dnscontrol/pkg/nameservers"
"github.com/StackExchange/dnscontrol/providers" "github.com/StackExchange/dnscontrol/providers"
_ "github.com/StackExchange/dnscontrol/providers/_all" _ "github.com/StackExchange/dnscontrol/providers/_all"
"github.com/StackExchange/dnscontrol/providers/config" "github.com/StackExchange/dnscontrol/providers/config"

View File

@ -1 +0,0 @@
D("foo.com","reg","dsp")

View File

@ -1,2 +0,0 @@
require("js/parse_tests/import.js")

10
main.go
View File

@ -12,10 +12,10 @@ import (
"strings" "strings"
"time" "time"
"github.com/StackExchange/dnscontrol/js"
"github.com/StackExchange/dnscontrol/models" "github.com/StackExchange/dnscontrol/models"
"github.com/StackExchange/dnscontrol/nameservers" "github.com/StackExchange/dnscontrol/pkg/js"
"github.com/StackExchange/dnscontrol/normalize" "github.com/StackExchange/dnscontrol/pkg/nameservers"
"github.com/StackExchange/dnscontrol/pkg/normalize"
"github.com/StackExchange/dnscontrol/providers" "github.com/StackExchange/dnscontrol/providers"
_ "github.com/StackExchange/dnscontrol/providers/_all" _ "github.com/StackExchange/dnscontrol/providers/_all"
"github.com/StackExchange/dnscontrol/providers/config" "github.com/StackExchange/dnscontrol/providers/config"
@ -207,8 +207,8 @@ func main() {
if !ok { if !ok {
log.Fatalf("Registrar %s not declared.", reg) log.Fatalf("Registrar %s not declared.", reg)
} }
if len(domain.Nameservers) == 0 { if len(domain.Nameservers) == 0 && domain.Metadata["no_ns"] != "true" {
//fmt.Printf("No nameservers declared; skipping registrar.\n") fmt.Printf("No nameservers declared; skipping registrar. Add {no_ns:'true'} to force.\n")
continue continue
} }
dc, err := domain.Copy() dc, err := domain.Copy()

View File

@ -10,7 +10,7 @@ import (
"reflect" "reflect"
"strconv" "strconv"
"github.com/StackExchange/dnscontrol/transform" "github.com/StackExchange/dnscontrol/pkg/transform"
"github.com/miekg/dns" "github.com/miekg/dns"
"golang.org/x/net/idna" "golang.org/x/net/idna"
) )

View File

@ -0,0 +1,28 @@
package dnsresolver
// dnsCache is a minimal in-memory DNS answer cache, keyed first by FQDN
// and then by record type. It stores the entire answer set (e.g. all TXT
// strings); filtering out non-SPF answers happens at a higher layer.
// Currently only the "txt" rtype is used; A/AAAA/CNAME support would be
// needed later for CNAME flattening.
type dnsCache map[string]map[string][]string // map[fqdn]map[rtype] -> answers

// get returns the cached answers for (label, rtype) and whether an entry
// exists.
func (c dnsCache) get(label, rtype string) ([]string, bool) {
	byType, found := c[label]
	if !found {
		return nil, false
	}
	answers, found := byType[rtype]
	return answers, found
}

// put stores answers under (label, rtype), creating the per-label map on
// first use and overwriting any previous entry for that rtype.
func (c dnsCache) put(label, rtype string, answers []string) {
	byType, found := c[label]
	if !found {
		byType = make(map[string][]string)
		c[label] = byType
	}
	byType[rtype] = answers
}

View File

@ -0,0 +1,31 @@
package dnsresolver
import "testing"
// TestDnsCache exercises dnsCache.put/get: hits for stored (label, rtype)
// pairs, and misses for an unknown label and an unknown rtype.
func TestDnsCache(t *testing.T) {
	cache := &dnsCache{}
	cache.put("one", "txt", []string{"a", "b", "c"})
	cache.put("two", "txt", []string{"d", "e", "f"})
	a, b := cache.get("one", "txt")
	if !(b == true && len(a) == 3 && a[0] == "a" && a[1] == "b" && a[2] == "c") {
		t.Errorf("one-txt didn't work")
	}
	a, b = cache.get("two", "txt")
	if !(b == true && len(a) == 3 && a[0] == "d" && a[1] == "e" && a[2] == "f") {
		// BUG FIX: this message previously said "one-txt" (copy/paste error),
		// which would misattribute a failure of the "two" entry.
		t.Errorf("two-txt didn't work")
	}
	a, b = cache.get("three", "txt")
	if !(b == false) {
		t.Errorf("three-txt didn't work")
	}
	a, b = cache.get("two", "not")
	if !(b == false) {
		t.Errorf("two-not didn't work")
	}
}

View File

@ -0,0 +1,83 @@
package dnsresolver
import (
"encoding/json"
"io/ioutil"
"net"
"github.com/pkg/errors"
)
// This file includes all the DNS Resolvers used by package spf.

// DnsResolver looks up txt strings associated with a FQDN.
// Implementations in this file: a live resolver that queries real DNS and
// records answers, and a preloaded resolver that replays a recording.
type DnsResolver interface {
GetTxt(string) ([]string, error) // Given a DNS label, return the TXT values records.
}
// The "Live DNS" Resolver:
// dnsLive serves GetTxt from real DNS queries, recording every successful
// answer so the whole session can be written to filename on Close.
type dnsLive struct {
filename string
cache dnsCache
}

// NewResolverLive returns a resolver that does live DNS lookups, records
// them, and writes the recording to filename when Close is called.
func NewResolverLive(filename string) *dnsLive {
	return &dnsLive{
		filename: filename,
		cache:    dnsCache{},
	}
}
// GetTxt returns the TXT records for label. Cached answers are served
// directly; otherwise a live DNS query is made and, on success, its
// answers are cached for the recording written by Close.
func (c *dnsLive) GetTxt(label string) ([]string, error) {
	if cached, ok := c.cache.get(label, "txt"); ok {
		return cached, nil
	}
	answers, err := net.LookupTXT(label)
	if err == nil {
		c.cache.put(label, "txt", answers)
	}
	return answers, err
}
// Close flushes the recorded DNS answers to c.filename as indented JSON
// so a later NewResolverPreloaded run can replay them offline.
// It now returns any marshalling or file-write error instead of silently
// discarding it (previously a failed write lost the recording without
// notice). Existing callers that invoke Close() as a statement still
// compile unchanged.
func (c *dnsLive) Close() error {
	m, err := json.MarshalIndent(c.cache, "", " ")
	if err != nil {
		return err
	}
	m = append(m, '\n')
	return ioutil.WriteFile(c.filename, m, 0666)
}
// The "Pre-Cached DNS" Resolver:
// dnsPreloaded serves GetTxt exclusively from a cache loaded off disk;
// it never touches the network.
type dnsPreloaded struct {
cache dnsCache
}

// NewResolverPreloaded loads a JSON recording (as written by the live
// resolver's Close) from filename and returns a resolver that replays it.
func NewResolverPreloaded(filename string) (*dnsPreloaded, error) {
	res := &dnsPreloaded{cache: dnsCache{}}
	data, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	err = json.Unmarshal(data, &res.cache)
	return res, err
}
// DumpCache exposes the loaded cache contents (useful for debugging).
func (c *dnsPreloaded) DumpCache() dnsCache {
return c.cache
}
// GetTxt returns the preloaded TXT records for label. It never queries
// the network; a label absent from the recording is an error.
func (c *dnsPreloaded) GetTxt(label string) ([]string, error) {
	if answers, ok := c.cache.get(label, "txt"); ok {
		return answers, nil
	}
	return nil, errors.Errorf("No preloaded DNS entry for: %#v", label)
}

View File

@ -304,6 +304,7 @@ function num2dot(num)
return d; return d;
} }
// Cloudflare aliases: // Cloudflare aliases:
// Meta settings for individual records. // Meta settings for individual records.
@ -315,3 +316,21 @@ var CF_PROXY_FULL = {'cloudflare_proxy': 'full'}; // Proxy+Railgun enabled.
var CF_PROXY_DEFAULT_OFF = {'cloudflare_proxy_default': 'off'}; var CF_PROXY_DEFAULT_OFF = {'cloudflare_proxy_default': 'off'};
// Proxy default on for entire domain: // Proxy default on for entire domain:
var CF_PROXY_DEFAULT_ON = {'cloudflare_proxy_default': 'on'}; var CF_PROXY_DEFAULT_ON = {'cloudflare_proxy_default': 'on'};
// CUSTOM, PROVIDER SPECIFIC RECORD TYPES

// CF_REDIRECT adds a Cloudflare page-rule redirect record (the docs
// describe it as a permanent redirect) mapping src to dst. The two values
// are joined with a comma into the record target, so neither may contain
// a comma itself.
function CF_REDIRECT(src, dst) {
    return function (d) {
        var hasComma = src.indexOf(",") !== -1 || dst.indexOf(",") !== -1;
        if (hasComma) {
            throw("redirect src and dst must not have commas")
        }
        addRecord(d, "CF_REDIRECT", "@", src + "," + dst)
    }
}
// CF_TEMP_REDIRECT adds a Cloudflare page-rule redirect record (the docs
// describe it as a temporary redirect) mapping src to dst. As with
// CF_REDIRECT, src and dst are comma-joined into the record target, so
// commas are rejected.
function CF_TEMP_REDIRECT(src, dst) {
    return function (d) {
        var hasComma = src.indexOf(",") !== -1 || dst.indexOf(",") !== -1;
        if (hasComma) {
            throw("redirect src and dst must not have commas")
        }
        addRecord(d, "CF_TEMP_REDIRECT", "@", src + "," + dst)
    }
}

View File

@ -12,12 +12,12 @@ import (
) )
const ( const (
testDir = "js/parse_tests" testDir = "pkg/js/parse_tests"
errorDir = "js/error_tests" errorDir = "pkg/js/error_tests"
) )
func init() { func init() {
os.Chdir("..") // go up a directory so we helpers.js is in a consistent place. os.Chdir("../..") // go up a directory so we helpers.js is in a consistent place.
} }
func TestParsedFiles(t *testing.T) { func TestParsedFiles(t *testing.T) {
@ -72,6 +72,8 @@ func TestErrors(t *testing.T) {
{"old dsp style", `D("foo.com","reg","dsp")`}, {"old dsp style", `D("foo.com","reg","dsp")`},
{"MX no priority", `D("foo.com","reg",MX("@","test."))`}, {"MX no priority", `D("foo.com","reg",MX("@","test."))`},
{"MX reversed", `D("foo.com","reg",MX("@","test.", 5))`}, {"MX reversed", `D("foo.com","reg",MX("@","test.", 5))`},
{"CF_REDIRECT With comma", `D("foo.com","reg",CF_REDIRECT("foo.com,","baaa"))`},
{"CF_TEMP_REDIRECT With comma", `D("foo.com","reg",CF_TEMP_REDIRECT("foo.com","baa,a"))`},
} }
for _, tst := range tests { for _, tst := range tests {
t.Run(tst.desc, func(t *testing.T) { t.Run(tst.desc, func(t *testing.T) {

View File

@ -0,0 +1,2 @@
require("pkg/js/parse_tests/import.js")

View File

@ -0,0 +1,4 @@
D("foo.com","none",
CF_REDIRECT("test.foo.com","https://goo.com/$1"),
CF_TEMP_REDIRECT("test.foo.com","https://goo.com/$1")
);

View File

@ -0,0 +1,24 @@
{
"registrars": [],
"dns_providers": [],
"domains": [
{
"name": "foo.com",
"registrar": "none",
"dnsProviders": {},
"records": [
{
"type": "CF_REDIRECT",
"name": "@",
"target": "test.foo.com,https://goo.com/$1"
},
{
"type": "CF_TEMP_REDIRECT",
"name": "@",
"target": "test.foo.com,https://goo.com/$1"
}
],
"keepunknown": false
}
]
}

View File

@ -189,57 +189,60 @@ func _escFSMustString(useLocal bool, name string) string {
var _escData = map[string]*_escFile{ var _escData = map[string]*_escFile{
"/helpers.js": { "/helpers.js": {
local: "js/helpers.js", local: "pkg/js/helpers.js",
size: 8291, size: 8855,
modtime: 0, modtime: 0,
compressed: ` compressed: `
H4sIAAAAAAAA/7xZ/W/bvPH/3X/FPQK+tfS1orz0aTbI9TCvSR4US5wgcbYMhmEwEm2zlUSBpJxmhfO3 H4sIAAAAAAAA/9wZa2/byPG7fsUcgUZkRdOPXNKCOhVVbflg1JINWb76IAjCmlxJm/CF3aUcNyf/9mIf
D3yRREl2kwLr+kNqiffyuePx7nhyCo6BC0Yi4Qx7vQ1iENFsCSP43gMAYHhFuGCI8RBmc1+9izO+yBnd JJekFDtA0w/NB0fcnffMzszOWjnDwDglAbf6nc4WUQjSZAUD+NoBAKB4TRiniDIf5gtXroUJW2Y03ZIQ
kBg3XtMUkUy96G2NrBgvUZGIMVtxGMFsPuz1lkUWCUIzIBkRBCXk39j1tLKG5n3af4CgjUI+b4caXAfI 15bTGJFELnR2mlaIVyiP+JCuGQxgvuh3Oqs8CThJEyAJ4QRF5N/YdhSzGudD3L8hQVMK8b3rK+FaguwM
1oIywU+3pSo3Qyn2xXOO/RQL5Bk4ZAmufOlV8OQTjEbgXI0n9+NLRyvaqr/SdoZX0hgpLgQlVLGE6q8P USb4aVqwshMUY5c/Z9iNMUeOFoeswBaLTime+ILBAKzxcHI/vLYUo538K3SneC2UEeR8kEQlii//uiCI
Unio/hqI0vqgtjjIC752GV55Q7MTomCZEtQBf5bxG+MOt9akdVgGgKtMoEu1AKPRCPr08QuORN+Dd+/A +/KvFlFo71Uae1nONjbFa6evPcFzmkhCLeEvEnarzWFXnBQPQwGwpQrpSm7AYDCAbvr4CQe868C7d2B3
7ZN8EdFsgxknNON9IJmW4VmbIl8ETUIYwZKyFImFEO6Oda/lmpjnP++axqZr78Q8f807GX46UyGhHVP5 SbYM0mSLKSNpwrpAEkXDMZwiFrw6IAxgldIY8SXn9p59p2GakGXfb5qa05V1Qpa9Zp0EP13IkFCGKe3r
16sCXDE2sFREYf3ToPq+lcsRZTEPZ3NfRuJNHYhy1UTadHoZwpGvJHLMpCfC2XzbBJczGmHOzxBbcTf1 lAEuEWuylEB+9VNL9XUntoOUhsyfL1wRibdVIIpdHWmz2bUPJ66kyDAVlvDni11duIymAWbsAtE1s2NX
TfDazj48lJ4FjKI1pDQmS4KZL/eSCCAcUBAEDVojOYQIJYkkeiJibeTahIgx9ByWAKRJBeNkg5Nnm0oH B69p7ONjYVnAKNhAnIZkRTB1hS8JB8IAeZ5Xg9WUfQhQFAmgJ8I3mq4JiChFz34hgFApp4xscfRsQqng
h9wKtsJKZSaockSMBKoo5dlYBIRfGO1u2giYMm5cY96wWtkCTjiu+McS1A5m6QFXxs0XFZBd2U0/zr7M EK6gayxZJjyVhggRRyWkOBtLj7BLzd2OawFTxI2t1euXOzvAEcMl/lAItQdZWMAWcfNJBmSbdt2O80+L
K1c2CLf7FF8rO3doXgT4m8BZbKAH0nQ/7Vpgc4k1o0/g/HN8O/k8+SM0SKrd03mjyHiR55QJHIfgDKA8 0pQ1wN0hxjdSzz2clx7+wnESatE9obobtzUwsfiGpk9g/Ws4nVxNfvW1JKX3VN7IE5ZnWUo5Dn2welCc
lzAAB3TAqvdGr47r2o5tr3d4CGftmA7hE8NIYEBwNrkzcgK45xjEGkOOGEqxwIwD4mUYA8piCY4HdVx2 S+iBBSpg5brmq+K60mPX6Rwfw0Uzpn04pxhxDAguJneajgf3DAPfYMgQRTHmmDJArAhjQEkohGNeFZct
BBsD1dnV5oz2nywNtNo0AiM4GgL5aCfhIMHZSqyHQAYDr/JeYx8t6hmZ+9aGbrsKTqQCxFZFijPRlG5t wlpBeXaVOoPDJ0sJWjqNwABO+kB+MZOwF+FkzTd9IL2eU1qv5kcDek4WruHQXZvBmWCA6DqPccLr1A3n
jqROYQQV4YzMa7fuOY117tJpSBcYk4AMidmP84vx/eX0Dkya4oCAYwF0WZpeawZBAeV58qx+JAksC1Ew COgYBlACzsmiMuuB01jlLpWGVIHRCUiDaH+MLof317M70GmKAQKGOaSrQvWKM/AUUJZFz/JHFMEq5znF
XNavQMo7l6deHWRBa+FPJEkgSjBigLJnyBneEFpw2KCkwFwqtHfScJUltlsHd+/Vq66091K5wvapV9ZC Rf3yBL2ROPXyIPO0Iv5EogiCCCMKKHmGjOItSXMGWxTlmAmGpic1VlFi23Vwv69eNaXpS2kK06ZOUQuV
7Zfp9NLdeCHcYaHicDq9VCp1lOo4tDBr8mZ+LhddZoNggRAJjGDT1HdWpeCG2nIPSvXqnT4ilsNs3j0Y XWaza3vr+HCHuYzD2exaslRRquLQkFmB1/NzsWlTUwjqcR7BALZ1fhdlCq6xLXxQsJdr6ogYBjNxD8gQ
4oYjgjrjt6BoMFZtdsr6NUEpdnw48kCSZPwTLTIVJ0eQYpRxiGnWFyCbM8pMEcJ6v62CEtjMGRVl3DEj 1gzhVRm/IYoSxqjNVlG/JijGlgsnDgiQhJ2neSLj5ARijBIGYZp0OYjmLKW6CGHlb6OgeCZykvIi7qgm
RLKjJLGt6zQKht0rm4SyQyjFqiahyGK8JBmO+/VZrSng4NjufV7zllUxZxLDXOYSLau5jWMNkeRlyb0y ItBRFJnatRoFje4UTULRIRRkZZOQJyFekQSH3eqsVhBwdGr2Pq9Zy6iYcyHDQuQSRavuxqESkWRFyR3r
KZQHQeDVRhk6ILmdp2RKgxGssKjY6hj1T7zXsaI4vlV63dh3xo5fopGSvSbS8fjNYCvSX4x3PP4x5MvP FMo8z3MqpTQckMzMUyKlwQDWmJdoVYy6Z87rsqIwnEq+duhaQ8stpBGUnbqkw+GbhS1Bf7C8w+G3Rb6+
4zvT6yK2wuI13DU9aIZfCV4qM+gNupYF0oRPk/HV+U+YYNH/ehOUsh+aIBPjw/Qn8FfUvx799GH6Gvar Gt7pXhfRNeavyV3Bg0L4kcILZlp6LV1DA6HC+WQ4Hn2HCgb8j1dBMvumCiIxPsy+Q/4S+sdLP3uYvSb7
Bw0mZ4QyIp7fZkPJBRVby5hojaOvsqq4M9mZ3QlGspUP8vekSB9l91u/n/t1QfXBuXoA/C3HkeCwT4vj +EEJk1GSUsKf36ZDgQUlWkOZYIODz6Kq2HPRmd1xSpK1C+L3JI8fRfdbrS/cqqC6YI0fAH/JcMAZHOJi
vdFl79/gMtU1qeJX6rE6Q9ufEprjg715PrRcWrmo9oD6xZWNXF4seOTVl1FUd1HwUTOVz1aSVs2oq1it OW802fs3mEx2TbL4FXyMztC0pxDNcsF0ngsNk5YmqiwgfzGpIxMXCxY41WUUVV0U/KKQim8jSctm1Jao
FL2jN2sIaLVlSt9vmmJG5kq1rPJes1mudQ0cOKh2BpwBGTjytiJLVEQZw5FQDa/jWS2tHVuTn8lMk/9Z Rore05vVCDTaMsnvJwUxJwvJWlR5p94sV7x6FhyVngGrR3qWuK2IEhWklOKAy4bXcoyW1oytyfdkpsn/
Wpr8OCdJ4OOr87vz23+c39oG2GBbBC3Qr9ROu/aruGteoZWo0Py/3RVb9S1dMJRx+bgQ6DExYw2ZkqT+ LC1Nvp2ThODD8ehuNP1tNDUVMIVtADSEfqV2mrVfxl39Ci1J+fr/3b7Yqm7pnKKEic8lR4+RHmuIlCT4
2SyhTyEc+7Amq3UIJ77s9v+GOA7h/dwHvfx7ufxBLX++CeF0Ptdi1EXROYYXOIEXeA8vQ/gdXuADvAC8 z+dR+uTDqQsbst74cOaKbv8fiGEf3i9cUNs/F9sf5PbVrQ8fFwtFRl4UrVN4gTN4gffw0oef4QU+wAvA
wKnT0xuUkAzrRrRnR+VIxiR8hBbIXb2oos9h1KatOntJoNDBCEgeqJ/D6hSpx0akWzdRvdiK8lLWIkhR C3y0OspBEUmwakQ7ZlQOREzCL9AQcl8vKuEzGDRhy85eAEjpYAAk8+TPfnmK5Gct0o2bqNpsRHlBa+nF
rkn8ar+I972cRBTpSUyFS7ytF3yhJHMd3453eW3cLbjk1NqHnSNiGSV3pDJLPjQMky9+YJpa7hpnZFbm KFMgbukv4nwtJhF5fBam3CbOzvE+pSSxLdeMd3Ft3E+4wFTc+60jYiglPFKqJT5qiomFb6gmt9vKaZql
yef/moFGuGWiQrHfSHmVHsHMrFc68yChT57ffS0Dsn5v0PcsB6vfejSogs+M2eiTsQFewPGkGRKDMVUT euL7v6agJm6oKKU4rKS4Sg9grvdLnpkXpU+O214WAVmta+k7hoHlbzUalMGnx2zpk9YBXsByhBpCBq2q
mvUhOOV97/PVzfXtdDG9HU/uLq5vr/ShSpD0lI7C+hJZHcG3M/lCJG9KDHraGMmLbaPotFU5Pjh/dSrx AtT7fbCK+97V+PZmOlvOpsPJ3eXNdKwOVYSEpVQUVpfI8gi+HcnlPHpTYlDTxkBcbGtFp8nKcsH6u1WS
lVv1v+/91hHqh+18YaP0tnOvUSAk2uaGMxyZC5oQSXePtRNv7m//OHctB+kXxsA4+DvG+X32NaNPGYxg L82q/n3tNo5Q12/mC1NKZ7dwagVCSFt3OMWBvqBxHrV9rIx4ez/9dWQbBlILWsHQ+yfG2X3yOUmfEhjA
iRKOy2R7vegwV+/28AtW4EZGbNcG7nOB2K4qsvOyrIiH6r6896pctwll4ezeliRNczZob6Uai3Yqj1Eh CkUMF8n2ZtlCLtcO4HOa41pGbNYG5jKO6L4qsveyLIH78r588KpctQlF4WzflgRMfTZoulKORVuVR7MQ
s+3SJH1VZU2bhDgvUiyTI4pjhjkPQI9kBRARVImi7qxcU4ts7EZsfWQNTXfYLcPvuz3F3V+afBkPoX1x 2Xalk76ssrpNQozlMRbJEYUhxYx5oEayHAj3ykRRdVa2rkWm7JpsdWQ1THvYLcLvqznFPVyaXBEPvnlx
rjs1NTQ1o1Yz/d09A41xRGIMj4jjGGimB8gl/QFctCahXE9C5Z1fdxOAuHoq+4Ga9Xrn1FPSNiafilZ7 rjo1OTTVo1Y9/d0/Aw1xQEIMj4jhENJEDZAL+CO4bExCmZqEiju/6iYAMflV9AMV6s3eqaeArU0+Jayy
LoTPF3D1UEvWnlfbURpWOdzeu0486WZMRcyeaAJrjiXpZmTeWHvbMBZSl+HISrzwE1NR0OaX0VSlDTXU nA9XlzB+qCgry0t3FIqVBjd914on1YzJiDkQTWDMsQTcnCxqe28bxkJsUxwYiRe+YyoKSv0imsq0IYda
4qoz510GZXtQEcO7d2ANfeuFdk2qEFu8je8NFmuXcdt5Vc10ZXrqDHTfTtXyljlDqfqSUn8benB2eE/K THbmrI0gdfdKYHj3Doyhb7XRrEmlxAZu7b3BQG0j7lpL5UxXpKfWQPftUA1r6TMUy5eU6m3owdpjPUGz
LONCbuNOwV0vRDTjVLZBdOXW8+WrvYNlx6/myj447t1XkuckW/3mOW1TdtbfODAj4vJTVNT82MJwNNSp iAvhxr2E21YI0oSlog1K13Y1Xx4fHCxbbjlXdsGy7z6TLCPJ+ifHaqqyt/6Gnh4RF09RQf2xheKgr1Ix
mORQf+2pihSHJaMprIXIw8NDLlD0lW4wWyb0KYhoeogO/3x89OFPvx8dHp8cn54eyZy+Iahk+II2iEeM yaB67SmLFIMVTWPYcJ75x8eMo+BzusV0FaVPXpDGx+j4r6cnH/7y88nx6dnpx48nIqdvCSoQPqEtYgEl
5CJAj7QQiichjwyx58PHhOQm/oK1SK3yeuPGVHg9a2ANI4ipCHieEOH2g37TClf9G8Szo7n3/ycfTr2B GffQY5pziRORR4ro8/FjRDIdf96Gx0Z5vbXDlDsdY2ANAwhT7rEsItzuet26Frb81wvnJwvnz2cfPjo9
fDiee9bTSePp/dxrfWMq25kiLRWTpXxS07NqeObZHzaVbqfx0bCMJH23VdK6LFmRtlJvrLPz/518ON1R 8XG6cIyvs9rX+4XTeGMq2pk8LhiTlfiS07NyeOaYD5uSt1V7NCwiSd1tJbU2SpLHjdQbquz8p7MPH/cU
oN7LTvovKq8cHOjzYY3wJES4QmIdLBNKmdR5KO2sw8OSDgPoB30YQLxj3BebUIBPCS3iZYIYBpQQxDEP qPeik/6bzCtHR+p8GCM8ISKMEd94qyhNqeB5LPSswsOgDj3oel3oQbhn3Bf2y7FMlObhKkIUA4oIYpj5
9bwACzUNFzI7KIwki8mGxAVKym8Rgfpo/OlicXN7/fCvxfXFhSwq/agSucgZ/fbcD6FPl8v+dqggyiZC amCAuRyHc5EepJAkCcmWhDmKiscIT74an18ub6c3D78vby4vRVXpBiXJZUbTL89dH7rpatXd9aWMoosQ
voaYcNmZxG0xk/1SslKIJQZnu6Rc3F9e7pWzLJJESyqlDG4RSVZFVkuTK5gdlJ+DbHeEvdoGM6Smy6Wu yxASJlqTsElmcphKUhAxyOBkH5XL++vrg3RWeRQpSgWV3hSRaJ0nFTWxg+lR8R5kmsPvVDroKXW6Wqmy
epkg1WcBcK05thc2AZpR/16vLQxf7b0dWrOu0n1qdnu1oUV6t/efAAAA//8jBMkCYyAAAA== l3BSvguAbQyyHb8uoJ71H7TaUuNV1tvDNWkzPcRmv1VrXIR1VVDc381uxi7cTm9+u7oYTeHudnR+dXl1
DtPR+c30Ama/347ujFnd5XI6uriajs5nNqOBCyF72yVZHCJGA48kIf5ys5KXEvhpMICjU/jjD0Fm39be
SYZFcUjksILRQD6ThYxDnDM1bN+gLYYgjWPEWoMMaI0DK30sVzThjAY9y7V6Qq+yHzbVn43Gt/93Nqgp
9Q1D/CcAAP//qJ7f15ciAAA=
`, `,
}, },
"/": { "/": {
isDir: true, isDir: true,
local: "js", local: "pkg/js",
}, },
} }

View File

@ -6,8 +6,8 @@ import (
"strings" "strings"
"github.com/StackExchange/dnscontrol/models" "github.com/StackExchange/dnscontrol/models"
"github.com/StackExchange/dnscontrol/pkg/transform"
"github.com/StackExchange/dnscontrol/providers" "github.com/StackExchange/dnscontrol/providers"
"github.com/StackExchange/dnscontrol/transform"
"github.com/miekg/dns" "github.com/miekg/dns"
"github.com/miekg/dns/dnsutil" "github.com/miekg/dns/dnsutil"
) )
@ -44,7 +44,7 @@ func checkTarget(target string) error {
} }
// validateRecordTypes list of valid rec.Type values. Returns true if this is a real DNS record type, false means it is a pseudo-type used internally. // validateRecordTypes list of valid rec.Type values. Returns true if this is a real DNS record type, false means it is a pseudo-type used internally.
func validateRecordTypes(rec *models.RecordConfig, domain string) error { func validateRecordTypes(rec *models.RecordConfig, domain string, pTypes []string) error {
var validTypes = map[string]bool{ var validTypes = map[string]bool{
"A": true, "A": true,
"AAAA": true, "AAAA": true,
@ -55,9 +55,22 @@ func validateRecordTypes(rec *models.RecordConfig, domain string) error {
"NS": true, "NS": true,
"ALIAS": false, "ALIAS": false,
} }
_, ok := validTypes[rec.Type]
if _, ok := validTypes[rec.Type]; !ok { if !ok {
return fmt.Errorf("Unsupported record type (%v) domain=%v name=%v", rec.Type, domain, rec.Name) cType := providers.GetCustomRecordType(rec.Type)
if cType == nil {
return fmt.Errorf("Unsupported record type (%v) domain=%v name=%v", rec.Type, domain, rec.Name)
}
for _, providerType := range pTypes {
if providerType != cType.Provider {
return fmt.Errorf("Custom record type %s is not compatible with provider type %s", rec.Type, providerType)
}
}
//it is ok. Lets replace the type with real type and add metadata to say we checked it
rec.Metadata["orig_custom_type"] = rec.Type
if cType.RealType != "" {
rec.Type = cType.RealType
}
} }
return nil return nil
} }
@ -128,6 +141,10 @@ func checkTargets(rec *models.RecordConfig, domain string) (errs []error) {
check(checkTarget(target)) check(checkTarget(target))
case "TXT", "IMPORT_TRANSFORM": case "TXT", "IMPORT_TRANSFORM":
default: default:
if rec.Metadata["orig_custom_type"] != "" {
//it is a valid custom type. We perform no validation on target
return
}
errs = append(errs, fmt.Errorf("Unimplemented record type (%v) domain=%v name=%v", errs = append(errs, fmt.Errorf("Unimplemented record type (%v) domain=%v name=%v",
rec.Type, domain, rec.Name)) rec.Type, domain, rec.Name))
} }
@ -207,21 +224,34 @@ type Warning struct {
} }
func NormalizeAndValidateConfig(config *models.DNSConfig) (errs []error) { func NormalizeAndValidateConfig(config *models.DNSConfig) (errs []error) {
ptypeMap := map[string]string{}
for _, p := range config.DNSProviders {
ptypeMap[p.Name] = p.Type
}
for _, domain := range config.Domains { for _, domain := range config.Domains {
pTypes := []string{}
for p := range domain.DNSProviders {
pType, ok := ptypeMap[p]
if !ok {
errs = append(errs, fmt.Errorf("%s uses undefined DNS provider %s", domain.Name, p))
} else {
pTypes = append(pTypes, pType)
}
}
// Normalize Nameservers. // Normalize Nameservers.
for _, ns := range domain.Nameservers { for _, ns := range domain.Nameservers {
ns.Name = dnsutil.AddOrigin(ns.Name, domain.Name) ns.Name = dnsutil.AddOrigin(ns.Name, domain.Name)
ns.Name = strings.TrimRight(ns.Name, ".") ns.Name = strings.TrimRight(ns.Name, ".")
} }
// Normalize Records. // Normalize Records.
for _, rec := range domain.Records { for _, rec := range domain.Records {
if rec.TTL == 0 { if rec.TTL == 0 {
rec.TTL = models.DefaultTTL rec.TTL = models.DefaultTTL
} }
// Validate the unmodified inputs: // Validate the unmodified inputs:
if err := validateRecordTypes(rec, domain.Name); err != nil { if err := validateRecordTypes(rec, domain.Name, pTypes); err != nil {
errs = append(errs, err) errs = append(errs, err)
} }
if err := checkLabel(rec.Name, rec.Type, domain.Name); err != nil { if err := checkLabel(rec.Name, rec.Type, domain.Name); err != nil {

107
pkg/spflib/parse.go Normal file
View File

@ -0,0 +1,107 @@
package spflib
import (
"fmt"
"strings"
"github.com/StackExchange/dnscontrol/pkg/dnsresolver"
)
// SPFRecord is a parsed SPF TXT record: the total number of DNS lookups
// it requires (including lookups made by included records) and its
// ordered mechanisms.
type SPFRecord struct {
Lookups int
Parts []*SPFPart
}

// SPFPart is a single mechanism of an SPF record. Text is the original
// token (qualifier included); Lookups is the DNS-lookup cost of this
// mechanism; for include: mechanisms, IncludeRecord is the parsed record
// of the included domain.
type SPFPart struct {
Text string
Lookups int
IncludeRecord *SPFRecord
}
// Lookup fetches the TXT records for target via dnsres and returns the
// single SPF ("v=spf1 ...") record among them. Publishing zero or more
// than one SPF record is an error.
func Lookup(target string, dnsres dnsresolver.DnsResolver) (string, error) {
	txts, err := dnsres.GetTxt(target)
	if err != nil {
		return "", err
	}
	var spfs []string
	for _, record := range txts {
		if strings.HasPrefix(record, "v=spf1 ") {
			spfs = append(spfs, record)
		}
	}
	switch len(spfs) {
	case 0:
		return "", fmt.Errorf("%s has no spf TXT records", target)
	case 1:
		return spfs[0], nil
	default:
		return "", fmt.Errorf("%s has multiple spf TXT records", target)
	}
}
// qualifiers are the single-character SPF qualifier prefixes
// (neutral, softfail, fail, pass) stripped from a mechanism token
// before its name is matched in Parse.
var qualifiers = map[byte]bool{
'?': true,
'~': true,
'-': true,
'+': true,
}
// Parse parses a raw SPF TXT record, resolving include: mechanisms
// recursively via dnsres and tallying the DNS-lookup cost of each
// mechanism and of the record as a whole. text must start with
// "v=spf1 "; mechanisms other than all/a/mx/ip4/ip6/include are errors.
func Parse(text string, dnsres dnsresolver.DnsResolver) (*SPFRecord, error) {
	if !strings.HasPrefix(text, "v=spf1 ") {
		return nil, fmt.Errorf("Not an spf record")
	}
	parts := strings.Split(text, " ")
	rec := &SPFRecord{}
	for _, part := range parts[1:] {
		// BUG FIX: consecutive spaces in the record yield empty tokens from
		// strings.Split; indexing part[0] on one panicked. Skip them.
		if part == "" {
			continue
		}
		p := &SPFPart{Text: part}
		if qualifiers[part[0]] {
			part = part[1:]
		}
		rec.Parts = append(rec.Parts, p)
		if part == "all" {
			//all. nothing else matters.
			break
		} else if strings.HasPrefix(part, "a") || strings.HasPrefix(part, "mx") {
			// a/mx mechanisms each cost one lookup.
			rec.Lookups++
			p.Lookups = 1
		} else if strings.HasPrefix(part, "ip4:") || strings.HasPrefix(part, "ip6:") {
			//ip address, 0 lookups
			continue
		} else if strings.HasPrefix(part, "include:") {
			// include: costs one lookup itself, plus whatever the included
			// record costs (parsed recursively).
			rec.Lookups++
			includeTarget := strings.TrimPrefix(part, "include:")
			subRecord, err := Lookup(includeTarget, dnsres)
			if err != nil {
				return nil, err
			}
			p.IncludeRecord, err = Parse(subRecord, dnsres)
			if err != nil {
				return nil, fmt.Errorf("In included spf: %s", err)
			}
			rec.Lookups += p.IncludeRecord.Lookups
			p.Lookups = p.IncludeRecord.Lookups + 1
		} else {
			return nil, fmt.Errorf("Unsupported spf part %s", part)
		}
	}
	return rec, nil
}
// DumpSPF outputs an SPFRecord and related data for debugging purposes.
// It prints the total lookup count and the reassembled record at the
// given indent, then each lookup-costing part one level deeper, recursing
// into include: records a further level deeper still.
func DumpSPF(rec *SPFRecord, indent string) {
	fmt.Printf("%sTotal Lookups: %d\n", indent, rec.Lookups)
	fmt.Print(indent + "v=spf1")
	for _, part := range rec.Parts {
		fmt.Print(" " + part.Text)
	}
	fmt.Println()
	childIndent := indent + "\t"
	for _, part := range rec.Parts {
		if part.Lookups > 0 {
			fmt.Println(childIndent + part.Text)
		}
		if part.IncludeRecord != nil {
			DumpSPF(part.IncludeRecord, childIndent+"\t")
		}
	}
}

30
pkg/spflib/parse_test.go Normal file
View File

@ -0,0 +1,30 @@
package spflib
import (
"strings"
"testing"
"github.com/StackExchange/dnscontrol/pkg/dnsresolver"
)
// TestParse parses a representative multi-include SPF record using canned
// DNS answers from testdata-dns1.json, failing on any resolver or parse
// error, and dumps the parsed tree for manual inspection.
func TestParse(t *testing.T) {
// All include: targets below must be present in the testdata file.
dnsres, err := dnsresolver.NewResolverPreloaded("testdata-dns1.json")
if err != nil {
t.Fatal(err)
}
rec, err := Parse(strings.Join([]string{"v=spf1",
"ip4:198.252.206.0/24",
"ip4:192.111.0.0/24",
"include:_spf.google.com",
"include:mailgun.org",
"include:spf-basic.fogcreek.com",
"include:mail.zendesk.com",
"include:servers.mcsv.net",
"include:sendgrid.net",
"include:spf.mtasv.net",
"~all"}, " "), dnsres)
if err != nil {
t.Fatal(err)
}
DumpSPF(rec, "")
}

View File

@ -0,0 +1,64 @@
{
"_netblocks.google.com": {
"txt": [
"v=spf1 ip4:64.18.0.0/20 ip4:64.233.160.0/19 ip4:66.102.0.0/20 ip4:66.249.80.0/20 ip4:72.14.192.0/18 ip4:74.125.0.0/16 ip4:108.177.8.0/21 ip4:173.194.0.0/16 ip4:207.126.144.0/20 ip4:209.85.128.0/17 ip4:216.58.192.0/19 ip4:216.239.32.0/19 ~all"
]
},
"_netblocks2.google.com": {
"txt": [
"v=spf1 ip6:2001:4860:4000::/36 ip6:2404:6800:4000::/36 ip6:2607:f8b0:4000::/36 ip6:2800:3f0:4000::/36 ip6:2a00:1450:4000::/36 ip6:2c0f:fb50:4000::/36 ~all"
]
},
"_netblocks3.google.com": {
"txt": [
"v=spf1 ip4:172.217.0.0/19 ip4:108.177.96.0/19 ~all"
]
},
"_spf.google.com": {
"txt": [
"v=spf1 include:_netblocks.google.com include:_netblocks2.google.com include:_netblocks3.google.com ~all"
]
},
"mail.zendesk.com": {
"txt": [
"v=spf1 ip4:192.161.144.0/20 ip4:185.12.80.0/22 ip4:96.46.150.192/27 ip4:174.137.46.0/24 ip4:188.172.128.0/20 ip4:216.198.0.0/18 ~all"
]
},
"mailgun.org": {
"txt": [
"google-site-verification=FIGVOKZm6lQFDBJaiC2DdwvBy8TInunoGCt-1gnL4PA",
"v=spf1 include:spf1.mailgun.org include:spf2.mailgun.org ~all"
]
},
"sendgrid.net": {
"txt": [
"google-site-verification=NxyooVvVaIgddVa23KTlOEuVPuhffcDqJFV8RzWrAys",
"v=spf1 ip4:167.89.0.0/17 ip4:208.117.48.0/20 ip4:50.31.32.0/19 ip4:198.37.144.0/20 ip4:198.21.0.0/21 ip4:192.254.112.0/20 ip4:168.245.0.0/17 ~all"
]
},
"servers.mcsv.net": {
"txt": [
"v=spf1 ip4:205.201.128.0/20 ip4:198.2.128.0/18 ip4:148.105.8.0/21 ?all"
]
},
"spf-basic.fogcreek.com": {
"txt": [
"v=spf1 ip4:64.34.80.172 -all"
]
},
"spf.mtasv.net": {
"txt": [
"v=spf1 ip4:50.31.156.96/27 ip4:104.245.209.192/26 ~all"
]
},
"spf1.mailgun.org": {
"txt": [
"v=spf1 ip4:173.193.210.32/27 ip4:50.23.218.192/27 ip4:174.37.226.64/27 ip4:208.43.239.136/30 ip4:184.173.105.0/24 ip4:184.173.153.0/24 ip4:104.130.122.0/23 ip4:146.20.112.0/26 ~all"
]
},
"spf2.mailgun.org": {
"txt": [
"v=spf1 ip4:209.61.151.0/24 ip4:166.78.68.0/22 ip4:198.61.254.0/23 ip4:192.237.158.0/23 ip4:23.253.182.0/23 ip4:104.130.96.0/28 ip4:146.20.113.0/24 ip4:146.20.191.0/24 ~all"
]
}
}

View File

@ -9,9 +9,9 @@ import (
"time" "time"
"github.com/StackExchange/dnscontrol/models" "github.com/StackExchange/dnscontrol/models"
"github.com/StackExchange/dnscontrol/pkg/transform"
"github.com/StackExchange/dnscontrol/providers" "github.com/StackExchange/dnscontrol/providers"
"github.com/StackExchange/dnscontrol/providers/diff" "github.com/StackExchange/dnscontrol/providers/diff"
"github.com/StackExchange/dnscontrol/transform"
"github.com/miekg/dns/dnsutil" "github.com/miekg/dns/dnsutil"
) )
@ -33,13 +33,20 @@ Domain level metadata available:
- ip_conversions - ip_conversions
*/ */
func init() {
providers.RegisterDomainServiceProviderType("CLOUDFLAREAPI", newCloudflare, providers.CanUseAlias)
providers.RegisterCustomRecordType("CF_REDIRECT", "CLOUDFLAREAPI", "")
providers.RegisterCustomRecordType("CF_TEMP_REDIRECT", "CLOUDFLAREAPI", "")
}
type CloudflareApi struct { type CloudflareApi struct {
ApiKey string `json:"apikey"` ApiKey string `json:"apikey"`
ApiUser string `json:"apiuser"` ApiUser string `json:"apiuser"`
domainIndex map[string]string domainIndex map[string]string
nameservers map[string][]string nameservers map[string][]string
ipConversions []transform.IpConversion ipConversions []transform.IpConversion
ignoredLabels []string ignoredLabels []string
manageRedirects bool
} }
func labelMatches(label string, matches []string) bool { func labelMatches(label string, matches []string) bool {
@ -51,6 +58,7 @@ func labelMatches(label string, matches []string) bool {
} }
return false return false
} }
func (c *CloudflareApi) GetNameservers(domain string) ([]*models.Nameserver, error) { func (c *CloudflareApi) GetNameservers(domain string) ([]*models.Nameserver, error) {
if c.domainIndex == nil { if c.domainIndex == nil {
if err := c.fetchDomainList(); err != nil { if err := c.fetchDomainList(); err != nil {
@ -89,6 +97,13 @@ func (c *CloudflareApi) GetDomainCorrections(dc *models.DomainConfig) ([]*models
records = append(records[:i], records[i+1:]...) records = append(records[:i], records[i+1:]...)
} }
} }
if c.manageRedirects {
prs, err := c.getPageRules(id, dc.Name)
if err != nil {
return nil, err
}
records = append(records, prs...)
}
for _, rec := range dc.Records { for _, rec := range dc.Records {
if rec.Type == "ALIAS" { if rec.Type == "ALIAS" {
rec.Type = "CNAME" rec.Type = "CNAME"
@ -103,19 +118,45 @@ func (c *CloudflareApi) GetDomainCorrections(dc *models.DomainConfig) ([]*models
corrections := []*models.Correction{} corrections := []*models.Correction{}
for _, d := range del { for _, d := range del {
corrections = append(corrections, c.deleteRec(d.Existing.Original.(*cfRecord), id)) ex := d.Existing
if ex.Type == "PAGE_RULE" {
corrections = append(corrections, &models.Correction{
Msg: d.String(),
F: func() error { return c.deletePageRule(ex.Original.(*pageRule).ID, id) },
})
} else {
corrections = append(corrections, c.deleteRec(ex.Original.(*cfRecord), id))
}
} }
for _, d := range create { for _, d := range create {
corrections = append(corrections, c.createRec(d.Desired, id)...) des := d.Desired
if des.Type == "PAGE_RULE" {
corrections = append(corrections, &models.Correction{
Msg: d.String(),
F: func() error { return c.createPageRule(id, des.Target) },
})
} else {
corrections = append(corrections, c.createRec(des, id)...)
}
} }
for _, d := range mod { for _, d := range mod {
e, rec := d.Existing.Original.(*cfRecord), d.Desired rec := d.Desired
proxy := e.Proxiable && rec.Metadata[metaProxy] != "off" ex := d.Existing
corrections = append(corrections, &models.Correction{ if rec.Type == "PAGE_RULE" {
Msg: d.String(), corrections = append(corrections, &models.Correction{
F: func() error { return c.modifyRecord(id, e.ID, proxy, rec) }, Msg: d.String(),
}) F: func() error { return c.updatePageRule(ex.Original.(*pageRule).ID, id, rec.Target) },
})
} else {
e := ex.Original.(*cfRecord)
proxy := e.Proxiable && rec.Metadata[metaProxy] != "off"
corrections = append(corrections, &models.Correction{
Msg: d.String(),
F: func() error { return c.modifyRecord(id, e.ID, proxy, rec) },
})
}
} }
return corrections, nil return corrections, nil
} }
@ -163,10 +204,14 @@ func (c *CloudflareApi) preprocessConfig(dc *models.DomainConfig) error {
} }
} }
currentPrPrio := 1
// Normalize the proxy setting for each record. // Normalize the proxy setting for each record.
// A and CNAMEs: Validate. If null, set to default. // A and CNAMEs: Validate. If null, set to default.
// else: Make sure it wasn't set. Set to default. // else: Make sure it wasn't set. Set to default.
for _, rec := range dc.Records { // iterate backwards so first defined page rules have highest priority
for i := len(dc.Records) - 1; i >= 0; i-- {
rec := dc.Records[i]
if rec.Metadata == nil { if rec.Metadata == nil {
rec.Metadata = map[string]string{} rec.Metadata = map[string]string{}
} }
@ -193,6 +238,23 @@ func (c *CloudflareApi) preprocessConfig(dc *models.DomainConfig) error {
rec.Metadata[metaProxy] = val rec.Metadata[metaProxy] = val
} }
} }
// CF_REDIRECT record types. Encode target as $FROM,$TO,$PRIO,$CODE
if rec.Type == "CF_REDIRECT" || rec.Type == "CF_TEMP_REDIRECT" {
if !c.manageRedirects {
return fmt.Errorf("you must add 'manage_redirects: true' metadata to cloudflare provider to use CF_REDIRECT records")
}
parts := strings.Split(rec.Target, ",")
if len(parts) != 2 {
return fmt.Errorf("Invalid data specified for cloudflare redirect record")
}
code := 301
if rec.Type == "CF_TEMP_REDIRECT" {
code = 302
}
rec.Target = fmt.Sprintf("%s,%d,%d", rec.Target, currentPrPrio, code)
currentPrPrio++
rec.Type = "PAGE_RULE"
}
} }
// look for ip conversions and transform records // look for ip conversions and transform records
@ -224,7 +286,7 @@ func newCloudflare(m map[string]string, metadata json.RawMessage) (providers.DNS
api.ApiUser, api.ApiKey = m["apiuser"], m["apikey"] api.ApiUser, api.ApiKey = m["apiuser"], m["apikey"]
// check api keys from creds json file // check api keys from creds json file
if api.ApiKey == "" || api.ApiUser == "" { if api.ApiKey == "" || api.ApiUser == "" {
return nil, fmt.Errorf("Cloudflare apikey and apiuser must be provided.") return nil, fmt.Errorf("cloudflare apikey and apiuser must be provided")
} }
err := api.fetchDomainList() err := api.fetchDomainList()
@ -234,30 +296,30 @@ func newCloudflare(m map[string]string, metadata json.RawMessage) (providers.DNS
if len(metadata) > 0 { if len(metadata) > 0 {
parsedMeta := &struct { parsedMeta := &struct {
IPConversions string `json:"ip_conversions"` IPConversions string `json:"ip_conversions"`
IgnoredLabels []string `json:"ignored_labels"` IgnoredLabels []string `json:"ignored_labels"`
ManageRedirects bool `json:"manage_redirects"`
}{} }{}
err := json.Unmarshal([]byte(metadata), parsedMeta) err := json.Unmarshal([]byte(metadata), parsedMeta)
if err != nil { if err != nil {
return nil, err return nil, err
} }
api.manageRedirects = parsedMeta.ManageRedirects
// ignored_labels: // ignored_labels:
for _, l := range parsedMeta.IgnoredLabels { for _, l := range parsedMeta.IgnoredLabels {
api.ignoredLabels = append(api.ignoredLabels, l) api.ignoredLabels = append(api.ignoredLabels, l)
} }
// parse provider level metadata // parse provider level metadata
api.ipConversions, err = transform.DecodeTransformTable(parsedMeta.IPConversions) if len(parsedMeta.IPConversions) > 0 {
if err != nil { api.ipConversions, err = transform.DecodeTransformTable(parsedMeta.IPConversions)
return nil, err if err != nil {
return nil, err
}
} }
} }
return api, nil return api, nil
} }
func init() {
providers.RegisterDomainServiceProviderType("CLOUDFLAREAPI", newCloudflare, providers.CanUseAlias)
}
// Used on the "existing" records. // Used on the "existing" records.
type cfRecord struct { type cfRecord struct {
ID string `json:"id"` ID string `json:"id"`

View File

@ -5,7 +5,7 @@ import (
"testing" "testing"
"github.com/StackExchange/dnscontrol/models" "github.com/StackExchange/dnscontrol/models"
"github.com/StackExchange/dnscontrol/transform" "github.com/StackExchange/dnscontrol/pkg/transform"
) )
func newDomainConfig() *models.DomainConfig { func newDomainConfig() *models.DomainConfig {

View File

@ -5,15 +5,22 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"net/http" "net/http"
"time"
"strings"
"strconv"
"github.com/StackExchange/dnscontrol/models" "github.com/StackExchange/dnscontrol/models"
) )
const ( const (
baseURL = "https://api.cloudflare.com/client/v4/" baseURL = "https://api.cloudflare.com/client/v4/"
zonesURL = baseURL + "zones/" zonesURL = baseURL + "zones/"
recordsURL = zonesURL + "%s/dns_records/" recordsURL = zonesURL + "%s/dns_records/"
singleRecordURL = recordsURL + "%s" pageRulesURL = zonesURL + "%s/pagerules/"
singlePageRuleURL = pageRulesURL + "%s"
singleRecordURL = recordsURL + "%s"
) )
// get list of domains for account. Cache so the ids can be looked up from domain name // get list of domains for account. Cache so the ids can be looked up from domain name
@ -231,6 +238,99 @@ func (c *CloudflareApi) get(endpoint string, target interface{}) error {
return decoder.Decode(target) return decoder.Decode(target)
} }
func (c *CloudflareApi) getPageRules(id string, domain string) ([]*models.RecordConfig, error) {
url := fmt.Sprintf(pageRulesURL, id)
data := pageRuleResponse{}
if err := c.get(url, &data); err != nil {
return nil, fmt.Errorf("Error fetching page rule list from cloudflare: %s", err)
}
if !data.Success {
return nil, fmt.Errorf("Error fetching page rule list cloudflare: %s", stringifyErrors(data.Errors))
}
recs := []*models.RecordConfig{}
for _, pr := range data.Result {
// only interested in forwarding rules. Lets be very specific, and skip anything else
if len(pr.Actions) != 1 || len(pr.Targets) != 1 {
continue
}
if pr.Actions[0].ID != "forwarding_url" {
continue
}
err := json.Unmarshal([]byte(pr.Actions[0].Value), &pr.ForwardingInfo)
if err != nil {
return nil, err
}
var thisPr = pr
recs = append(recs, &models.RecordConfig{
Name: "@",
NameFQDN: domain,
Type: "PAGE_RULE",
//$FROM,$TO,$PRIO,$CODE
Target: fmt.Sprintf("%s,%s,%d,%d", pr.Targets[0].Constraint.Value, pr.ForwardingInfo.URL, pr.Priority, pr.ForwardingInfo.StatusCode),
Original: thisPr,
TTL: 1,
})
}
return recs, nil
}
func (c *CloudflareApi) deletePageRule(recordID, domainID string) error {
endpoint := fmt.Sprintf(singlePageRuleURL, domainID, recordID)
req, err := http.NewRequest("DELETE", endpoint, nil)
if err != nil {
return err
}
c.setHeaders(req)
_, err = handleActionResponse(http.DefaultClient.Do(req))
return err
}
func (c *CloudflareApi) updatePageRule(recordID, domainID string, target string) error {
if err := c.deletePageRule(recordID, domainID); err != nil {
return err
}
return c.createPageRule(domainID, target)
}
func (c *CloudflareApi) createPageRule(domainID string, target string) error {
endpoint := fmt.Sprintf(pageRulesURL, domainID)
return c.sendPageRule(endpoint, "POST", target)
}
func (c *CloudflareApi) sendPageRule(endpoint, method string, data string) error {
//from to priority code
parts := strings.Split(data, ",")
priority, _ := strconv.Atoi(parts[2])
code, _ := strconv.Atoi(parts[3])
fwdInfo := &pageRuleFwdInfo{
StatusCode: code,
URL: parts[1],
}
dat, _ := json.Marshal(fwdInfo)
pr := &pageRule{
Status: "active",
Priority: priority,
Targets: []pageRuleTarget{
{Target: "url", Constraint: pageRuleConstraint{Operator: "matches", Value: parts[0]}},
},
Actions: []pageRuleAction{
{ID: "forwarding_url", Value: json.RawMessage(dat)},
},
}
buf := &bytes.Buffer{}
enc := json.NewEncoder(buf)
if err := enc.Encode(pr); err != nil {
return err
}
req, err := http.NewRequest(method, endpoint, buf)
if err != nil {
return err
}
c.setHeaders(req)
_, err = handleActionResponse(http.DefaultClient.Do(req))
return err
}
func stringifyErrors(errors []interface{}) string { func stringifyErrors(errors []interface{}) string {
dat, err := json.Marshal(errors) dat, err := json.Marshal(errors)
if err != nil { if err != nil {
@ -244,6 +344,7 @@ type recordsResponse struct {
Result []*cfRecord `json:"result"` Result []*cfRecord `json:"result"`
ResultInfo pagingInfo `json:"result_info"` ResultInfo pagingInfo `json:"result_info"`
} }
type basicResponse struct { type basicResponse struct {
Success bool `json:"success"` Success bool `json:"success"`
Errors []interface{} `json:"errors"` Errors []interface{} `json:"errors"`
@ -253,6 +354,43 @@ type basicResponse struct {
} `json:"result"` } `json:"result"`
} }
type pageRuleResponse struct {
basicResponse
Result []*pageRule `json:"result"`
ResultInfo pagingInfo `json:"result_info"`
}
type pageRule struct {
ID string `json:"id,omitempty"`
Targets []pageRuleTarget `json:"targets"`
Actions []pageRuleAction `json:"actions"`
Priority int `json:"priority"`
Status string `json:"status"`
ModifiedOn time.Time `json:"modified_on,omitempty"`
CreatedOn time.Time `json:"created_on,omitempty"`
ForwardingInfo *pageRuleFwdInfo `json:"-"`
}
type pageRuleTarget struct {
Target string `json:"target"`
Constraint pageRuleConstraint `json:"constraint"`
}
type pageRuleConstraint struct {
Operator string `json:"operator"`
Value string `json:"value"`
}
type pageRuleAction struct {
ID string `json:"id"`
Value json.RawMessage `json:"value"`
}
type pageRuleFwdInfo struct {
URL string `json:"url"`
StatusCode int `json:"status_code"`
}
type zoneResponse struct { type zoneResponse struct {
basicResponse basicResponse
Result []struct { Result []struct {

View File

@ -1,48 +0,0 @@
## name.com Provider
### required config
In your providers config json file you must provide your name.com api username and access token:
```
"yourNameDotComProviderName":{
"apikey": "yourApiKeyFromName.com-klasjdkljasdlk235235235235",
"apiuser": "yourUsername"
}
```
In order to get api access you need to [apply for access](https://www.name.com/reseller/apply)
### example dns config js (registrar only):
```
var NAMECOM = NewRegistrar("myNameCom","NAMEDOTCOM");
var mynameServers = [
NAMESERVER("bill.ns.cloudflare.com"),
NAMESERVER("fred.ns.cloudflare.com")
];
D("example.tld",NAMECOM,myNameServers
//records handled by another provider...
);
```
### example config (registrar and records managed by namedotcom)
```
var NAMECOM = NewRegistrar("myNameCom","NAMEDOTCOM");
var NAMECOMDSP = NewDSP("myNameCom","NAMEDOTCOM")
D("exammple.tld", NAMECOM, NAMECOMDSP,
//ns[1-4].name.com used by default as nameservers
//override default ttl of 300s
DefaultTTL(3600),
A("test","1.2.3.4"),
//override ttl for one record only
CNAME("foo","some.otherdomain.tld.",TTL(100))
)
```

View File

@ -138,3 +138,24 @@ func init() {
return None{}, nil return None{}, nil
}) })
} }
type CustomRType struct {
Name string
Provider string
RealType string
}
// RegisterCustomRecordType registers a record type that is only valid for one provider.
// provider is the registered type of provider this is valid with
// name is the record type as it will appear in the js. (should be something like $PROVIDER_FOO)
// realType is the record type it will be replaced with after validation
func RegisterCustomRecordType(name, provider, realType string) {
customRecordTypes[name] = &CustomRType{Name: name, Provider: provider, RealType: realType}
}
// GetCustomRecordType returns a registered custom record type, or nil if none
func GetCustomRecordType(rType string) *CustomRType {
return customRecordTypes[rType]
}
var customRecordTypes = map[string]*CustomRType{}

View File

@ -88,7 +88,7 @@ Set a Go function that returns something useful
```go ```go
vm.Set("twoPlus", func(call otto.FunctionCall) otto.Value { vm.Set("twoPlus", func(call otto.FunctionCall) otto.Value {
right, _ := call.Argument(0).ToInteger() right, _ := call.Argument(0).ToInteger()
return, _ := vm.ToValue(2 + right) result, _ := vm.ToValue(2 + right)
return result return result
}) })
``` ```
@ -114,7 +114,7 @@ http://godoc.org/github.com/robertkrimen/otto/parser
Parse and return an AST Parse and return an AST
```go ```go
filenamee := "" // A filename is optional filename := "" // A filename is optional
src := ` src := `
// Sample xyzzy example // Sample xyzzy example
(function(){ (function(){
@ -167,6 +167,7 @@ The following are some limitations with otto:
* "use strict" will parse, but does nothing. * "use strict" will parse, but does nothing.
* The regular expression engine (re2/regexp) is not fully compatible with the ECMA5 specification. * The regular expression engine (re2/regexp) is not fully compatible with the ECMA5 specification.
* Otto targets ES5. ES6 features (eg: Typed Arrays) are not supported.
### Regular Expression Incompatibility ### Regular Expression Incompatibility

View File

@ -70,7 +70,7 @@ func digitValue(chr rune) int {
} }
func builtinGlobal_parseInt(call FunctionCall) Value { func builtinGlobal_parseInt(call FunctionCall) Value {
input := strings.TrimSpace(call.Argument(0).string()) input := strings.Trim(call.Argument(0).string(), builtinString_trim_whitespace)
if len(input) == 0 { if len(input) == 0 {
return NaNValue() return NaNValue()
} }
@ -153,7 +153,8 @@ var parseFloat_matchValid = regexp.MustCompile(`[0-9eE\+\-\.]|Infinity`)
func builtinGlobal_parseFloat(call FunctionCall) Value { func builtinGlobal_parseFloat(call FunctionCall) Value {
// Caveat emptor: This implementation does NOT match the specification // Caveat emptor: This implementation does NOT match the specification
input := strings.TrimSpace(call.Argument(0).string()) input := strings.Trim(call.Argument(0).string(), builtinString_trim_whitespace)
if parseFloat_matchBadSpecial.MatchString(input) { if parseFloat_matchBadSpecial.MatchString(input) {
return NaNValue() return NaNValue()
} }

View File

@ -132,6 +132,7 @@ The following are some limitations with otto:
* "use strict" will parse, but does nothing. * "use strict" will parse, but does nothing.
* The regular expression engine (re2/regexp) is not fully compatible with the ECMA5 specification. * The regular expression engine (re2/regexp) is not fully compatible with the ECMA5 specification.
* Otto targets ES5. ES6 features (eg: Typed Arrays) are not supported.
Regular Expression Incompatibility Regular Expression Incompatibility

View File

@ -302,7 +302,11 @@ func (self *_runtime) convertCallParameter(v Value, t reflect.Type) reflect.Valu
} }
if t.Kind() == reflect.Interface { if t.Kind() == reflect.Interface {
iv := reflect.ValueOf(v.export()) e := v.export()
if e == nil {
return reflect.Zero(t)
}
iv := reflect.ValueOf(e)
if iv.Type().AssignableTo(t) { if iv.Type().AssignableTo(t) {
return iv return iv
} }
@ -352,20 +356,52 @@ func (self *_runtime) convertCallParameter(v Value, t reflect.Type) reflect.Valu
tt := t.Elem() tt := t.Elem()
for i := int64(0); i < l; i++ { if o.class == "Array" {
p, ok := o.property[strconv.FormatInt(i, 10)] for i := int64(0); i < l; i++ {
if !ok { p, ok := o.property[strconv.FormatInt(i, 10)]
continue if !ok {
continue
}
e, ok := p.value.(Value)
if !ok {
continue
}
ev := self.convertCallParameter(e, tt)
s.Index(int(i)).Set(ev)
}
} else if o.class == "GoArray" {
var gslice bool
switch o.value.(type) {
case *_goSliceObject:
gslice = true
case *_goArrayObject:
gslice = false
} }
e, ok := p.value.(Value) for i := int64(0); i < l; i++ {
if !ok { var p *_property
continue if gslice {
p = goSliceGetOwnProperty(o, strconv.FormatInt(i, 10))
} else {
p = goArrayGetOwnProperty(o, strconv.FormatInt(i, 10))
}
if p == nil {
continue
}
e, ok := p.value.(Value)
if !ok {
continue
}
ev := self.convertCallParameter(e, tt)
s.Index(int(i)).Set(ev)
} }
ev := self.convertCallParameter(e, tt)
s.Index(int(i)).Set(ev)
} }
return s return s

View File

@ -4,6 +4,7 @@ import (
"fmt" "fmt"
"math" "math"
"reflect" "reflect"
"unicode/utf16"
) )
func (value Value) bool() bool { func (value Value) bool() bool {
@ -32,6 +33,8 @@ func (value Value) bool() bool {
return true return true
case string: case string:
return 0 != len(value) return 0 != len(value)
case []uint16:
return 0 != len(utf16.Decode(value))
} }
if value.IsObject() { if value.IsObject() {
return true return true

View File

@ -11,7 +11,7 @@ import (
var stringToNumberParseInteger = regexp.MustCompile(`^(?:0[xX])`) var stringToNumberParseInteger = regexp.MustCompile(`^(?:0[xX])`)
func parseNumber(value string) float64 { func parseNumber(value string) float64 {
value = strings.TrimSpace(value) value = strings.Trim(value, builtinString_trim_whitespace)
if value == "" { if value == "" {
return 0 return 0

6
vendor/vendor.json vendored
View File

@ -303,10 +303,10 @@
"revisionTime": "2016-04-18T18:49:04Z" "revisionTime": "2016-04-18T18:49:04Z"
}, },
{ {
"checksumSHA1": "UH75lsKCrVFdCZvJchkAPo2QXjw=", "checksumSHA1": "EqyHXBcg5cWi4ERsMXN6g1opi1o=",
"path": "github.com/robertkrimen/otto", "path": "github.com/robertkrimen/otto",
"revision": "7d9cbc2befca39869eb0e5bcb0f44c0692c2f8ff", "revision": "21ec96599b1279b5673e4df0097dd56bb8360068",
"revisionTime": "2016-07-28T22:04:12Z" "revisionTime": "2017-04-24T10:46:44Z"
}, },
{ {
"checksumSHA1": "qgziiO3/QDVJMKw2nGrUbC8QldY=", "checksumSHA1": "qgziiO3/QDVJMKw2nGrUbC8QldY=",