diff --git a/CHANGELOG.md b/CHANGELOG.md index c39a899..5611203 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,11 @@ https://github.com/octodns/octodns/pull/822 for more information. Providers that have been extracted in this release include: * [CloudflareProvider](https://github.com/octodns/octodns-cloudflare/) + * [ConstellixProvider](https://github.com/octodns/octodns-constellix/) + * [DigitalOceanProvider](https://github.com/octodns/octodns-digitalocean/) * [DnsimpleProvider](https://github.com/octodns/octodns-dnsimple/) + * [DnsMadeEasyProvider](https://github.com/octodns/octodns-dnsmadeeasy/) + * [DynProvider](https://github.com/octodns/octodns-dynprovider/) * [Ns1Provider](https://github.com/octodns/octodns-ns1/) * [PowerDnsProvider](https://github.com/octodns/octodns-powerdns/) * [Route53Provider](https://github.com/octodns/octodns-route53/) also diff --git a/README.md b/README.md index 46e3597..e7ce969 100644 --- a/README.md +++ b/README.md @@ -195,11 +195,11 @@ The table below lists the providers octoDNS supports. We're currently in the pro | [AzureProvider](/octodns/provider/azuredns.py) | | azure-identity, azure-mgmt-dns, azure-mgmt-trafficmanager | A, AAAA, CAA, CNAME, MX, NS, PTR, SRV, TXT | Alpha (A, AAAA, CNAME) | | | [Akamai](/octodns/provider/edgedns.py) | | edgegrid-python | A, AAAA, CNAME, MX, NAPTR, NS, PTR, SPF, SRV, SSHFP, TXT | No | | | [CloudflareProvider](https://github.com/octodns/octodns-cloudflare/) | [octodns_cloudflare](https://github.com/octodns/octodns-cloudflare/) | | | | | -| [ConstellixProvider](/octodns/provider/constellix.py) | | | A, AAAA, ALIAS (ANAME), CAA, CNAME, MX, NS, PTR, SPF, SRV, TXT | Yes | CAA tags restricted | -| [DigitalOceanProvider](/octodns/provider/digitalocean.py) | | | A, AAAA, CAA, CNAME, MX, NS, TXT, SRV | No | CAA tags restricted | -| [DnsMadeEasyProvider](/octodns/provider/dnsmadeeasy.py) | | | A, AAAA, ALIAS (ANAME), CAA, CNAME, MX, NS, PTR, SPF, SRV, TXT | No | CAA tags restricted | +| [ConstellixProvider](https://github.com/octodns/octodns-constellix/) | [octodns_constellix](https://github.com/octodns/octodns-constellix/) | | | | | +| [DigitalOceanProvider](https://github.com/octodns/octodns-digitalocean/) | [octodns_digitalocean](https://github.com/octodns/octodns-digitalocean/) | | | | | +| [DnsMadeEasyProvider](https://github.com/octodns/octodns-dnsmadeeasy/) | [octodns_dnsmadeeasy](https://github.com/octodns/octodns-dnsmadeeasy/) | | | | | | [DnsimpleProvider](https://github.com/octodns/octodns-dnsimple/) | [octodns_dnsimple](https://github.com/octodns/octodns-dnsimple/) | | | | | -| [DynProvider](/octodns/provider/dyn.py) | | dyn | All | Both | | +| [DynProvider](https://github.com/octodns/octodns-dyn/) (deprecated) | [octodns_dyn](https://github.com/octodns/octodns-dyn/) | | | | | | [EasyDNSProvider](/octodns/provider/easydns.py) | | | A, AAAA, CAA, CNAME, MX, NAPTR, NS, SRV, TXT | No | | | [EtcHostsProvider](/octodns/provider/etc_hosts.py) | | | A, AAAA, ALIAS, CNAME | No | | | [EnvVarSource](/octodns/source/envvar.py) | | | TXT | No | read-only environment variable injection | diff --git a/octodns/provider/constellix.py b/octodns/provider/constellix.py index 50e56ec..d39b581 100644 --- a/octodns/provider/constellix.py +++ b/octodns/provider/constellix.py @@ -5,1113 +5,19 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from collections import defaultdict -from requests import Session -from base64 import b64encode, standard_b64encode -from pycountry_convert import 
country_alpha2_to_continent_code -import hashlib -import hmac -import logging -import time - -from ..record import Record -from . import ProviderException -from .base import BaseProvider - - -class ConstellixClientException(ProviderException): - pass - - -class ConstellixClientBadRequest(ConstellixClientException): - - def __init__(self, resp): - errors = '\n - '.join(resp.json()['errors']) - super(ConstellixClientBadRequest, self).__init__(f'\n - {errors}') - - -class ConstellixClientUnauthorized(ConstellixClientException): - - def __init__(self): - super(ConstellixClientUnauthorized, self).__init__('Unauthorized') - - -class ConstellixClientNotFound(ConstellixClientException): - - def __init__(self): - super(ConstellixClientNotFound, self).__init__('Not Found') - - -class ConstellixClient(object): - BASE = 'https://api.dns.constellix.com/v1' - - def __init__(self, api_key, secret_key, ratelimit_delay=0.0): - self.api_key = api_key - self.secret_key = secret_key - self.ratelimit_delay = ratelimit_delay - self._sess = Session() - self._sess.headers.update({'x-cnsdns-apiKey': self.api_key}) - self._domains = None - self._pools = {'A': None, 'AAAA': None, 'CNAME': None} - self._geofilters = None - - def _current_time(self): - return str(int(time.time() * 1000)) - - def _hmac_hash(self, now): - return hmac.new(self.secret_key.encode('utf-8'), now.encode('utf-8'), - digestmod=hashlib.sha1).digest() - - def _request(self, method, path, params=None, data=None): - now = self._current_time() - hmac_hash = self._hmac_hash(now) - - headers = { - 'x-cnsdns-hmac': b64encode(hmac_hash), - 'x-cnsdns-requestDate': now - } - - url = f'{self.BASE}{path}' - resp = self._sess.request(method, url, headers=headers, - params=params, json=data) - if resp.status_code == 400: - raise ConstellixClientBadRequest(resp) - if resp.status_code == 401: - raise ConstellixClientUnauthorized() - if resp.status_code == 404: - raise ConstellixClientNotFound() - resp.raise_for_status() - time.sleep(self.ratelimit_delay) - return resp - - @property - def domains(self): - if self._domains is None: - zones = [] - - resp = self._request('GET', '/domains').json() - zones += resp - - self._domains = {f'{z["name"]}.': z['id'] for z in zones} - - return self._domains - - def domain(self, name): - zone_id = self.domains.get(name, False) - if not zone_id: - raise ConstellixClientNotFound() - path = f'/domains/{zone_id}' - return self._request('GET', path).json() - - def domain_create(self, name): - resp = self._request('POST', '/domains', data={'names': [name]}) - # Add newly created zone to domain cache - self._domains[f'{name}.'] = resp.json()[0]['id'] - - def domain_enable_geoip(self, domain_name): - domain = self.domain(domain_name) - if domain['hasGeoIP'] is False: - domain_id = self.domains[domain_name] - self._request( - 'PUT', - f'/domains/{domain_id}', - data={'hasGeoIP': True} - ) - - def _absolutize_value(self, value, zone_name): - if value == '': - value = zone_name - elif not value.endswith('.'): - value = f'{value}.{zone_name}' - - return value - - def records(self, zone_name): - zone_id = self.domains.get(zone_name, False) - if not zone_id: - raise ConstellixClientNotFound() - path = f'/domains/{zone_id}/records' - - resp = self._request('GET', path).json() - for record in resp: - # change ANAME records to ALIAS - if record['type'] == 'ANAME': - record['type'] = 'ALIAS' - - # change relative values to absolute - value = record['value'] - if record['type'] in ['ALIAS', 'CNAME', 'MX', 'NS', 'SRV']: - if isinstance(value, 
str): - record['value'] = self._absolutize_value(value, - zone_name) - if isinstance(value, list): - for v in value: - v['value'] = self._absolutize_value(v['value'], - zone_name) - - return resp - - def record_create(self, zone_name, record_type, params): - # change ALIAS records to ANAME - if record_type == 'ALIAS': - record_type = 'ANAME' - - zone_id = self.domains.get(zone_name, False) - path = f'/domains/{zone_id}/records/{record_type}' - - self._request('POST', path, data=params) - - def record_delete(self, zone_name, record_type, record_id): - # change ALIAS records to ANAME - if record_type == 'ALIAS': - record_type = 'ANAME' - - zone_id = self.domains.get(zone_name, False) - path = f'/domains/{zone_id}/records/{record_type}/{record_id}' - self._request('DELETE', path) - - def pools(self, pool_type): - if self._pools[pool_type] is None: - self._pools[pool_type] = {} - path = f'/pools/{pool_type}' - response = self._request('GET', path).json() - for pool in response: - self._pools[pool_type][pool['id']] = pool - return self._pools[pool_type].values() - - def pool(self, pool_type, pool_name): - pools = self.pools(pool_type) - for pool in pools: - if pool['name'] == pool_name and pool['type'] == pool_type: - return pool - return None - - def pool_by_id(self, pool_type, pool_id): - pools = self.pools(pool_type) - for pool in pools: - if pool['id'] == pool_id: - return pool - return None - - def pool_create(self, data): - path = f'/pools/{data.get("type")}' - # This returns a list of items, we want the first one - response = self._request('POST', path, data=data).json() - - # Update our cache - self._pools[data.get('type')][response[0]['id']] = response[0] - return response[0] - - def pool_update(self, pool_id, data): - path = f'/pools/{data.get("type")}/{pool_id}' - try: - self._request('PUT', path, data=data).json() - - except ConstellixClientBadRequest as e: - message = str(e) - if not message or "no changes to save" not in message: - raise e - return data - - def pool_delete(self, pool_type, pool_id): - path = f'/pools/{pool_type}/{pool_id}' - self._request('DELETE', path) - - # Update our cache - if self._pools[pool_type] is not None: - self._pools[pool_type].pop(pool_id, None) - - def geofilters(self): - if self._geofilters is None: - self._geofilters = {} - path = '/geoFilters' - response = self._request('GET', path).json() - for geofilter in response: - self._geofilters[geofilter['id']] = geofilter - return self._geofilters.values() - - def geofilter(self, geofilter_name): - geofilters = self.geofilters() - for geofilter in geofilters: - if geofilter['name'] == geofilter_name: - return geofilter - return None - - def geofilter_by_id(self, geofilter_id): - geofilters = self.geofilters() - for geofilter in geofilters: - if geofilter['id'] == geofilter_id: - return geofilter - return None - - def geofilter_create(self, data): - path = '/geoFilters' - response = self._request('POST', path, data=data).json() - - # Update our cache - self._geofilters[response[0]['id']] = response[0] - return response[0] - - def geofilter_update(self, geofilter_id, data): - path = f'/geoFilters/{geofilter_id}' - try: - self._request('PUT', path, data=data).json() - - except ConstellixClientBadRequest as e: - message = str(e) - if not message or "no changes to save" not in message: - raise e - return data - - def geofilter_delete(self, geofilter_id): - path = f'/geoFilters/{geofilter_id}' - self._request('DELETE', path) - - # Update our cache - if self._geofilters is not None: - 
self._geofilters.pop(geofilter_id, None) - - -class SonarClientException(ProviderException): - pass - - -class SonarClientBadRequest(SonarClientException): - - def __init__(self, resp): - errors = resp.text - super(SonarClientBadRequest, self).__init__(f'\n - {errors}') - - -class SonarClientUnauthorized(SonarClientException): - - def __init__(self): - super(SonarClientUnauthorized, self).__init__('Unauthorized') - - -class SonarClientNotFound(SonarClientException): - - def __init__(self): - super(SonarClientNotFound, self).__init__('Not Found') - - -class SonarClient(object): - BASE = 'https://api.sonar.constellix.com/rest/api' - - def __init__(self, log, api_key, secret_key, ratelimit_delay=0.0): - self.log = log - self.api_key = api_key - self.secret_key = secret_key - self.ratelimit_delay = ratelimit_delay - self._sess = Session() - self._sess.headers = { - 'Content-Type': 'application/json', - 'User-Agent': 'octoDNS', - } - self._agents = None - self._checks = {'tcp': None, 'http': None} - - def _current_time_ms(self): - return str(int(time.time() * 1000)) - - def _hmac_hash(self, now): - digester = hmac.new( - bytes(self.secret_key, "UTF-8"), - bytes(now, "UTF-8"), - hashlib.sha1) - signature = digester.digest() - hmac_text = str(standard_b64encode(signature), "UTF-8") - return hmac_text - - def _request(self, method, path, params=None, data=None): - now = self._current_time_ms() - hmac_text = self._hmac_hash(now) - - headers = { - 'x-cns-security-token': "{}:{}:{}".format( - self.api_key, - hmac_text, - now) - } - - url = f'{self.BASE}{path}' - resp = self._sess.request(method, url, headers=headers, - params=params, json=data) - if resp.status_code == 400: - raise SonarClientBadRequest(resp) - if resp.status_code == 401: - raise SonarClientUnauthorized() - if resp.status_code == 404: - raise SonarClientNotFound() - resp.raise_for_status() - - if self.ratelimit_delay >= 1.0: - self.log.info("Waiting for Sonar Rate Limit Delay") - elif self.ratelimit_delay > 0.0: - self.log.debug("Waiting for Sonar Rate Limit Delay") - time.sleep(self.ratelimit_delay) - - return resp - - @property - def agents(self): - if self._agents is None: - agents = [] - - data = self._request('GET', '/system/sites').json() - agents += data - - self._agents = {f'{a["name"]}.': a for a in agents} - - return self._agents - - def agents_for_regions(self, regions): - if regions[0] == "WORLD": - res_agents = [] - for agent in self.agents.values(): - res_agents.append(agent['id']) - return res_agents - - res_agents = [] - for agent in self.agents.values(): - if agent["region"] in regions: - res_agents.append(agent['id']) - return res_agents - - def parse_uri_id(self, url): - r = str(url).rfind("/") - res = str(url)[r + 1:] - return res - - def checks(self, check_type): - if self._checks[check_type] is None: - self._checks[check_type] = {} - path = f'/{check_type}' - data = self._request('GET', path).json() - for check in data: - self._checks[check_type][check['id']] = check - return self._checks[check_type].values() - - def check(self, check_type, check_name): - checks = self.checks(check_type) - for check in checks: - if check['name'] == check_name: - return check - return None - - def check_create(self, check_type, data): - path = f'/{check_type}' - response = self._request('POST', path, data=data) - # Parse check ID from Location response header - id = self.parse_uri_id(response.headers["Location"]) - # Get check details - path = f'/{check_type}/{id}' - data = self._request('GET', path, data=data).json() - - # 
Update our cache - self._checks[check_type]['id'] = data - return data - - def check_delete(self, check_id): - # first get check type - path = f'/check/type/{check_id}' - data = self._request('GET', path).json() - check_type = data['type'].lower() - - path = f'/{check_type}/{check_id}' - self._request('DELETE', path) - - # Update our cache - self._checks[check_type].pop(check_id, None) - - -class ConstellixProvider(BaseProvider): - ''' - Constellix DNS provider - - constellix: - class: octodns.provider.constellix.ConstellixProvider - # Your Contellix api key (required) - api_key: env/CONSTELLIX_API_KEY - # Your Constellix secret key (required) - secret_key: env/CONSTELLIX_SECRET_KEY - # Amount of time to wait between requests to avoid - # ratelimit (optional) - ratelimit_delay: 0.0 - ''' - SUPPORTS_GEO = False - SUPPORTS_DYNAMIC = True - SUPPORTS = set(('A', 'AAAA', 'ALIAS', 'CAA', 'CNAME', 'MX', - 'NS', 'PTR', 'SPF', 'SRV', 'TXT')) - - def __init__(self, id, api_key, secret_key, ratelimit_delay=0.0, - *args, **kwargs): - self.log = logging.getLogger(f'ConstellixProvider[{id}]') - self.log.debug('__init__: id=%s, api_key=***, secret_key=***', id) - super(ConstellixProvider, self).__init__(id, *args, **kwargs) - self._client = ConstellixClient(api_key, secret_key, ratelimit_delay) - self._sonar = SonarClient( - self.log, api_key, secret_key, ratelimit_delay - ) - self._zone_records = {} - - def _data_for_multiple(self, _type, records): - record = records[0] - if record['recordOption'] == 'pools': - return self._data_for_pool(_type, records) - return { - 'ttl': record['ttl'], - 'type': _type, - 'values': record['value'] - } - - def _data_for_pool(self, _type, records): - default_values = [] - fallback_pool_name = None - pools = {} - rules = [] - - for record in records: - # fetch record pool data - pool_id = record['pools'][0] - pool = self._client.pool_by_id(_type, pool_id) - - geofilter_id = 1 - if 'geolocation' in record.keys() \ - and record['geolocation'] is not None: - # fetch record geofilter data - geofilter_id = record['geolocation']['geoipFilter'] - geofilter = self._client.geofilter_by_id(geofilter_id) - - pool_name = pool['name'].split(':')[-1] - - # fetch default values from the World Default pool - if geofilter_id == 1: - fallback_pool_name = pool_name - for value in pool['values']: - default_values.append(value['value']) - - # populate pools - pools[pool_name] = { - 'fallback': None, - 'values': [] - } - for value in pool['values']: - pools[pool_name]['values'].append({ - 'value': value['value'], - 'weight': value['weight'] - }) - - # populate rules - if geofilter_id == 1: - rules.append({'pool': pool_name}) - else: - geos = [] - - if 'geoipContinents' in geofilter.keys(): - for continent_code in geofilter['geoipContinents']: - geos.append(continent_code) - - if 'geoipCountries' in geofilter.keys(): - for country_code in geofilter['geoipCountries']: - continent_code = \ - country_alpha2_to_continent_code(country_code) - geos.append(f'{continent_code}-{country_code}') - - if 'regions' in geofilter.keys(): - for region in geofilter['regions']: - geos.append(f'{region["continentCode"]}-' - f'{region["countryCode"]}-' - f'{region["regionCode"]}') - - rules.append({ - 'pool': pool_name, - 'geos': sorted(geos) - }) - - # set fallback pool - for pool_name in pools: - if pool_name != fallback_pool_name: - pools[pool_name]['fallback'] = fallback_pool_name - - res = { - 'ttl': record['ttl'], - 'type': _type, - 'dynamic': { - 'pools': dict( - sorted(pools.items(), key=lambda t: t[0])), - 
'rules': sorted(rules, key=lambda t: t['pool']) - }, - 'values': default_values - } - return res - - _data_for_A = _data_for_multiple - _data_for_AAAA = _data_for_multiple - - def _data_for_CAA(self, _type, records): - values = [] - record = records[0] - for value in record['value']: - values.append({ - 'flags': value['flag'], - 'tag': value['tag'], - 'value': value['data'] - }) - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def _data_for_NS(self, _type, records): - record = records[0] - return { - 'ttl': record['ttl'], - 'type': _type, - 'values': [value['value'] for value in record['value']] - } - - def _data_for_ALIAS(self, _type, records): - record = records[0] - return { - 'ttl': record['ttl'], - 'type': _type, - 'value': record['value'][0]['value'] - } - - _data_for_PTR = _data_for_ALIAS - - def _data_for_TXT(self, _type, records): - values = [value['value'].replace(';', '\\;') - for value in records[0]['value']] - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - _data_for_SPF = _data_for_TXT - - def _data_for_MX(self, _type, records): - values = [] - record = records[0] - for value in record['value']: - values.append({ - 'preference': value['level'], - 'exchange': value['value'] - }) - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def _data_for_single(self, _type, records): - record = records[0] - return { - 'ttl': record['ttl'], - 'type': _type, - 'value': record['value'] - } - - _data_for_CNAME = _data_for_single - - def _data_for_SRV(self, _type, records): - values = [] - record = records[0] - for value in record['value']: - values.append({ - 'port': value['port'], - 'priority': value['priority'], - 'target': value['value'], - 'weight': value['weight'] - }) - return { - 'type': _type, - 'ttl': records[0]['ttl'], - 'values': values - } - - def zone_records(self, zone): - if zone.name not in self._zone_records: - try: - self._zone_records[zone.name] = \ - self._client.records(zone.name) - except ConstellixClientNotFound: - return [] - - return self._zone_records[zone.name] - - def populate(self, zone, target=False, lenient=False): - self.log.debug('populate: name=%s, target=%s, lenient=%s', zone.name, - target, lenient) - - values = defaultdict(lambda: defaultdict(list)) - for record in self.zone_records(zone): - _type = record['type'] - if _type not in self.SUPPORTS: - self.log.warning('populate: skipping unsupported %s record', - _type) - continue - values[record['name']][record['type']].append(record) - - before = len(zone.records) - for name, types in values.items(): - for _type, records in types.items(): - data_for = getattr(self, f'_data_for_{_type}') - record = Record.new(zone, name, data_for(_type, records), - source=self, lenient=lenient) - zone.add_record(record, lenient=lenient) - - exists = zone.name in self._zone_records - self.log.info('populate: found %s records, exists=%s', - len(zone.records) - before, exists) - return exists - - def _healthcheck_config(self, record): - sonar_healthcheck = record._octodns.get('constellix', {}) \ - .get('healthcheck', None) - - if sonar_healthcheck is None: - return None - - healthcheck = {} - healthcheck["sonar_port"] = sonar_healthcheck.get('sonar_port', 80) - healthcheck["sonar_type"] = sonar_healthcheck.get('sonar_type', "TCP") - healthcheck["sonar_regions"] = sonar_healthcheck.get( - 'sonar_regions', - ["WORLD"] - ) - healthcheck["sonar_interval"] = sonar_healthcheck.get( - 'sonar_interval', - "ONEMINUTE" - ) - - return 
healthcheck - - def _params_for_multiple(self, record): - yield { - 'name': record.name, - 'ttl': record.ttl, - 'roundRobin': [{ - 'value': value - } for value in record.values] - } - - _params_for_A = _params_for_multiple - _params_for_AAAA = _params_for_multiple - - # An A record with this name must exist in this domain for - # this NS record to be valid. Need to handle checking if - # there is an A record before creating NS - _params_for_NS = _params_for_multiple - - def _params_for_single(self, record): - yield { - 'name': record.name, - 'ttl': record.ttl, - 'host': record.value, - } - - _params_for_CNAME = _params_for_single - - def _params_for_ALIAS(self, record): - yield { - 'name': record.name, - 'ttl': record.ttl, - 'roundRobin': [{ - 'value': record.value, - 'disableFlag': False - }] - } - - _params_for_PTR = _params_for_ALIAS - - def _params_for_MX(self, record): - values = [] - for value in record.values: - values.append({ - 'value': value.exchange, - 'level': value.preference - }) - yield { - 'value': value.exchange, - 'name': record.name, - 'ttl': record.ttl, - 'roundRobin': values - } - - def _params_for_SRV(self, record): - values = [] - for value in record.values: - values.append({ - 'value': value.target, - 'priority': value.priority, - 'weight': value.weight, - 'port': value.port - }) - for value in record.values: - yield { - 'name': record.name, - 'ttl': record.ttl, - 'roundRobin': values - } - - def _params_for_TXT(self, record): - # Constellix does not want values escaped - values = [] - for value in record.chunked_values: - values.append({ - 'value': value.replace('\\;', ';') - }) - yield { - 'name': record.name, - 'ttl': record.ttl, - 'roundRobin': values - } - - _params_for_SPF = _params_for_TXT - - def _params_for_CAA(self, record): - values = [] - for value in record.values: - values.append({ - 'tag': value.tag, - 'data': value.value, - 'flag': value.flags, - }) - yield { - 'name': record.name, - 'ttl': record.ttl, - 'roundRobin': values - } - - def _handle_pools(self, record): - healthcheck = self._healthcheck_config(record) - - # If we don't have dynamic, then there's no pools - if not getattr(record, 'dynamic', False): - return [] - - res_pools = [] - - for i, rule in enumerate(record.dynamic.rules): - pool_name = rule.data.get('pool') - pool = record.dynamic.pools.get(pool_name) - values = [ - { - 'value': value['value'], - 'weight': value['weight'], - } for value in pool.data.get('values', []) - ] - - # Make a pool name based on zone, record, type and name - generated_pool_name = \ - f'{record.zone.name}:{record.name}:{record._type}:{pool_name}' - - # Create Sonar checks if needed - if healthcheck is not None: - check_sites = self._sonar.\ - agents_for_regions(healthcheck["sonar_regions"]) - for value in values: - check_obj = self._create_update_check( - pool_type = record._type, - check_name = '{}-{}'.format( - generated_pool_name, - value['value'] - ), - check_type = healthcheck["sonar_type"].lower(), - value = value['value'], - port = healthcheck["sonar_port"], - interval = healthcheck["sonar_interval"], - sites = check_sites - ) - value['checkId'] = check_obj['id'] - value['policy'] = "followsonar" - - # OK, pool is valid, let's create it or update it - self.log.debug("Creating pool %s", generated_pool_name) - pool_obj = self._create_update_pool( - pool_name = generated_pool_name, - pool_type = record._type, - ttl = record.ttl, - values = values - ) - - # Now will crate GeoFilter for the pool - continents = [] - countries = [] - regions = [] - - for geo 
in rule.data.get('geos', []): - codes = geo.split('-') - n = len(geo) - if n == 2: - continents.append(geo) - elif n == 5: - countries.append(codes[1]) - else: - regions.append({ - 'continentCode': codes[0], - 'countryCode': codes[1], - 'regionCode': codes[2] - }) - - if len(continents) == 0 and \ - len(countries) == 0 and \ - len(regions) == 0: - pool_obj['geofilter'] = 1 - else: - self.log.debug( - "Creating geofilter %s", - generated_pool_name - ) - geofilter_obj = self._create_update_geofilter( - generated_pool_name, - continents, - countries, - regions - ) - pool_obj['geofilter'] = geofilter_obj['id'] - - res_pools.append(pool_obj) - return res_pools - - def _create_update_check( - self, - pool_type, - check_name, - check_type, - value, - port, - interval, - sites): - - check = { - 'name': check_name, - 'host': value, - 'port': port, - 'checkSites': sites, - 'interval': interval - } - if pool_type == "AAAA": - check['ipVersion'] = "IPV6" - else: - check['ipVersion'] = "IPV4" - - if check_type == "http": - check['protocolType'] = "HTTPS" - - existing_check = self._sonar.check(check_type, check_name) - if existing_check: - self._sonar.check_delete(existing_check['id']) - - return self._sonar.check_create(check_type, check) - - def _create_update_pool(self, pool_name, pool_type, ttl, values): - pool = { - 'name': pool_name, - 'type': pool_type, - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': ttl, - 'values': values - } - existing_pool = self._client.pool(pool_type, pool_name) - if not existing_pool: - return self._client.pool_create(pool) - - pool_id = existing_pool['id'] - updated_pool = self._client.pool_update(pool_id, pool) - updated_pool['id'] = pool_id - return updated_pool - - def _create_update_geofilter( - self, - geofilter_name, - continents, - countries, - regions): - geofilter = { - 'filterRulesLimit': 100, - 'name': geofilter_name, - 'geoipContinents': continents, - 'geoipCountries': countries, - 'regions': regions - } - if len(regions) == 0: - geofilter.pop('regions', None) - - existing_geofilter = self._client.geofilter(geofilter_name) - if not existing_geofilter: - return self._client.geofilter_create(geofilter) - - geofilter_id = existing_geofilter['id'] - updated_geofilter = self._client.geofilter_update( - geofilter_id, geofilter) - updated_geofilter['id'] = geofilter_id - return updated_geofilter - - def _apply_Create(self, change, domain_name): - new = change.new - params_for = getattr(self, f'_params_for_{new._type}') - pools = self._handle_pools(new) - - for params in params_for(new): - if len(pools) == 0: - self._client.record_create(new.zone.name, new._type, params) - elif len(pools) == 1: - params['pools'] = [pools[0]['id']] - params['recordOption'] = 'pools' - params.pop('roundRobin', None) - self.log.debug( - "Creating record %s %s", - new.zone.name, - new._type - ) - self._client.record_create( - new.zone.name, - new._type, - params - ) - else: - # To use GeoIPFilter feature we need to enable it for domain - self.log.debug("Enabling domain %s geo support", domain_name) - self._client.domain_enable_geoip(domain_name) - - # First we need to create World Default (1) Record - for pool in pools: - if pool['geofilter'] != 1: - continue - params['pools'] = [pool['id']] - params['recordOption'] = 'pools' - params['geolocation'] = { - 'geoipUserRegion': [pool['geofilter']] - } - params.pop('roundRobin', None) - self.log.debug( - "Creating record %s %s", - new.zone.name, - new._type) - self._client.record_create( - new.zone.name, - new._type, - params - ) - - # 
Now we can create the rest of records - for pool in pools: - if pool['geofilter'] == 1: - continue - params['pools'] = [pool['id']] - params['recordOption'] = 'pools' - params['geolocation'] = { - 'geoipUserRegion': [pool['geofilter']] - } - params.pop('roundRobin', None) - self.log.debug( - "Creating record %s %s", - new.zone.name, - new._type) - self._client.record_create( - new.zone.name, - new._type, - params) - - def _apply_Update(self, change, domain_name): - self._apply_Delete(change, domain_name) - self._apply_Create(change, domain_name) - - def _apply_Delete(self, change, domain_name): - existing = change.existing - zone = existing.zone - - # if it is dynamic pools record, we need to delete World Default last - world_default_record = None - - for record in self.zone_records(zone): - if existing.name == record['name'] and \ - existing._type == record['type']: - - # handle dynamic record - if record['recordOption'] == 'pools': - if record['geolocation'] is None: - world_default_record = record - else: - if record['geolocation']['geoipFilter'] == 1: - world_default_record = record - else: - # delete record - self.log.debug( - "Deleting record %s %s", - zone.name, - record['type']) - self._client.record_delete( - zone.name, - record['type'], - record['id']) - # delete geofilter - self.log.debug( - "Deleting geofilter %s", - zone.name) - self._client.geofilter_delete( - record['geolocation']['geoipFilter']) - - # delete pool - self.log.debug( - "Deleting pool %s %s", - zone.name, - record['type']) - self._client.pool_delete( - record['type'], - record['pools'][0]) - - # for all the rest records - else: - self._client.record_delete( - zone.name, record['type'], record['id']) - # delete World Default - if world_default_record: - # delete record - self.log.debug( - "Deleting record %s %s", - zone.name, - world_default_record['type'] - ) - self._client.record_delete( - zone.name, - world_default_record['type'], - world_default_record['id'] - ) - # delete pool - self.log.debug( - "Deleting pool %s %s", - zone.name, - world_default_record['type'] - ) - self._client.pool_delete( - world_default_record['type'], - world_default_record['pools'][0] - ) - - def _apply(self, plan): - desired = plan.desired - changes = plan.changes - self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name, - len(changes)) - - try: - self._client.domain(desired.name) - except ConstellixClientNotFound: - self.log.debug('_apply: no matching zone, creating domain') - self._client.domain_create(desired.name[:-1]) - - for change in changes: - class_name = change.__class__.__name__ - getattr(self, f'_apply_{class_name}')( - change, - desired.name) - - # Clear out the cache if any - self._zone_records.pop(desired.name, None) +from logging import getLogger + +logger = getLogger('Constellix') +try: + logger.warn('octodns_constellix shimmed. Update your provider class to ' + 'octodns_constellix.ConstellixProvider. ' + 'Shim will be removed in 1.0') + from octodns_constellix import ConstellixProvider, ConstellixBaseProvider + ConstellixProvider # pragma: no cover + ConstellixBaseProvider # pragma: no cover +except ModuleNotFoundError: + logger.exception('ConstellixProvider has been moved into a seperate ' + 'module, octodns_constellix is now required. 
Provider ' + 'class should be updated to ' + 'octodns_constellix.ConstellixProvider') + raise diff --git a/octodns/provider/digitalocean.py b/octodns/provider/digitalocean.py index 0a763ed..7bb2f1b 100644 --- a/octodns/provider/digitalocean.py +++ b/octodns/provider/digitalocean.py @@ -5,345 +5,18 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from collections import defaultdict -from requests import Session -import logging +from logging import getLogger -from ..record import Record -from . import ProviderException -from .base import BaseProvider - - -class DigitalOceanClientException(ProviderException): - pass - - -class DigitalOceanClientNotFound(DigitalOceanClientException): - - def __init__(self): - super(DigitalOceanClientNotFound, self).__init__('Not Found') - - -class DigitalOceanClientUnauthorized(DigitalOceanClientException): - - def __init__(self): - super(DigitalOceanClientUnauthorized, self).__init__('Unauthorized') - - -class DigitalOceanClient(object): - BASE = 'https://api.digitalocean.com/v2' - - def __init__(self, token): - sess = Session() - sess.headers.update({'Authorization': f'Bearer {token}'}) - self._sess = sess - - def _request(self, method, path, params=None, data=None): - url = f'{self.BASE}{path}' - resp = self._sess.request(method, url, params=params, json=data) - if resp.status_code == 401: - raise DigitalOceanClientUnauthorized() - if resp.status_code == 404: - raise DigitalOceanClientNotFound() - resp.raise_for_status() - return resp - - def domain(self, name): - path = f'/domains/{name}' - return self._request('GET', path).json() - - def domain_create(self, name): - # Digitalocean requires an IP on zone creation - self._request('POST', '/domains', data={'name': name, - 'ip_address': '192.0.2.1'}) - - # After the zone is created, immediately delete the record - records = self.records(name) - for record in records: - if record['name'] == '' and record['type'] == 'A': - self.record_delete(name, record['id']) - - def records(self, zone_name): - path = f'/domains/{zone_name}/records' - ret = [] - - page = 1 - while True: - data = self._request('GET', path, {'page': page}).json() - - ret += data['domain_records'] - links = data['links'] - - # https://developers.digitalocean.com/documentation/v2/#links - # pages exists if there is more than 1 page - # last doesn't exist if you're on the last page - try: - links['pages']['last'] - page += 1 - except KeyError: - break - - for record in ret: - # change any apex record to empty string - if record['name'] == '@': - record['name'] = '' - - # change any apex value to zone name - if record['data'] == '@': - record['data'] = zone_name - - return ret - - def record_create(self, zone_name, params): - path = f'/domains/{zone_name}/records' - # change empty name string to @, DO uses @ for apex record names - if params['name'] == '': - params['name'] = '@' - - self._request('POST', path, data=params) - - def record_delete(self, zone_name, record_id): - path = f'/domains/{zone_name}/records/{record_id}' - self._request('DELETE', path) - - -class DigitalOceanProvider(BaseProvider): - ''' - DigitalOcean DNS provider using API v2 - - digitalocean: - class: octodns.provider.digitalocean.DigitalOceanProvider - # Your DigitalOcean API token (required) - token: foo - ''' - SUPPORTS_GEO = False - SUPPORTS_DYNAMIC = False - SUPPORTS = set(('A', 'AAAA', 'CAA', 'CNAME', 'MX', 'NS', 'TXT', 'SRV')) - - def __init__(self, id, token, *args, **kwargs): - self.log = 
logging.getLogger(f'DigitalOceanProvider[{id}]') - self.log.debug('__init__: id=%s, token=***', id) - super(DigitalOceanProvider, self).__init__(id, *args, **kwargs) - self._client = DigitalOceanClient(token) - - self._zone_records = {} - - def _data_for_multiple(self, _type, records): - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': [r['data'] for r in records] - } - - _data_for_A = _data_for_multiple - _data_for_AAAA = _data_for_multiple - - def _data_for_CAA(self, _type, records): - values = [] - for record in records: - values.append({ - 'flags': record['flags'], - 'tag': record['tag'], - 'value': record['data'], - }) - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def _data_for_CNAME(self, _type, records): - record = records[0] - return { - 'ttl': record['ttl'], - 'type': _type, - 'value': f'{record["data"]}.' - } - - def _data_for_MX(self, _type, records): - values = [] - for record in records: - values.append({ - 'preference': record['priority'], - 'exchange': f'{record["data"]}.' - }) - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def _data_for_NS(self, _type, records): - values = [] - for record in records: - values.append(f'{record["data"]}.') - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values, - } - - def _data_for_SRV(self, _type, records): - values = [] - for record in records: - target = f'{record["data"]}.' if record['data'] != "." else "." - values.append({ - 'port': record['port'], - 'priority': record['priority'], - 'target': target, - 'weight': record['weight'] - }) - return { - 'type': _type, - 'ttl': records[0]['ttl'], - 'values': values - } - - def _data_for_TXT(self, _type, records): - values = [value['data'].replace(';', '\\;') for value in records] - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def zone_records(self, zone): - if zone.name not in self._zone_records: - try: - self._zone_records[zone.name] = \ - self._client.records(zone.name[:-1]) - except DigitalOceanClientNotFound: - return [] - - return self._zone_records[zone.name] - - def populate(self, zone, target=False, lenient=False): - self.log.debug('populate: name=%s, target=%s, lenient=%s', zone.name, - target, lenient) - - values = defaultdict(lambda: defaultdict(list)) - for record in self.zone_records(zone): - _type = record['type'] - if _type not in self.SUPPORTS: - self.log.warning('populate: skipping unsupported %s record', - _type) - continue - values[record['name']][record['type']].append(record) - - before = len(zone.records) - for name, types in values.items(): - for _type, records in types.items(): - data_for = getattr(self, f'_data_for_{_type}') - record = Record.new(zone, name, data_for(_type, records), - source=self, lenient=lenient) - zone.add_record(record, lenient=lenient) - - exists = zone.name in self._zone_records - self.log.info('populate: found %s records, exists=%s', - len(zone.records) - before, exists) - return exists - - def _params_for_multiple(self, record): - for value in record.values: - yield { - 'data': value, - 'name': record.name, - 'ttl': record.ttl, - 'type': record._type - } - - _params_for_A = _params_for_multiple - _params_for_AAAA = _params_for_multiple - _params_for_NS = _params_for_multiple - - def _params_for_CAA(self, record): - for value in record.values: - yield { - 'data': value.value, - 'flags': value.flags, - 'name': record.name, - 'tag': value.tag, - 'ttl': record.ttl, - 'type': record._type - } - - def 
_params_for_single(self, record): - yield { - 'data': record.value, - 'name': record.name, - 'ttl': record.ttl, - 'type': record._type - } - - _params_for_CNAME = _params_for_single - - def _params_for_MX(self, record): - for value in record.values: - yield { - 'data': value.exchange, - 'name': record.name, - 'priority': value.preference, - 'ttl': record.ttl, - 'type': record._type - } - - def _params_for_SRV(self, record): - for value in record.values: - yield { - 'data': value.target, - 'name': record.name, - 'port': value.port, - 'priority': value.priority, - 'ttl': record.ttl, - 'type': record._type, - 'weight': value.weight - } - - def _params_for_TXT(self, record): - # DigitalOcean doesn't want things escaped in values so we - # have to strip them here and add them when going the other way - for value in record.values: - yield { - 'data': value.replace('\\;', ';'), - 'name': record.name, - 'ttl': record.ttl, - 'type': record._type - } - - def _apply_Create(self, change): - new = change.new - params_for = getattr(self, f'_params_for_{new._type}') - for params in params_for(new): - self._client.record_create(new.zone.name[:-1], params) - - def _apply_Update(self, change): - self._apply_Delete(change) - self._apply_Create(change) - - def _apply_Delete(self, change): - existing = change.existing - zone = existing.zone - for record in self.zone_records(zone): - if existing.name == record['name'] and \ - existing._type == record['type']: - self._client.record_delete(zone.name[:-1], record['id']) - - def _apply(self, plan): - desired = plan.desired - changes = plan.changes - self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name, - len(changes)) - - domain_name = desired.name[:-1] - try: - self._client.domain(domain_name) - except DigitalOceanClientNotFound: - self.log.debug('_apply: no matching zone, creating domain') - self._client.domain_create(domain_name) - - for change in changes: - class_name = change.__class__.__name__ - getattr(self, f'_apply_{class_name}')(change) - - # Clear out the cache if any - self._zone_records.pop(desired.name, None) +logger = getLogger('DigitalOcean') +try: + logger.warn('octodns_digitalocean shimmed. Update your provider class to ' + 'octodns_digitalocean.DigitalOceanProvider. ' + 'Shim will be removed in 1.0') + from octodns_digitalocean import DigitalOceanProvider + DigitalOceanProvider # pragma: no cover +except ModuleNotFoundError: + logger.exception('DigitalOceanProvider has been moved into a seperate ' + 'module, octodns_digitalocean is now required. Provider ' + 'class should be updated to ' + 'octodns_digitalocean.DigitalOceanProvider') + raise diff --git a/octodns/provider/dnsmadeeasy.py b/octodns/provider/dnsmadeeasy.py index 9aab39c..68c718f 100644 --- a/octodns/provider/dnsmadeeasy.py +++ b/octodns/provider/dnsmadeeasy.py @@ -5,417 +5,18 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from collections import defaultdict -from requests import Session -from time import strftime, gmtime, sleep -import hashlib -import hmac -import logging - -from ..record import Record -from . 
import ProviderException -from .base import BaseProvider - - -class DnsMadeEasyClientException(ProviderException): - pass - - -class DnsMadeEasyClientBadRequest(DnsMadeEasyClientException): - - def __init__(self, resp): - errors = '\n - '.join(resp.json()['error']) - super(DnsMadeEasyClientBadRequest, self).__init__(f'\n - {errors}') - - -class DnsMadeEasyClientUnauthorized(DnsMadeEasyClientException): - - def __init__(self): - super(DnsMadeEasyClientUnauthorized, self).__init__('Unauthorized') - - -class DnsMadeEasyClientNotFound(DnsMadeEasyClientException): - - def __init__(self): - super(DnsMadeEasyClientNotFound, self).__init__('Not Found') - - -class DnsMadeEasyClient(object): - PRODUCTION = 'https://api.dnsmadeeasy.com/V2.0/dns/managed' - SANDBOX = 'https://api.sandbox.dnsmadeeasy.com/V2.0/dns/managed' - - def __init__(self, api_key, secret_key, sandbox=False, - ratelimit_delay=0.0): - self.api_key = api_key - self.secret_key = secret_key - self._base = self.SANDBOX if sandbox else self.PRODUCTION - self.ratelimit_delay = ratelimit_delay - self._sess = Session() - self._sess.headers.update({'x-dnsme-apiKey': self.api_key}) - self._domains = None - - def _current_time(self): - return strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime()) - - def _hmac_hash(self, now): - return hmac.new(self.secret_key.encode(), now.encode(), - hashlib.sha1).hexdigest() - - def _request(self, method, path, params=None, data=None): - now = self._current_time() - hmac_hash = self._hmac_hash(now) - - headers = { - 'x-dnsme-hmac': hmac_hash, - 'x-dnsme-requestDate': now - } - - url = f'{self._base}{path}' - resp = self._sess.request(method, url, headers=headers, - params=params, json=data) - if resp.status_code == 400: - raise DnsMadeEasyClientBadRequest(resp) - if resp.status_code in [401, 403]: - raise DnsMadeEasyClientUnauthorized() - if resp.status_code == 404: - raise DnsMadeEasyClientNotFound() - resp.raise_for_status() - sleep(self.ratelimit_delay) - return resp - - @property - def domains(self): - if self._domains is None: - zones = [] - - # has pages in resp, do we need paging? - resp = self._request('GET', '/').json() - zones += resp['data'] - - self._domains = {f'{z["name"]}.': z['id'] for z in zones} - - return self._domains - - def domain(self, name): - path = f'/id/{name}' - return self._request('GET', path).json() - - def domain_create(self, name): - self._request('POST', '/', data={'name': name}) - - def records(self, zone_name): - zone_id = self.domains.get(zone_name, False) - path = f'/{zone_id}/records' - ret = [] - - # has pages in resp, do we need paging? 
- resp = self._request('GET', path).json() - ret += resp['data'] - - for record in ret: - # change ANAME records to ALIAS - if record['type'] == 'ANAME': - record['type'] = 'ALIAS' - - # change relative values to absolute - value = record['value'] - if record['type'] in ['ALIAS', 'CNAME', 'MX', 'NS', 'SRV']: - if value == '': - record['value'] = zone_name - elif not value.endswith('.'): - record['value'] = f'{value}.{zone_name}' - - return ret - - def record_create(self, zone_name, params): - zone_id = self.domains.get(zone_name, False) - path = f'/{zone_id}/records' - - # change ALIAS records to ANAME - if params['type'] == 'ALIAS': - params['type'] = 'ANAME' - - self._request('POST', path, data=params) - - def record_delete(self, zone_name, record_id): - zone_id = self.domains.get(zone_name, False) - path = f'/{zone_id}/records/{record_id}' - self._request('DELETE', path) - - -class DnsMadeEasyProvider(BaseProvider): - ''' - DNSMadeEasy DNS provider using v2.0 API - - dnsmadeeasy: - class: octodns.provider.dnsmadeeasy.DnsMadeEasyProvider - # Your DnsMadeEasy api key (required) - api_key: env/DNSMADEEASY_API_KEY - # Your DnsMadeEasy secret key (required) - secret_key: env/DNSMADEEASY_SECRET_KEY - # Whether or not to use Sandbox environment - # (optional, default is false) - sandbox: true - ''' - SUPPORTS_GEO = False - SUPPORTS_DYNAMIC = False - SUPPORTS = set(('A', 'AAAA', 'ALIAS', 'CAA', 'CNAME', 'MX', - 'NS', 'PTR', 'SPF', 'SRV', 'TXT')) - - def __init__(self, id, api_key, secret_key, sandbox=False, - ratelimit_delay=0.0, *args, **kwargs): - self.log = logging.getLogger(f'DnsMadeEasyProvider[{id}]') - self.log.debug('__init__: id=%s, api_key=***, secret_key=***, ' - 'sandbox=%s', id, sandbox) - super(DnsMadeEasyProvider, self).__init__(id, *args, **kwargs) - self._client = DnsMadeEasyClient(api_key, secret_key, sandbox, - ratelimit_delay) - - self._zone_records = {} - - def _data_for_multiple(self, _type, records): - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': [r['value'] for r in records] - } - - _data_for_A = _data_for_multiple - _data_for_AAAA = _data_for_multiple - _data_for_NS = _data_for_multiple - - def _data_for_CAA(self, _type, records): - values = [] - for record in records: - values.append({ - 'flags': record['issuerCritical'], - 'tag': record['caaType'], - 'value': record['value'][1:-1] - }) - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def _data_for_TXT(self, _type, records): - values = [value['value'].replace(';', '\\;') for value in records] - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - _data_for_SPF = _data_for_TXT - - def _data_for_MX(self, _type, records): - values = [] - for record in records: - values.append({ - 'preference': record['mxLevel'], - 'exchange': record['value'] - }) - return { - 'ttl': records[0]['ttl'], - 'type': _type, - 'values': values - } - - def _data_for_single(self, _type, records): - record = records[0] - return { - 'ttl': record['ttl'], - 'type': _type, - 'value': record['value'] - } - - _data_for_CNAME = _data_for_single - _data_for_PTR = _data_for_single - _data_for_ALIAS = _data_for_single - - def _data_for_SRV(self, _type, records): - values = [] - for record in records: - values.append({ - 'port': record['port'], - 'priority': record['priority'], - 'target': record['value'], - 'weight': record['weight'] - }) - return { - 'type': _type, - 'ttl': records[0]['ttl'], - 'values': values - } - - def zone_records(self, zone): - if zone.name not in 
self._zone_records: - try: - self._zone_records[zone.name] = \ - self._client.records(zone.name) - except DnsMadeEasyClientNotFound: - return [] - - return self._zone_records[zone.name] - - def populate(self, zone, target=False, lenient=False): - self.log.debug('populate: name=%s, target=%s, lenient=%s', zone.name, - target, lenient) - - values = defaultdict(lambda: defaultdict(list)) - for record in self.zone_records(zone): - _type = record['type'] - if _type not in self.SUPPORTS: - self.log.warning('populate: skipping unsupported %s record', - _type) - continue - values[record['name']][record['type']].append(record) - - before = len(zone.records) - for name, types in values.items(): - for _type, records in types.items(): - data_for = getattr(self, f'_data_for_{_type}') - record = Record.new(zone, name, data_for(_type, records), - source=self, lenient=lenient) - zone.add_record(record, lenient=lenient) - - exists = zone.name in self._zone_records - self.log.info('populate: found %s records, exists=%s', - len(zone.records) - before, exists) - return exists - - def supports(self, record): - # DNS Made Easy does not support empty/NULL SRV records - # - # Attempting to sync such a record would generate the following error - # - # octodns.provider.dnsmadeeasy.DnsMadeEasyClientBadRequest: - # - Record value may not be a standalone dot. - # - # Skip the record and continue - if record._type == "SRV": - if 'value' in record.data: - targets = (record.data['value']['target'],) - else: - targets = [value['target'] for value in record.data['values']] - - if "." in targets: - self.log.warning( - 'supports: unsupported %s record with target (%s)', - record._type, targets - ) - return False - - return super(DnsMadeEasyProvider, self).supports(record) - - def _params_for_multiple(self, record): - for value in record.values: - yield { - 'value': value, - 'name': record.name, - 'ttl': record.ttl, - 'type': record._type - } - - _params_for_A = _params_for_multiple - _params_for_AAAA = _params_for_multiple - - # An A record with this name must exist in this domain for - # this NS record to be valid. 
Need to handle checking if - # there is an A record before creating NS - _params_for_NS = _params_for_multiple - - def _params_for_single(self, record): - yield { - 'value': record.value, - 'name': record.name, - 'ttl': record.ttl, - 'type': record._type - } - - _params_for_CNAME = _params_for_single - _params_for_PTR = _params_for_single - _params_for_ALIAS = _params_for_single - - def _params_for_MX(self, record): - for value in record.values: - yield { - 'value': value.exchange, - 'name': record.name, - 'mxLevel': value.preference, - 'ttl': record.ttl, - 'type': record._type - } - - def _params_for_SRV(self, record): - for value in record.values: - yield { - 'value': value.target, - 'name': record.name, - 'port': value.port, - 'priority': value.priority, - 'ttl': record.ttl, - 'type': record._type, - 'weight': value.weight - } - - def _params_for_TXT(self, record): - # DNSMadeEasy does not want values escaped - for value in record.chunked_values: - yield { - 'value': value.replace('\\;', ';'), - 'name': record.name, - 'ttl': record.ttl, - 'type': record._type - } - - _params_for_SPF = _params_for_TXT - - def _params_for_CAA(self, record): - for value in record.values: - yield { - 'value': value.value, - 'issuerCritical': value.flags, - 'name': record.name, - 'caaType': value.tag, - 'ttl': record.ttl, - 'type': record._type - } - - def _apply_Create(self, change): - new = change.new - params_for = getattr(self, f'_params_for_{new._type}') - for params in params_for(new): - self._client.record_create(new.zone.name, params) - - def _apply_Update(self, change): - self._apply_Delete(change) - self._apply_Create(change) - - def _apply_Delete(self, change): - existing = change.existing - zone = existing.zone - for record in self.zone_records(zone): - if existing.name == record['name'] and \ - existing._type == record['type']: - self._client.record_delete(zone.name, record['id']) - - def _apply(self, plan): - desired = plan.desired - changes = plan.changes - self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name, - len(changes)) - - domain_name = desired.name[:-1] - try: - self._client.domain(domain_name) - except DnsMadeEasyClientNotFound: - self.log.debug('_apply: no matching zone, creating domain') - self._client.domain_create(domain_name) - - for change in changes: - class_name = change.__class__.__name__ - getattr(self, f'_apply_{class_name}')(change) - - # Clear out the cache if any - self._zone_records.pop(desired.name, None) +from logging import getLogger + +logger = getLogger('DnsMadeEasy') +try: + logger.warn('octodns_dnsmadeeasy shimmed. Update your provider class to ' + 'octodns_dnsmadeeasy.DnsMadeEasyProvider. ' + 'Shim will be removed in 1.0') + from octodns_dnsmadeeasy import DnsMadeEasyProvider + DnsMadeEasyProvider # pragma: no cover +except ModuleNotFoundError: + logger.exception('DnsMadeEasyProvider has been moved into a seperate ' + 'module, octodns_dnsmadeeasy is now required. 
Provider ' + 'class should be updated to ' + 'octodns_dnsmadeeasy.DnsMadeEasyProvider') + raise diff --git a/octodns/provider/dyn.py b/octodns/provider/dyn.py index 53032ea..5363e9c 100644 --- a/octodns/provider/dyn.py +++ b/octodns/provider/dyn.py @@ -5,1399 +5,17 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from collections import defaultdict -from dyn.tm.errors import DynectGetError -from dyn.tm.services.dsf import DSFARecord, DSFAAAARecord, DSFCNAMERecord, \ - DSFFailoverChain, DSFMonitor, DSFNode, DSFRecordSet, DSFResponsePool, \ - DSFRuleset, TrafficDirector, get_all_dsf_monitors, get_all_dsf_services, \ - get_response_pool -from dyn.tm.session import DynectSession -from dyn.tm.zones import Zone as DynZone from logging import getLogger -from threading import Lock -from uuid import uuid4 -from ..record import Record, Update -from ..record.geo import GeoCodes -from .base import BaseProvider - - -############################################################################### -# -# The following monkey patching is to work around functionality that is lacking -# from DSFMonitor. You cannot set host or path (which we need) and there's no -# update method. What's more host & path aren't publically accessible on the -# object so you can't see their current values and depending on how the object -# came to be (constructor vs pulled from the api) the "private" location of -# those fields varies :-( -# -############################################################################### -def _monitor_host_get(self): - return self._host or self._options['host'] - - -DSFMonitor.host = property(_monitor_host_get) - - -def _monitor_host_set(self, value): - if self._options is None: - self._options = {} - self._host = self._options['host'] = value - - -DSFMonitor.host = DSFMonitor.host.setter(_monitor_host_set) - - -def _monitor_path_get(self): - return self._path or self._options['path'] - - -DSFMonitor.path = property(_monitor_path_get) - - -def _monitor_path_set(self, value): - if self._options is None: - self._options = {} - self._path = self._options['path'] = value - - -DSFMonitor.path = DSFMonitor.path.setter(_monitor_path_set) - - -def _monitor_protocol_get(self): - return self._protocol - - -DSFMonitor.protocol = property(_monitor_protocol_get) - - -def _monitor_protocol_set(self, value): - self._protocol = value - - -DSFMonitor.protocol = DSFMonitor.protocol.setter(_monitor_protocol_set) - - -def _monitor_port_get(self): - return self._port or self._options['port'] - - -DSFMonitor.port = property(_monitor_port_get) - - -def _monitor_port_set(self, value): - if self._options is None: - self._options = {} - self._port = self._options['port'] = value - - -DSFMonitor.port = DSFMonitor.port.setter(_monitor_port_set) - - -def _monitor_update(self, host, path, protocol, port): - # I can't see how to actually do this with the client lib so - # I'm having to hack around it. 
Have to provide all the - # options or else things complain - return self._update({ - 'protocol': protocol, - 'options': { - 'host': host, - 'path': path, - 'port': port, - 'timeout': DynProvider.MONITOR_TIMEOUT, - 'header': DynProvider.MONITOR_HEADER, - } - }) - - -DSFMonitor.update = _monitor_update -############################################################################### - - -def _monitor_doesnt_match(monitor, host, path, protocol, port): - return monitor.host != host or monitor.path != path or \ - monitor.protocol != protocol or int(monitor.port) != port - - -class _CachingDynZone(DynZone): - log = getLogger('_CachingDynZone') - - _cache = {} - - @classmethod - def get(cls, zone_name, create=False): - cls.log.debug('get: zone_name=%s, create=%s', zone_name, create) - # This works in dyn zone names, without the trailing . - try: - dyn_zone = cls._cache[zone_name] - cls.log.debug('get: cache hit') - except KeyError: - cls.log.debug('get: cache miss') - try: - dyn_zone = _CachingDynZone(zone_name) - cls.log.debug('get: fetched') - except DynectGetError: - if not create: - cls.log.debug("get: doesn't exist") - return None - # this value shouldn't really matter, it's not tied to - # whois or anything - hostname = f'hostmaster@{zone_name[:-1]}' - # Try again with the params necessary to create - dyn_zone = _CachingDynZone(zone_name, ttl=3600, - contact=hostname, - serial_style='increment') - cls.log.debug('get: created') - cls._cache[zone_name] = dyn_zone - - return dyn_zone - - @classmethod - def flush_zone(cls, zone_name): - '''Flushes the zone cache, if there is one''' - cls.log.debug('flush_zone: zone_name=%s', zone_name) - try: - del cls._cache[zone_name] - except KeyError: - pass - - def __init__(self, zone_name, *args, **kwargs): - super(_CachingDynZone, self).__init__(zone_name, *args, **kwargs) - self.flush_cache() - - def flush_cache(self): - self._cached_records = None - - def get_all_records(self): - if self._cached_records is None: - self._cached_records = \ - super(_CachingDynZone, self).get_all_records() - return self._cached_records - - def publish(self): - super(_CachingDynZone, self).publish() - self.flush_cache() - - -def _dynamic_value_sort_key(value): - return value['value'] - - -class DynProvider(BaseProvider): - ''' - Dynect Managed DNS provider - - dyn: - class: octodns.provider.dyn.DynProvider - # Your dynect customer name (required) - customer: cust - # Your dynect username (required) - username: user - # Your dynect password (required) - password: pass - # Whether or not to support TrafficDirectors and enable GeoDNS - # (optional, default is false) - traffic_directors_enabled: true - - Note: due to the way dyn.tm.session.DynectSession is managing things we can - only really have a single DynProvider configured. When you create a - DynectSession it's stored in a thread-local singleton. You don't invoke - methods on this session or a client that holds on to it. The client - libraries grab their per-thread session by accessing the singleton through - DynectSession.get_session(). That fundamentally doesn't support having more - than one account active at a time. See DynProvider._check_dyn_sess for some - related bits. 
- ''' - - RECORDS_TO_TYPE = { - 'a_records': 'A', - 'aaaa_records': 'AAAA', - 'alias_records': 'ALIAS', - 'caa_records': 'CAA', - 'cname_records': 'CNAME', - 'mx_records': 'MX', - 'naptr_records': 'NAPTR', - 'ns_records': 'NS', - 'ptr_records': 'PTR', - 'sshfp_records': 'SSHFP', - 'spf_records': 'SPF', - 'srv_records': 'SRV', - 'txt_records': 'TXT', - } - TYPE_TO_RECORDS = {v: k for k, v in RECORDS_TO_TYPE.items()} - SUPPORTS = set(TYPE_TO_RECORDS.keys()) - - # https://help.dyn.com/predefined-geotm-regions-groups/ - REGION_CODES = { - 'NA': 11, # Continental North America - 'SA': 12, # Continental South America - 'EU': 13, # Continental Europe - 'AF': 14, # Continental Africa - 'AS': 15, # Continental Asia - 'OC': 16, # Continental Australia/Oceania - 'AN': 17, # Continental Antarctica - } - # Reverse of ^ - REGION_CODES_LOOKUP = {code: geo for geo, code in REGION_CODES.items()} - - MONITOR_HEADER = 'User-Agent: Dyn Monitor' - MONITOR_TIMEOUT = 10 - - _sess_create_lock = Lock() - - def __init__(self, id, customer, username, password, - traffic_directors_enabled=False, *args, **kwargs): - self.log = getLogger(f'DynProvider[{id}]') - self.log.debug('__init__: id=%s, customer=%s, username=%s, ' - 'password=***, traffic_directors_enabled=%s', id, - customer, username, traffic_directors_enabled) - # we have to set this before calling super b/c SUPPORTS_GEO requires it - self.traffic_directors_enabled = traffic_directors_enabled - super(DynProvider, self).__init__(id, *args, **kwargs) - self.customer = customer - self.username = username - self.password = password - - self._cache = {} - self._traffic_directors = None - self._traffic_director_monitors = None - - @property - def SUPPORTS_GEO(self): - return self.traffic_directors_enabled - - @property - def SUPPORTS_DYNAMIC(self): - return self.traffic_directors_enabled - - def _check_dyn_sess(self): - # We don't have to worry about locking for the check since the - # underlying pieces are pre-thread. We can check to see if this thread - # has a session and if so we're good to go. - if DynectSession.get_session() is None: - # We need to create a new session for this thread and DynectSession - # creation is not thread-safe so we have to do the locking. If we - # don't and multiple sessions start creation before the the first - # has finished (long time b/c it makes http calls) the subsequent - # creates will blow away DynectSession._instances, potentially - # multiple times if there are multiple creates in flight. Only the - # last of these initial concurrent creates will exist in - # DynectSession._instances dict and the others will be lost. When - # this thread later tries to make api calls there won't be an - # accessible session available for it to use. 
- with self._sess_create_lock: - DynectSession(self.customer, self.username, self.password) - - def _data_for_A(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [r.address for r in records] - } - - _data_for_AAAA = _data_for_A - - def _data_for_ALIAS(self, _type, records): - # See note on ttl in _kwargs_for_ALIAS - record = records[0] - return { - 'type': _type, - 'ttl': record.ttl, - 'value': record.alias - } - - def _data_for_CAA(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [{'flags': r.flags, 'tag': r.tag, 'value': r.value} - for r in records], - } - - def _data_for_CNAME(self, _type, records): - record = records[0] - return { - 'type': _type, - 'ttl': record.ttl, - 'value': record.cname, - } - - def _data_for_MX(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [{'preference': r.preference, 'exchange': r.exchange} - for r in records], - } - - def _data_for_NAPTR(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [{ - 'order': r.order, - 'preference': r.preference, - 'flags': r.flags, - 'service': r.services, - 'regexp': r.regexp, - 'replacement': r.replacement, - } for r in records] - } - - def _data_for_NS(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [r.nsdname for r in records] - } - - def _data_for_PTR(self, _type, records): - record = records[0] - return { - 'type': _type, - 'ttl': record.ttl, - 'value': record.ptrdname, - } - - def _data_for_SPF(self, _type, records): - record = records[0] - return { - 'type': _type, - 'ttl': record.ttl, - 'values': [r.txtdata for r in records] - } - - _data_for_TXT = _data_for_SPF - - def _data_for_SSHFP(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [{ - 'algorithm': r.algorithm, - 'fingerprint_type': r.fptype, - 'fingerprint': r.fingerprint, - } for r in records], - } - - def _data_for_SRV(self, _type, records): - return { - 'type': _type, - 'ttl': records[0].ttl, - 'values': [{ - 'priority': r.priority, - 'weight': r.weight, - 'port': r.port, - 'target': r.target, - } for r in records], - } - - @property - def traffic_directors(self): - if self._traffic_directors is None: - self._check_dyn_sess() - - tds = defaultdict(dict) - for td in get_all_dsf_services(): - try: - fqdn, _type = td.label.split(':', 1) - except ValueError: - self.log.warn("Unsupported TrafficDirector '%s'", td.label) - continue - tds[fqdn][_type] = td - self._traffic_directors = dict(tds) - - return self._traffic_directors - - def _populate_geo_traffic_director(self, zone, fqdn, _type, td, rulesets, - lenient): - # We start out with something that will always show change in case this - # is a busted TD. This will prevent us from creating a duplicate td. 
- # We'll overwrite this with real data provided we have it - geo = {} - data = { - 'geo': geo, - 'type': _type, - 'ttl': td.ttl, - 'values': ['0.0.0.0'] - } - for ruleset in rulesets: - try: - record_set = ruleset.response_pools[0].rs_chains[0] \ - .record_sets[0] - except IndexError: - # problems indicate a malformed ruleset, ignore it - continue - if ruleset.label.startswith('default:'): - data_for = getattr(self, f'_data_for_{_type}') - data.update(data_for(_type, record_set.records)) - else: - # We've stored the geo in label - try: - code, _ = ruleset.label.split(':', 1) - except ValueError: - continue - values = [r.address for r in record_set.records] - geo[code] = values - - name = zone.hostname_from_fqdn(fqdn) - record = Record.new(zone, name, data, source=self) - zone.add_record(record, lenient=lenient) - - return record - - def _value_for_address(self, _type, record): - return { - 'value': record.address, - 'weight': record.weight, - } - - _value_for_A = _value_for_address - _value_for_AAAA = _value_for_address - - def _value_for_CNAME(self, _type, record): - return { - 'value': record.cname, - 'weight': record.weight, - } - - def _populate_dynamic_pools(self, _type, rulesets, response_pools): - default = {} - pools = {} - - data_for = getattr(self, f'_data_for_{_type}') - value_for = getattr(self, f'_value_for_{_type}') - - # Build the list of pools, we can't just read them off of rules b/c we - # won't see unused pools there. If/when we dis-allow unused pools we - # could probably change that and avoid the refresh - for response_pool in response_pools: - # We have to refresh the response pool to have access to its - # rs_chains and thus records, yeah... :-( - # TODO: look at rulesets first b/c they won't need a refresh... - response_pool.refresh() - try: - record_set = response_pool.rs_chains[0] \ - .record_sets[0] - except IndexError: - # problems indicate a malformed ruleset, ignore it - self.log.warn('_populate_dynamic_pools: ' - 'malformed response_pool "%s" ignoring', - response_pool.label) - continue - - label = response_pool.label - - if label == 'default': - # The default pool has the base record values - default = data_for(_type, record_set.records) - else: - if label not in pools: - # First time we've seen it get its data - # Note we'll have to set fallbacks as we go through rules - # b/c we can't determine them here - values = [value_for(_type, r) for r in record_set.records] - # Sort to ensure consistent ordering so we can compare them - values.sort(key=_dynamic_value_sort_key) - pools[label] = { - 'values': values, - } - - return default, pools - - def _populate_dynamic_rules(self, rulesets, pools): - rules = [] - - # Build the list of rules based on the rulesets - for ruleset in rulesets: - if ruleset.label.startswith('default:'): - # Ignore the default, it's implicit in our model - continue - - num_pools = len(ruleset.response_pools) - if num_pools > 0: - # Find the primary pool for this rule - pool = ruleset.response_pools[0].label - # TODO: verify pool exists - if num_pools > 1: - # We have a fallback, record it in the approrpriate pool. - # Note we didn't have fallback info when we populated the - # pools above so we're filling that info in here. It's - # possible that rules will have disagreeing values for the - # fallbacks. That's annoying but a sync should fix it and - # match stuff up with the config. 
- fallback = ruleset.response_pools[1].label - # TODO: verify fallback exists - if fallback != 'default': - pools[pool]['fallback'] = fallback - else: - self.log.warn('_populate_dynamic_pools: ' - 'ruleset "%s" has no response_pools', - ruleset.label) - continue - - # OK we have the rule's pool info, record it and work on the rule's - # matching criteria - rule = { - 'pool': pool, - } - - criteria_type = ruleset.criteria_type - if criteria_type == 'geoip': - # Geo - geo = ruleset.criteria['geoip'] - geos = [] - # Dyn uses the same 2-letter codes as octoDNS (except for - # continents) but it doesn't have the hierary, e.g. US is - # just US, not NA-US. We'll have to map these things back - for code in geo['country']: - geos.append(GeoCodes.country_to_code(code)) - for code in geo['province']: - geos.append(GeoCodes.province_to_code(code.upper())) - for code in geo['region']: - geos.append(self.REGION_CODES_LOOKUP[int(code)]) - geos.sort() - rule['geos'] = geos - elif criteria_type == 'always': - pass - else: - self.log.warn('_populate_dynamic_rules: ' - 'unsupported criteria_type "%s", ignoring', - criteria_type) - continue - - rules.append(rule) - - return rules - - def _populate_dynamic_traffic_director(self, zone, fqdn, _type, td, - rulesets, lenient): - # We'll go ahead and grab pools too, using all will include unref'd - # pools - response_pools = td.all_response_pools - - # Populate pools - default, pools = self._populate_dynamic_pools(_type, rulesets, - response_pools) - - # Populate rules - rules = self._populate_dynamic_rules(rulesets, pools) - - # We start out with something that will always show - # change in case this is a busted TD. This will prevent us from - # creating a duplicate td. We'll overwrite this with real data - # provide we have it - data = { - 'dynamic': { - 'pools': pools, - 'rules': rules, - }, - 'type': _type, - 'ttl': td.ttl, - } - # Include default's information in data - data.update(default) - - name = zone.hostname_from_fqdn(fqdn) - record = Record.new(zone, name, data, source=self, lenient=lenient) - zone.add_record(record, lenient=lenient) - - return record - - def _is_traffic_director_dynamic(self, td, rulesets): - for ruleset in rulesets: - try: - pieces = ruleset.label.split(':') - if len(pieces) == 2: - # It matches octoDNS's format - int(pieces[0]) - # It's an integer, so probably rule_num, thus dynamic - return True - except (IndexError, ValueError): - pass - # We didn't see any rulesets that look like a dynamic record so maybe - # geo... - return False - - def _populate_traffic_directors(self, zone, lenient): - self.log.debug('_populate_traffic_directors: zone=%s, lenient=%s', - zone.name, lenient) - td_records = set() - for fqdn, types in self.traffic_directors.items(): - for _type, td in types.items(): - # Does this TD belong to the current zone - td_zone = f'{td.nodes[0]["zone"]}.' 
- if td_zone != zone.name: - # Doesn't belong to the current zone, skip it - continue - # critical to call rulesets once, each call loads them :-( - rulesets = td.rulesets - if self._is_traffic_director_dynamic(td, rulesets): - record = \ - self._populate_dynamic_traffic_director(zone, fqdn, - _type, td, - rulesets, - lenient) - else: - record = \ - self._populate_geo_traffic_director(zone, fqdn, _type, - td, rulesets, - lenient) - td_records.add(record) - - return td_records - - def populate(self, zone, target=False, lenient=False): - self.log.debug('populate: name=%s, target=%s, lenient=%s', zone.name, - target, lenient) - - exists = False - before = len(zone.records) - - self._check_dyn_sess() - - td_records = set() - if self.traffic_directors_enabled: - td_records = self._populate_traffic_directors(zone, lenient) - exists = True - - dyn_zone = _CachingDynZone.get(zone.name[:-1]) - - if dyn_zone: - exists = True - values = defaultdict(lambda: defaultdict(list)) - for _type, records in dyn_zone.get_all_records().items(): - if _type == 'soa_records': - continue - _type = self.RECORDS_TO_TYPE[_type] - for record in records: - record_name = zone.hostname_from_fqdn(record.fqdn) - values[record_name][_type].append(record) - - for name, types in values.items(): - for _type, records in types.items(): - data_for = getattr(self, f'_data_for_{_type}') - data = data_for(_type, records) - record = Record.new(zone, name, data, source=self, - lenient=lenient) - if record not in td_records: - zone.add_record(record, lenient=lenient) - - self.log.info('populate: found %s records, exists=%s', - len(zone.records) - before, exists) - return exists - - def _extra_changes(self, desired, changes, **kwargs): - self.log.debug('_extra_changes: desired=%s', desired.name) - - changed = set([c.record for c in changes]) - - extra = [] - for record in desired.records: - if record in changed or not getattr(record, 'geo', False): - # Already changed, or no geo, no need to check it - continue - label = f'{record.fqdn}:{record._type}' - try: - monitor = self.traffic_director_monitors[label] - except KeyError: - self.log.info('_extra_changes: health-check missing for %s', - label) - extra.append(Update(record, record)) - continue - if _monitor_doesnt_match(monitor, record.healthcheck_host(), - record.healthcheck_path, - record.healthcheck_protocol, - record.healthcheck_port): - self.log.info('_extra_changes: health-check mis-match for %s', - label) - extra.append(Update(record, record)) - - return extra - - def _kwargs_for_A(self, record): - return [{ - 'address': v, - 'ttl': record.ttl, - } for v in record.values] - - _kwargs_for_AAAA = _kwargs_for_A - - def _kwargs_for_CAA(self, record): - return [{ - 'flags': v.flags, - 'tag': v.tag, - 'ttl': record.ttl, - 'value': v.value, - } for v in record.values] - - def _kwargs_for_CNAME(self, record): - return [{ - 'cname': record.value, - 'ttl': record.ttl, - }] - - def _kwargs_for_ALIAS(self, record): - # NOTE: Dyn's UI doesn't allow editing of ALIAS ttl, but the API seems - # to accept and store the values we send it just fine. No clue if they - # do anything with them. I'd assume they just obey the TTL of the - # record that we're pointed at which makes sense. 
- return [{ - 'alias': record.value, - 'ttl': record.ttl, - }] - - def _kwargs_for_MX(self, record): - return [{ - 'preference': v.preference, - 'exchange': v.exchange, - 'ttl': record.ttl, - } for v in record.values] - - def _kwargs_for_NAPTR(self, record): - return [{ - 'flags': v.flags, - 'order': v.order, - 'preference': v.preference, - 'regexp': v.regexp, - 'replacement': v.replacement, - 'services': v.service, - 'ttl': record.ttl, - } for v in record.values] - - def _kwargs_for_NS(self, record): - return [{ - 'nsdname': v, - 'ttl': record.ttl, - } for v in record.values] - - def _kwargs_for_PTR(self, record): - return [{ - 'ptrdname': record.value, - 'ttl': record.ttl, - }] - - def _kwargs_for_SSHFP(self, record): - return [{ - 'algorithm': v.algorithm, - 'fptype': v.fingerprint_type, - 'fingerprint': v.fingerprint, - } for v in record.values] - - def _kwargs_for_SPF(self, record): - return [{ - 'txtdata': v, - 'ttl': record.ttl, - } for v in record.chunked_values] - - def _kwargs_for_SRV(self, record): - return [{ - 'port': v.port, - 'priority': v.priority, - 'target': v.target, - 'weight': v.weight, - 'ttl': record.ttl, - } for v in record.values] - - _kwargs_for_TXT = _kwargs_for_SPF - - @property - def traffic_director_monitors(self): - if self._traffic_director_monitors is None: - self.log.debug('traffic_director_monitors: loading') - self._traffic_director_monitors = \ - {m.label: m for m in get_all_dsf_monitors()} - - return self._traffic_director_monitors - - def _traffic_director_monitor(self, record): - fqdn = record.fqdn - label = f'{fqdn}:{record._type}' - try: - try: - monitor = self.traffic_director_monitors[label] - self.log.debug('_traffic_director_monitor: existing for %s', - label) - except KeyError: - # UNTIL 1.0 We don't have one for the new label format, see if - # we still have one for the old and update it - monitor = self.traffic_director_monitors[fqdn] - self.log.info('_traffic_director_monitor: upgrading label ' - 'to %s', label) - monitor.label = label - self.traffic_director_monitors[label] = \ - self.traffic_director_monitors[fqdn] - del self.traffic_director_monitors[fqdn] - if _monitor_doesnt_match(monitor, record.healthcheck_host(), - record.healthcheck_path, - record.healthcheck_protocol, - record.healthcheck_port): - self.log.info('_traffic_director_monitor: updating monitor ' - 'for %s', label) - monitor.update(record.healthcheck_host(), - record.healthcheck_path, - record.healthcheck_protocol, - record.healthcheck_port) - return monitor - except KeyError: - self.log.info('_traffic_director_monitor: creating monitor ' - 'for %s', label) - monitor = DSFMonitor(label, protocol=record.healthcheck_protocol, - response_count=2, probe_interval=60, - retries=2, port=record.healthcheck_port, - active='Y', host=record.healthcheck_host(), - timeout=self.MONITOR_TIMEOUT, - header=self.MONITOR_HEADER, - path=record.healthcheck_path) - self._traffic_director_monitors[label] = monitor - return monitor - - def _find_or_create_geo_pool(self, td, pools, label, _type, values, - monitor_id=None): - for pool in pools: - if pool.label != label: - continue - records = pool.rs_chains[0].record_sets[0].records - record_values = sorted([r.address for r in records]) - if record_values == values: - # it's a match - return pool - # we need to create the pool - _class = { - 'A': DSFARecord, - 'AAAA': DSFAAAARecord - }[_type] - records = [_class(v) for v in values] - record_set = DSFRecordSet(_type, label, serve_count=len(records), - records=records, dsf_monitor_id=monitor_id) - 
chain = DSFFailoverChain(label, record_sets=[record_set]) - pool = DSFResponsePool(label, rs_chains=[chain]) - pool.create(td) - - # We need to store the newly created pool in the pools list since the - # caller won't know if it was newly created or not. This will allow us - # to find this pool again if another rule references it and avoid - # creating duplicates - pools.append(pool) - - return pool - - def _dynamic_records_for_A(self, values, record_extras): - return [DSFARecord(v['value'], weight=v.get('weight', 1), - **record_extras) - for v in values] - - def _dynamic_records_for_AAAA(self, values, record_extras): - return [DSFAAAARecord(v['value'], weight=v.get('weight', 1), - **record_extras) - for v in values] - - def _dynamic_records_for_CNAME(self, values, record_extras): - return [DSFCNAMERecord(v['value'], weight=v.get('weight', 1), - **record_extras) - for v in values] - - def _find_or_create_dynamic_pool(self, td, pools, label, _type, values, - monitor_id=None, record_extras={}): - - # Sort the values for consistent ordering so that we can compare - values = sorted(values, key=_dynamic_value_sort_key) - # Ensure that weight is included and if not use the default - values = [{ - 'value': v['value'], - 'weight': v.get('weight', 1), - } for v in values] - - # Walk through our existing pools looking for a match we can use - for pool in pools: - # It must have the same label - if pool.label != label: - continue - try: - records = pool.rs_chains[0].record_sets[0].records - except IndexError: - # No values, can't match - continue - # And the (sorted) values must match once converted for comparison - # purposes - value_for = getattr(self, f'_value_for_{_type}') - record_values = [value_for(_type, r) for r in records] - if record_values == values: - # it's a match - return pool - - # We don't have this pool and thus need to create it - records_for = getattr(self, f'_dynamic_records_for_{_type}') - records = records_for(values, record_extras) - record_set = DSFRecordSet(_type, label, serve_count=1, records=records, - dsf_monitor_id=monitor_id) - chain = DSFFailoverChain(label, record_sets=[record_set]) - pool = DSFResponsePool(label, rs_chains=[chain]) - pool.create(td) - - # We need to store the newly created pool in the pools list since the - # caller won't know if it was newly created or not. This will allow us - # to find this pool again if another rule references it and avoid - # creating duplicates - pools.append(pool) - - return pool - - def _mod_geo_rulesets(self, td, change): - new = change.new - - # Response Pools - pools = {} - - # Get existing pools. This should be simple, but it's not b/c the dyn - # api is a POS. We need all response pools so we can GC and check to - # make sure that what we're after doesn't already exist. - # td.all_response_pools just returns thin objects that don't include - # their rs_chains (and children down to actual records.) We could just - # foreach over those turning them into full DSFResponsePool objects - # with get_response_pool, but that'd be N round-trips. We can avoid - # those round trips in cases where the pools are in use in rules where - # they're already full objects. - - # First up populate all the full pools we have under rules, the _ - # prevents a td.refresh we don't need :-( seriously? 
- existing_rulesets = td._rulesets - for ruleset in existing_rulesets: - for pool in ruleset.response_pools: - pools[pool.response_pool_id] = pool - # Reverse sort the existing_rulesets by _ordering so that we'll remove - # them in that order later, this will ensure that we remove the old - # default before any of the old geo rules preventing it from catching - # everything. - existing_rulesets.sort(key=lambda r: r._ordering, reverse=True) - - # Now we need to find any pools that aren't referenced by rules - for pool in td.all_response_pools: - rpid = pool.response_pool_id - if rpid not in pools: - # we want this one, but it's thin, inflate it - pools[rpid] = get_response_pool(rpid, td) - # now that we have full objects for the complete set of existing pools, - # a list will be more useful - pools = pools.values() - - # Rulesets - - # We need to make sure and insert the new rules after any existing - # rules so they won't take effect before we've had a chance to add - # response pools to them. I've tried both publish=False (which is - # completely broken in the client) and creating the rulesets with - # response_pool_ids neither of which appear to work from the client - # library. If there are no existing rulesets fallback to 0 - insert_at = max([ - int(r._ordering) - for r in existing_rulesets - ] + [-1]) + 1 - self.log.debug('_mod_geo_rulesets: insert_at=%d', insert_at) - - # add the default - label = f'default:{uuid4().hex}' - ruleset = DSFRuleset(label, 'always', []) - ruleset.create(td, index=insert_at) - pool = self._find_or_create_geo_pool(td, pools, 'default', new._type, - new.values) - # There's no way in the client lib to create a ruleset with an existing - # pool (ref'd by id) so we have to do this round-a-bout. - active_pools = { - 'default': pool.response_pool_id - } - ruleset.add_response_pool(pool.response_pool_id) - - monitor_id = self._traffic_director_monitor(new).dsf_monitor_id - # Geos ordered least to most specific so that parents will always be - # created before their children (and thus can be referenced - geos = sorted(new.geo.items(), key=lambda d: d[0]) - for _, geo in geos: - if geo.subdivision_code: - criteria = { - 'province': geo.subdivision_code.lower() - } - elif geo.country_code: - criteria = { - 'country': geo.country_code - } - else: - criteria = { - 'region': self.REGION_CODES[geo.continent_code] - } - - label = f'{geo.code}:{uuid4().hex}' - ruleset = DSFRuleset(label, 'geoip', [], { - 'geoip': criteria - }) - # Something you have to call create others the constructor does it - ruleset.create(td, index=insert_at) - - first = geo.values[0] - pool = self._find_or_create_geo_pool(td, pools, first, new._type, - geo.values, monitor_id) - active_pools[geo.code] = pool.response_pool_id - ruleset.add_response_pool(pool.response_pool_id) - - # look for parent rulesets we can add in the chain - for code in geo.parents: - try: - pool_id = active_pools[code] - # looking at client lib code, index > exists appends - ruleset.add_response_pool(pool_id, index=999) - except KeyError: - pass - # and always add default as the last - pool_id = active_pools['default'] - ruleset.add_response_pool(pool_id, index=999) - - # we're done with active_pools as a lookup, convert it in to a set of - # the ids in use - active_pools = set(active_pools.values()) - # Clean up unused response_pools - for pool in pools: - if pool.response_pool_id in active_pools: - continue - pool.delete() - - # Clean out the old rulesets - for ruleset in existing_rulesets: - ruleset.delete() - - def 
_mod_geo_Create(self, dyn_zone, change): - new = change.new - fqdn = new.fqdn - _type = new._type - label = f'{fqdn}:{_type}' - node = DSFNode(new.zone.name, fqdn) - td = TrafficDirector(label, ttl=new.ttl, nodes=[node], publish='Y') - self.log.debug('_mod_geo_Create: td=%s', td.service_id) - self._mod_geo_rulesets(td, change) - self.traffic_directors[fqdn] = { - _type: td - } - - def _mod_geo_Update(self, dyn_zone, change): - new = change.new - if not new.geo: - # New record doesn't have geo we're going from a TD to a regular - # record - self._mod_Create(dyn_zone, change) - self._mod_geo_Delete(dyn_zone, change) - return - try: - td = self.traffic_directors[new.fqdn][new._type] - except KeyError: - # There's no td, this is actually a create, we must be going from a - # non-geo to geo record so delete the regular record as well - self._mod_geo_Create(dyn_zone, change) - self._mod_Delete(dyn_zone, change) - return - self._mod_geo_rulesets(td, change) - - def _mod_geo_Delete(self, dyn_zone, change): - existing = change.existing - fqdn_tds = self.traffic_directors[existing.fqdn] - _type = existing._type - fqdn_tds[_type].delete() - del fqdn_tds[_type] - - def _mod_dynamic_rulesets(self, td, change): - new = change.new - - # TODO: make sure we can update TTLs - if td.ttl != new.ttl: - td.ttl = new.ttl - - # Get existing pools. This should be simple, but it's not b/c the dyn - # api is a POS. We need all response pools so we can GC and check to - # make sure that what we're after doesn't already exist. - # td.all_response_pools just returns thin objects that don't include - # their rs_chains (and children down to actual records.) We could just - # foreach over those turning them into full DSFResponsePool objects - # with get_response_pool, but that'd be N round-trips. We can avoid - # those round trips in cases where the pools are in use in rules where - # they're already full objects. - - # First up populate all the pools we have under rules, the _ prevents a - # td.refresh we don't need :-( seriously? - existing_rulesets = td._rulesets - pools = {} - for ruleset in existing_rulesets: - for pool in ruleset.response_pools: - pools[pool.response_pool_id] = pool - - # Reverse sort the existing_rulesets by _ordering so that we'll remove - # them in that order later, this will ensure that we remove the old - # default before any of the old geo rules preventing it from catching - # everything. - existing_rulesets.sort(key=lambda r: r._ordering, reverse=True) - - # Add in any pools that aren't currently referenced by rules - for pool in td.all_response_pools: - rpid = pool.response_pool_id - if rpid not in pools: - # we want this one, but it's thin, inflate it - pools[rpid] = get_response_pool(rpid, td) - # now that we have full objects for the complete set of existing pools, - # a list will be more useful - pools = list(pools.values()) - - # Rulesets - - # We need to make sure and insert the new rules after any existing - # rules so they won't take effect before we've had a chance to add - # response pools to them. I've tried both publish=False (which is - # completely broken in the client) and creating the rulesets with - # response_pool_ids neither of which appear to work from the client - # library. 
If there are no existing rulesets fallback to 0 - insert_at = max([ - int(r._ordering) - for r in existing_rulesets - ] + [-1]) + 1 - self.log.debug('_mod_dynamic_rulesets: insert_at=%d', insert_at) - - # Add the base record values as the ultimate/unhealthchecked default - label = f'default:{uuid4().hex}' - ruleset = DSFRuleset(label, 'always', []) - ruleset.create(td, index=insert_at) - # If/when we go beyond A, AAAA, and CNAME this will have to get - # more intelligent, probably a weighted_values method on Record objects - # or something like that? - try: - values = new.values - except AttributeError: - values = [new.value] - values = [{ - 'value': v, - 'weight': 1, - } for v in values] - # For these defaults we need to set them to always be served and to - # ignore any health checking (since they won't have one) - pool = self._find_or_create_dynamic_pool(td, pools, 'default', - new._type, values, - record_extras={ - 'automation': 'manual', - 'eligible': True, - }) - # There's no way in the client lib to create a ruleset with an existing - # pool (ref'd by id) so we have to do this round-a-bout. - active_pools = { - # TODO: disallow default as a pool id - 'default': pool.response_pool_id - } - ruleset.add_response_pool(pool.response_pool_id) - - # Get our monitor - monitor_id = self._traffic_director_monitor(new).dsf_monitor_id - - # Make sure we have all the pools we're going to need - for _id, pool in sorted(new.dynamic.pools.items()): - values = [{ - 'weight': v.get('weight', 1), - 'value': v['value'], - } for v in pool.data['values']] - pool = self._find_or_create_dynamic_pool(td, pools, _id, - new._type, values, - monitor_id) - active_pools[_id] = pool.response_pool_id - - # Run through and configure our rules - for rule_num, rule in enumerate(reversed(new.dynamic.rules)): - criteria = defaultdict(lambda: defaultdict(list)) - criteria_type = 'always' - try: - geos = rule.data['geos'] - criteria_type = 'geoip' - except KeyError: - geos = [] - - for geo in geos: - geo = GeoCodes.parse(geo) - if geo['province_code']: - criteria['geoip']['province'] \ - .append(geo['province_code'].lower()) - elif geo['country_code']: - criteria['geoip']['country'] \ - .append(geo['country_code']) - else: - criteria['geoip']['region'] \ - .append(self.REGION_CODES[geo['continent_code']]) - - label = f'{rule_num}:{uuid4().hex}' - ruleset = DSFRuleset(label, criteria_type, [], criteria) - # Something you have to call create others the constructor does it - ruleset.create(td, index=insert_at) - - # Add the primary pool for this rule - rule_pool = rule.data['pool'] - ruleset.add_response_pool(active_pools[rule_pool]) - - # OK, we have the rule and its primary pool setup, now look to see - # if there's a fallback chain that needs to be configured - fallback = new.dynamic.pools[rule_pool].data.get('fallback', None) - seen = set([rule_pool]) - while fallback and fallback not in seen: - seen.add(fallback) - # looking at client lib code, index > exists appends - ruleset.add_response_pool(active_pools[fallback], index=999) - fallback = new.dynamic.pools[fallback].data.get('fallback', - None) - if fallback is not None: - # If we're out of the while and fallback is not None that means - # there was a loop. This generally shouldn't happen since - # Record validations test for it, but this is a - # belt-and-suspenders setup. Excepting here would put things - # into a partially configured state which would be bad. We'll - # just break at the point where the loop was going to happen - # and log about it. 
Note that any time we hit this we're likely - # to hit it multiple times as we configure the other pools - self.log.warn('_mod_dynamic_rulesets: loop detected in ' - 'fallback chain, fallback=%s, seen=%s', fallback, - seen) - - # and always add default as the last - ruleset.add_response_pool(active_pools['default'], index=999) - - # we're done with active_pools as a lookup, convert it in to a set of - # the ids in use - active_pools = set(active_pools.values()) - # Clean up unused response_pools - for pool in pools: - if pool.response_pool_id in active_pools: - continue - pool.delete() - - # Clean out the old rulesets - for ruleset in existing_rulesets: - ruleset.delete() - - def _mod_dynamic_Create(self, dyn_zone, change): - new = change.new - fqdn = new.fqdn - _type = new._type - # Create a new traffic director - label = f'{fqdn}:{_type}' - node = DSFNode(new.zone.name, fqdn) - td = TrafficDirector(label, ttl=new.ttl, nodes=[node], publish='Y') - self.log.debug('_mod_dynamic_Create: td=%s', td.service_id) - # Sync up it's pools & rules - self._mod_dynamic_rulesets(td, change) - # Store it for future reference - self.traffic_directors[fqdn] = { - _type: td - } - - def _mod_dynamic_Update(self, dyn_zone, change): - new = change.new - if not new.dynamic: - if new.geo: - # New record is a geo record - self.log.info('_mod_dynamic_Update: %s to geo', new.fqdn) - # Convert the TD over to a geo and we're done - self._mod_geo_Update(dyn_zone, change) - else: - # New record doesn't have dynamic, we're going from a TD to a - # regular record - self.log.info('_mod_dynamic_Update: %s to plain', new.fqdn) - # Create the regular record - self._mod_Create(dyn_zone, change) - # Delete the dynamic - self._mod_dynamic_Delete(dyn_zone, change) - return - try: - # We'll be dynamic going forward, see if we have one already - td = self.traffic_directors[new.fqdn][new._type] - if change.existing.geo: - self.log.info('_mod_dynamic_Update: %s from geo', new.fqdn) - else: - self.log.debug('_mod_dynamic_Update: %s existing', new.fqdn) - # If we're here we do, we'll just update it down below - except KeyError: - # There's no td, this is actually a create, we must be going from a - # non-dynamic to dynamic record - # First create the dynamic record - self.log.info('_mod_dynamic_Update: %s from regular', new.fqdn) - self._mod_dynamic_Create(dyn_zone, change) - # From a generic so remove the old generic - self._mod_Delete(dyn_zone, change) - return - - # IF we're here it's actually an update, sync up rules - self._mod_dynamic_rulesets(td, change) - - def _mod_dynamic_Delete(self, dyn_zone, change): - existing = change.existing - fqdn_tds = self.traffic_directors[existing.fqdn] - _type = existing._type - fqdn_tds[_type].delete() - del fqdn_tds[_type] - - def _mod_Create(self, dyn_zone, change): - new = change.new - kwargs_for = getattr(self, f'_kwargs_for_{new._type}') - for kwargs in kwargs_for(new): - dyn_zone.add_record(new.name, new._type, **kwargs) - - def _mod_Delete(self, dyn_zone, change): - existing = change.existing - if existing.name: - target = f'{existing.name}.{existing.zone.name[:-1]}' - else: - target = existing.zone.name[:-1] - _type = self.TYPE_TO_RECORDS[existing._type] - for rec in dyn_zone.get_all_records()[_type]: - if rec.fqdn == target: - rec.delete() - - def _mod_Update(self, dyn_zone, change): - self._mod_Delete(dyn_zone, change) - self._mod_Create(dyn_zone, change) - - def _apply_traffic_directors(self, desired, changes, dyn_zone): - self.log.debug('_apply_traffic_directors: zone=%s', 
desired.name) - unhandled_changes = [] - for c in changes: - # we only mess with changes that have geo info somewhere - if getattr(c.new, 'dynamic', False) or getattr(c.existing, - 'dynamic', False): - klass = c.__class__.__name__ - mod = getattr(self, f'_mod_dynamic_{klass}') - mod(dyn_zone, c) - elif getattr(c.new, 'geo', False) or getattr(c.existing, 'geo', - False): - klass = c.__class__.__name__ - mod = getattr(self, f'_mod_geo_{klass}') - mod(dyn_zone, c) - else: - unhandled_changes.append(c) - - return unhandled_changes - - def _apply_regular(self, desired, changes, dyn_zone): - self.log.debug('_apply_regular: zone=%s', desired.name) - for c in changes: - klass = c.__class__.__name__ - mod = getattr(self, f'_mod_{klass}') - mod(dyn_zone, c) - - # TODO: detect "extra" changes when monitors are out of date or failover - # chains are wrong etc. - - def _apply(self, plan): - desired = plan.desired - changes = plan.changes - self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name, - len(changes)) - - self._check_dyn_sess() - - dyn_zone = _CachingDynZone.get(desired.name[:-1], create=True) - - if self.traffic_directors_enabled: - # any changes left over don't involve geo - changes = self._apply_traffic_directors(desired, changes, dyn_zone) - - self._apply_regular(desired, changes, dyn_zone) - - dyn_zone.publish() +logger = getLogger('Dyn') +try: + logger.warn('octodns_dyn shimmed. Update your provider class to ' + 'octodns_dyn.DynProvider. ' + 'Shim will be removed in 1.0') + from octodns_dyn import DynProvider + DynProvider # pragma: no cover +except ModuleNotFoundError: + logger.exception('DynProvider has been moved into a separate module, ' + 'octodns_dyn is now required. Provider class should ' + 'be updated to octodns_dyn.DynProvider') + raise diff --git a/requirements.txt b/requirements.txt index 23bf713..1ee7cd8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,6 @@ azure-mgmt-dns==8.0.0 azure-mgmt-trafficmanager==0.51.0 dnspython==1.16.0 docutils==0.16 -dyn==1.8.1 edgegrid-python==1.1.1 fqdn==1.5.0 google-cloud-core==1.4.1 diff --git a/tests/fixtures/constellix-domains.json b/tests/fixtures/constellix-domains.json deleted file mode 100644 index 4b6392d..0000000 --- a/tests/fixtures/constellix-domains.json +++ /dev/null @@ -1,28 +0,0 @@ -[{ - "id": 123123, - "name": "unit.tests", - "soa": { - "primaryNameserver": "ns11.constellix.com.", - "email": "dns.constellix.com.", - "ttl": 86400, - "serial": 2015010102, - "refresh": 43200, - "retry": 3600, - "expire": 1209600, - "negCache": 180 - }, - "createdTs": "2019-08-07T03:36:02Z", - "modifiedTs": "2019-08-07T03:36:02Z", - "typeId": 1, - "domainTags": [], - "folder": null, - "hasGtdRegions": false, - "hasGeoIP": false, - "nameserverGroup": 1, - "nameservers": ["ns11.constellix.com.", "ns21.constellix.com.", "ns31.constellix.com.", "ns41.constellix.net.", "ns51.constellix.net.", "ns61.constellix.net."], - "note": "", - "version": 0, - "status": "ACTIVE", - "tags": [], - "contactIds": [] -}] diff --git a/tests/fixtures/constellix-geofilters.json b/tests/fixtures/constellix-geofilters.json deleted file mode 100644 index eef17a3..0000000 --- a/tests/fixtures/constellix-geofilters.json +++ /dev/null @@ -1,34 +0,0 @@ -[ - { - "id": 6303, - "name": "some.other", - "filterRulesLimit": 100, - "createdTs": "2021-08-19T14:47:47Z", - "modifiedTs": "2021-08-19T14:47:47Z", - "geoipContinents": ["AS", "OC"], - "geoipCountries": ["ES", "SE", "UA"], - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode":
"NL" - } - ] - }, - { - "id": 5303, - "name": "unit.tests.:www.dynamic:A:one", - "filterRulesLimit": 100, - "createdTs": "2021-08-19T14:47:47Z", - "modifiedTs": "2021-08-19T14:47:47Z", - "geoipContinents": ["AS", "OC"], - "geoipCountries": ["ES", "SE", "UA"], - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode": "NL" - } - ] - } -] diff --git a/tests/fixtures/constellix-pools.json b/tests/fixtures/constellix-pools.json deleted file mode 100644 index 8d90bd4..0000000 --- a/tests/fixtures/constellix-pools.json +++ /dev/null @@ -1,62 +0,0 @@ -[ - { - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "numReturn": 1, - "minAvailableFailover": 1, - "createdTs": "2020-09-12T00:44:35Z", - "modifiedTs": "2020-09-12T00:44:35Z", - "appliedDomains": [ - { - "id": 123123, - "name": "unit.tests", - "recordOption": "pools" - } - ], - "appliedTemplates": null, - "unlinkedDomains": [], - "unlinkedTemplates": null, - "itoEnabled": false, - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - }, - { - "value": "1.2.3.5", - "weight": 1 - } - ] - }, - { - "id": 1808522, - "name": "unit.tests.:www.dynamic:A:one", - "type": "A", - "numReturn": 1, - "minAvailableFailover": 1, - "createdTs": "2020-09-12T00:44:35Z", - "modifiedTs": "2020-09-12T00:44:35Z", - "appliedDomains": [ - { - "id": 123123, - "name": "unit.tests", - "recordOption": "pools" - } - ], - "appliedTemplates": null, - "unlinkedDomains": [], - "unlinkedTemplates": null, - "itoEnabled": false, - "values": [ - { - "value": "1.2.3.6", - "weight": 1 - }, - { - "value": "1.2.3.7", - "weight": 1 - } - ] - } -] \ No newline at end of file diff --git a/tests/fixtures/constellix-records.json b/tests/fixtures/constellix-records.json deleted file mode 100644 index c5cdf8e..0000000 --- a/tests/fixtures/constellix-records.json +++ /dev/null @@ -1,696 +0,0 @@ -[{ - "id": 1808529, - "type": "CAA", - "recordType": "caa", - "name": "", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 3600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149569216, - "value": [{ - "flag": 0, - "tag": "issue", - "data": "ca.unit.tests", - "caaProviderId": 1, - "disableFlag": false - }], - "roundRobin": [{ - "flag": 0, - "tag": "issue", - "data": "ca.unit.tests", - "caaProviderId": 1, - "disableFlag": false - }] -}, { - "id": 1808516, - "type": "A", - "recordType": "a", - "name": "", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149623640, - "value": ["1.2.3.4", "1.2.3.5"], - "roundRobin": [{ - "value": "1.2.3.4", - "disableFlag": false - }, { - "value": "1.2.3.5", - "disableFlag": false - }], - "geolocation": null, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "roundRobinFailover": [], - "pools": [], - "poolsDetail": [] -}, { - "id": 1808527, - "type": "SRV", - "recordType": "srv", - "name": "_srv._tcp", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149714387, - "value": [{ - "value": "foo-1.unit.tests.", - "priority": 10, - "weight": 20, - "port": 30, - 
"disableFlag": false - }, { - "value": "foo-2.unit.tests.", - "priority": 12, - "weight": 20, - "port": 30, - "disableFlag": false - }], - "roundRobin": [{ - "value": "foo-1.unit.tests.", - "priority": 10, - "weight": 20, - "port": 30, - "disableFlag": false - }, { - "value": "foo-2.unit.tests.", - "priority": 12, - "weight": 20, - "port": 30, - "disableFlag": false - }] -}, { - "id": 1808527, - "type": "SRV", - "recordType": "srv", - "name": "_imap._tcp", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149714387, - "value": [{ - "value": ".", - "priority": 0, - "weight": 0, - "port": 0, - "disableFlag": false - }], - "roundRobin": [{ - "value": ".", - "priority": 0, - "weight": 0, - "port": 0, - "disableFlag": false - }] -}, { - "id": 1808527, - "type": "SRV", - "recordType": "srv", - "name": "_pop3._tcp", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149714387, - "value": [{ - "value": ".", - "priority": 0, - "weight": 0, - "port": 0, - "disableFlag": false - }], - "roundRobin": [{ - "value": ".", - "priority": 0, - "weight": 0, - "port": 0, - "disableFlag": false - }] -}, { - "id": 1808515, - "type": "AAAA", - "recordType": "aaaa", - "name": "aaaa", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149739464, - "value": ["2601:644:500:e210:62f8:1dff:feb8:947a"], - "roundRobin": [{ - "value": "2601:644:500:e210:62f8:1dff:feb8:947a", - "disableFlag": false - }], - "geolocation": null, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "pools": [], - "poolsDetail": [], - "roundRobinFailover": [] -}, { - "id": 1808530, - "type": "ANAME", - "recordType": "aname", - "name": "", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 1800, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150251379, - "value": [{ - "value": "aname.unit.tests.", - "disableFlag": false - }], - "roundRobin": [{ - "value": "aname.unit.tests.", - "disableFlag": false - }], - "geolocation": null, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "pools": [], - "poolsDetail": [] -}, { - "id": 1808521, - "type": "CNAME", - "recordType": "cname", - "name": "cname", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565152113825, - "value": "", - "roundRobin": [{ - "value": "", - "disableFlag": false - }], - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [{ - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 1, - "markedActive": false - }, { - "id": 
null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 2, - "markedActive": false - }] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [{ - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 1, - "markedActive": false - }, { - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 2, - "markedActive": false - }] - }, - "pools": [], - "poolsDetail": [], - "geolocation": null, - "host": "" -}, { - "id": 1808522, - "type": "CNAME", - "recordType": "cname", - "name": "included", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 3600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565152119137, - "value": "", - "roundRobin": [{ - "value": "", - "disableFlag": false - }], - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [{ - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 1, - "markedActive": false - }, { - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 2, - "markedActive": false - }] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [{ - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 1, - "markedActive": false - }, { - "id": null, - "value": "", - "disableFlag": false, - "failedFlag": false, - "status": "N/A", - "sortOrder": 2, - "markedActive": false - }] - }, - "pools": [], - "poolsDetail": [], - "geolocation": null, - "host": "" -}, { - "id": 1808523, - "type": "MX", - "recordType": "mx", - "name": "mx", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149879856, - "value": [{ - "value": "smtp-3.unit.tests.", - "level": 30, - "disableFlag": false - }, { - "value": "smtp-2.unit.tests.", - "level": 20, - "disableFlag": false - }, { - "value": "smtp-4.unit.tests.", - "level": 10, - "disableFlag": false - }, { - "value": "smtp-1.unit.tests.", - "level": 40, - "disableFlag": false - }], - "roundRobin": [{ - "value": "smtp-3.unit.tests.", - "level": 30, - "disableFlag": false - }, { - "value": "smtp-2.unit.tests.", - "level": 20, - "disableFlag": false - }, { - "value": "smtp-4.unit.tests.", - "level": 10, - "disableFlag": false - }, { - "value": "smtp-1.unit.tests.", - "level": 40, - "disableFlag": false - }] -}, { - "id": 1808525, - "type": "PTR", - "recordType": "ptr", - "name": "ptr", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150115139, - "value": [{ - "value": "foo.bar.com.", - "disableFlag": false - }], - "roundRobin": [{ - "value": "foo.bar.com.", - "disableFlag": false - }] -}, { - "id": 1808526, - "type": "SPF", - "recordType": "spf", - "name": "spf", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149916132, - "value": [{ - "value": 
"\"v=spf1 ip4:192.168.0.1/16-all\"", - "disableFlag": false - }], - "roundRobin": [{ - "value": "\"v=spf1 ip4:192.168.0.1/16-all\"", - "disableFlag": false - }] -}, { - "id": 1808528, - "type": "TXT", - "recordType": "txt", - "name": "txt", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565149966915, - "value": [{ - "value": "\"Bah bah black sheep\"", - "disableFlag": false - }, { - "value": "\"have you any wool.\"", - "disableFlag": false - }, { - "value": "\"v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+of/long/string+with+numb3rs\"", - "disableFlag": false - }], - "roundRobin": [{ - "value": "\"Bah bah black sheep\"", - "disableFlag": false - }, { - "value": "\"have you any wool.\"", - "disableFlag": false - }, { - "value": "\"v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+of/long/string+with+numb3rs\"", - "disableFlag": false - }] -}, { - "id": 1808524, - "type": "NS", - "recordType": "ns", - "name": "under", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 3600, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150062850, - "value": [{ - "value": "ns1.unit.tests.", - "disableFlag": false - }, { - "value": "ns2", - "disableFlag": false - }], - "roundRobin": [{ - "value": "ns1.unit.tests.", - "disableFlag": false - }, { - "value": "ns2", - "disableFlag": false - }] -}, { - "id": 1808531, - "type": "HTTPRedirection", - "recordType": "httpredirection", - "name": "unsupported", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150348154, - "value": "https://redirect.unit.tests", - "roundRobin": [{ - "value": "https://redirect.unit.tests" - }], - "title": "Unsupported Record", - "keywords": "unsupported", - "description": "unsupported record", - "hardlinkFlag": false, - "redirectTypeId": 1, - "url": "https://redirect.unit.tests" -}, { - "id": 1808519, - "type": "A", - "recordType": "a", - "name": "www", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150079027, - "value": ["2.2.3.6"], - "roundRobin": [{ - "value": "2.2.3.6", - "disableFlag": false - }], - "geolocation": null, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "roundRobinFailover": [], - "pools": [], - "poolsDetail": [] -}, { - "id": 1808520, - "type": "A", - "recordType": "a", - "name": "www.sub", - "recordOption": "roundRobin", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150090588, - "value": ["2.2.3.6"], - "roundRobin": [{ - "value": "2.2.3.6", - "disableFlag": false - }], - "geolocation": null, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "roundRobinFailover": [], - "pools": [], - "poolsDetail": [] -}, { - 
"id": 1808520, - "type": "A", - "recordType": "a", - "name": "www.dynamic", - "recordOption": "pools", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150090588, - "value": [], - "roundRobin": [], - "geolocation": { - "geoipFilter": 1 - }, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "roundRobinFailover": [], - "pools": [ - 1808521 - ], - "poolsDetail": [{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two" - }] -}, -{ - "id": 1808521, - "type": "A", - "recordType": "a", - "name": "www.dynamic", - "recordOption": "pools", - "noAnswer": false, - "note": "", - "ttl": 300, - "gtdRegion": 1, - "parentId": 123123, - "parent": "domain", - "source": "Domain", - "modifiedTs": 1565150090588, - "value": [], - "roundRobin": [], - "geolocation": { - "geoipFilter": 5303 - }, - "recordFailover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "failover": { - "disabled": false, - "failoverType": 1, - "failoverTypeStr": "Normal (always lowest level)", - "values": [] - }, - "roundRobinFailover": [], - "pools": [ - 1808522 - ], - "poolsDetail": [{ - "id": 1808522, - "name": "unit.tests.:www.dynamic:A:one" - }] -}] diff --git a/tests/fixtures/digitalocean-page-1.json b/tests/fixtures/digitalocean-page-1.json deleted file mode 100644 index c931411..0000000 --- a/tests/fixtures/digitalocean-page-1.json +++ /dev/null @@ -1,188 +0,0 @@ -{ - "domain_records": [{ - "id": null, - "type": "SOA", - "name": "@", - "data": null, - "priority": null, - "port": null, - "ttl": null, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189874, - "type": "NS", - "name": "@", - "data": "ns1.digitalocean.com", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189875, - "type": "NS", - "name": "@", - "data": "ns2.digitalocean.com", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189876, - "type": "NS", - "name": "@", - "data": "ns3.digitalocean.com", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189877, - "type": "NS", - "name": "under", - "data": "ns1.unit.tests", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189878, - "type": "NS", - "name": "under", - "data": "ns2.unit.tests", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189879, - "type": "SRV", - "name": "_srv._tcp", - "data": "foo-1.unit.tests", - "priority": 10, - "port": 30, - "ttl": 600, - "weight": 20, - "flags": null, - "tag": null - }, { - "id": 11189880, - "type": "SRV", - "name": "_srv._tcp", - "data": "foo-2.unit.tests", - "priority": 12, - "port": 30, - "ttl": 600, - "weight": 20, - "flags": null, - "tag": null - }, { - "id": 11189881, - "type": "TXT", - "name": "txt", - "data": "Bah bah black sheep", - "priority": null, - "port": null, - "ttl": 600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189882, - "type": "TXT", - "name": "txt", - "data": "have you any wool.", - "priority": null, 
- "port": null, - "ttl": 600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189883, - "type": "A", - "name": "@", - "data": "1.2.3.4", - "priority": null, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189884, - "type": "A", - "name": "@", - "data": "1.2.3.5", - "priority": null, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189885, - "type": "A", - "name": "www", - "data": "2.2.3.6", - "priority": null, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189886, - "type": "MX", - "name": "mx", - "data": "smtp-4.unit.tests", - "priority": 10, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189887, - "type": "MX", - "name": "mx", - "data": "smtp-2.unit.tests", - "priority": 20, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189888, - "type": "MX", - "name": "mx", - "data": "smtp-3.unit.tests", - "priority": 30, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }], - "links": { - "pages": { - "last": "https://api.digitalocean.com/v2/domains/unit.tests/records?page=2", - "next": "https://api.digitalocean.com/v2/domains/unit.tests/records?page=2" - } - }, - "meta": { - "total": 21 - } -} \ No newline at end of file diff --git a/tests/fixtures/digitalocean-page-2.json b/tests/fixtures/digitalocean-page-2.json deleted file mode 100644 index 1405527..0000000 --- a/tests/fixtures/digitalocean-page-2.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "domain_records": [{ - "id": 11189889, - "type": "MX", - "name": "mx", - "data": "smtp-1.unit.tests", - "priority": 40, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189890, - "type": "AAAA", - "name": "aaaa", - "data": "2601:644:500:e210:62f8:1dff:feb8:947a", - "priority": null, - "port": null, - "ttl": 600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189891, - "type": "CNAME", - "name": "cname", - "data": "@", - "priority": null, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189892, - "type": "A", - "name": "www.sub", - "data": "2.2.3.6", - "priority": null, - "port": null, - "ttl": 300, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189893, - "type": "TXT", - "name": "txt", - "data": "v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+of/long/string+with+numb3rs", - "priority": null, - "port": null, - "ttl": 600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189894, - "type": "CAA", - "name": "@", - "data": "ca.unit.tests", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": 0, - "tag": "issue" - }, { - "id": 11189895, - "type": "CNAME", - "name": "included", - "data": "@", - "priority": null, - "port": null, - "ttl": 3600, - "weight": null, - "flags": null, - "tag": null - }, { - "id": 11189896, - "type": "SRV", - "name": "_imap._tcp", - "data": ".", - "priority": 0, - "port": 0, - "ttl": 600, - "weight": 0, - "flags": null, - "tag": null - }, { - "id": 11189897, - "type": "SRV", - "name": "_pop3._tcp", - "data": ".", - "priority": 0, - "port": 0, - "ttl": 600, - "weight": 0, - "flags": null, - "tag": null - }], - "links": { - "pages": { - "first": "https://api.digitalocean.com/v2/domains/unit.tests/records?page=1", - "prev": "https://api.digitalocean.com/v2/domains/unit.tests/records?page=1" - } 
- }, - "meta": { - "total": 21 - } -} diff --git a/tests/fixtures/dnsmadeeasy-domains.json b/tests/fixtures/dnsmadeeasy-domains.json deleted file mode 100644 index de7f7db..0000000 --- a/tests/fixtures/dnsmadeeasy-domains.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "totalPages": 1, - "totalRecords": 1, - "data": [{ - "created": 1511740800000, - "folderId": 1990, - "gtdEnabled": false, - "pendingActionId": 0, - "updated": 1511766661574, - "processMulti": false, - "activeThirdParties": [], - "name": "unit.tests", - "id": 123123 - }], - "page": 0 -} \ No newline at end of file diff --git a/tests/fixtures/dnsmadeeasy-records.json b/tests/fixtures/dnsmadeeasy-records.json deleted file mode 100644 index aefd6ce..0000000 --- a/tests/fixtures/dnsmadeeasy-records.json +++ /dev/null @@ -1,344 +0,0 @@ -{ - "totalPages": 1, - "totalRecords": 23, - "data": [{ - "failover": false, - "monitor": false, - "sourceId": 123123, - "caaType": "issue", - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "issuerCritical": 0, - "ttl": 3600, - "source": 1, - "name": "", - "value": "\"ca.unit.tests\"", - "id": 11189874, - "type": "CAA" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 300, - "source": 1, - "name": "", - "value": "1.2.3.4", - "id": 11189875, - "type": "A" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 300, - "source": 1, - "name": "", - "value": "1.2.3.5", - "id": 11189876, - "type": "A" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "weight": 20, - "source": 1, - "name": "_srv._tcp", - "value": "foo-1.unit.tests.", - "id": 11189877, - "priority": 10, - "type": "SRV", - "port": 30 - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "weight": 20, - "source": 1, - "name": "_srv._tcp", - "value": "foo-2.unit.tests.", - "id": 11189878, - "priority": 12, - "type": "SRV", - "port": 30 - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "source": 1, - "name": "aaaa", - "value": "2601:644:500:e210:62f8:1dff:feb8:947a", - "id": 11189879, - "type": "AAAA" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 300, - "source": 1, - "name": "cname", - "value": "", - "id": 11189880, - "type": "CNAME" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 3600, - "source": 1, - "name": "included", - "value": "", - "id": 11189881, - "type": "CNAME" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "mxLevel": 30, - "ttl": 300, - "source": 1, - "name": "mx", - "value": "smtp-3.unit.tests.", - "id": 11189882, - "type": "MX" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, 
- "gtdLocation": "DEFAULT", - "hardLink": false, - "mxLevel": 20, - "ttl": 300, - "source": 1, - "name": "mx", - "value": "smtp-2.unit.tests.", - "id": 11189883, - "type": "MX" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "mxLevel": 10, - "ttl": 300, - "source": 1, - "name": "mx", - "value": "smtp-4.unit.tests.", - "id": 11189884, - "type": "MX" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "mxLevel": 40, - "ttl": 300, - "source": 1, - "name": "mx", - "value": "smtp-1.unit.tests.", - "id": 11189885, - "type": "MX" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "source": 1, - "name": "spf", - "value": "\"v=spf1 ip4:192.168.0.1/16-all\"", - "id": 11189886, - "type": "SPF" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "source": 1, - "name": "txt", - "value": "\"Bah bah black sheep\"", - "id": 11189887, - "type": "TXT" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "source": 1, - "name": "txt", - "value": "\"have you any wool.\"", - "id": 11189888, - "type": "TXT" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 600, - "source": 1, - "name": "txt", - "value": "\"v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+of/long/string+with+numb3rs\"", - "id": 11189889, - "type": "TXT" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 3600, - "source": 1, - "name": "under", - "value": "ns1.unit.tests.", - "id": 11189890, - "type": "NS" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 3600, - "source": 1, - "name": "under", - "value": "ns2", - "id": 11189891, - "type": "NS" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 300, - "source": 1, - "name": "www", - "value": "2.2.3.6", - "id": 11189892, - "type": "A" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 300, - "source": 1, - "name": "www.sub", - "value": "2.2.3.6", - "id": 11189893, - "type": "A" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 300, - "source": 1, - "name": "ptr", - "value": "foo.bar.com.", - "id": 11189894, - "type": "PTR" - }, { - "failover": false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": false, - "ttl": 1800, - "source": 1, - "name": "", - "value": "aname.unit.tests.", - "id": 11189895, - "type": "ANAME" - }, { - "failover": 
false, - "monitor": false, - "sourceId": 123123, - "dynamicDns": false, - "failed": false, - "gtdLocation": "DEFAULT", - "hardLink": true, - "ttl": 1800, - "source": 1, - "name": "unsupported", - "value": "https://redirect.unit.tests", - "id": 11189897, - "title": "Unsupported Record", - "keywords": "unsupported", - "redirectType": "Standard - 302", - "description": "unsupported record", - "type": "HTTPRED" - }], - "page": 0 -} diff --git a/tests/fixtures/dyn-traffic-director-get.json b/tests/fixtures/dyn-traffic-director-get.json deleted file mode 100644 index 38f2602..0000000 --- a/tests/fixtures/dyn-traffic-director-get.json +++ /dev/null @@ -1,4190 +0,0 @@ -{ - "status": "success", - "data": { - "notifiers": [], - "rulesets": [ - { - "dsf_ruleset_id": "9e4lSkD33d8x1mCBChQcbOc6O2o", - "response_pools": [ - { - "status": "ok", - "rs_chains": [ - { - "dsf_response_pool_id": "vX54Ck2p2c6fSmIAkG_JD3nY5OI", - "core": "false", - "status": "ok", - "dsf_record_set_failover_chain_id": "VUFenKGhvlmDZDIcAXE-qxoxJb0", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "record_sets": [ - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "-ZT5yekuV4em0ADl7533_ulTk7g", - "dsf_monitor_id": "htWFldFj4R8dLWiG6dbMbDL0EOE", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "-ZT5yekuV4em0ADl7533_ulTk7g", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "5.2.3.4", - "dsf_record_id": "4WiI3ymCsLpsRfWwftix0lBZDoE", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "5.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - 
"algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "5.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "-ZT5yekuV4em0ADl7533_ulTk7g", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "5.2.3.5", - "dsf_record_id": "OuF6eTQFcURtRht9H0cOjUa8uRM", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "5.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "5.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879663", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "5.2.3.4", - "serve_count": "2" - }, - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": 
"5Xt8xYzj5Yic3lydKyPeoihJow4", - "dsf_monitor_id": "", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "5Xt8xYzj5Yic3lydKyPeoihJow4", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.4", - "dsf_record_id": "VQSQMyjakhz1X5c63NV-PqXl8Z4", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "5Xt8xYzj5Yic3lydKyPeoihJow4", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.5", - "dsf_record_id": "pYP70Y9_EyAXdNtUOHC4dZa1NSo", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 
0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879663", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "default", - "serve_count": "2" - } - ], - "label": "5.2.3.4" - } - ], - "automation": "auto", - "last_monitored": "1487879663", - "pending_change": "", - "eligible": "true", - "rulesets": [ - { - "dsf_ruleset_id": "9e4lSkD33d8x1mCBChQcbOc6O2o", - "response_pools": [], - "criteria": { - "geoip": { - "province": [], - "country": [ - "US", - "CA" - ], - "region": [] - } - }, - "ordering": "1", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "NA-US-CA", - "criteria_type": "geoip" - } - ], - "label": "5.2.3.4", - "dsf_response_pool_id": "vX54Ck2p2c6fSmIAkG_JD3nY5OI", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [ - "US", - "CA" - ], - "region": [] - } - }, - "ordering": "1", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "NA-US-CA:b2c71ae53a5645cc813c51db2b6571c7", - "criteria_type": "geoip" - }, - { - "dsf_ruleset_id": "Eg5aPCgcvdhrCCpqK1zCSd-oE2c", - "response_pools": [ - { - "status": "ok", - "rs_chains": [ - { - "dsf_response_pool_id": "zyoEkzz1znnhQk3J1gcMFpEngeI", - "core": "false", - "status": "ok", - "dsf_record_set_failover_chain_id": "G4ZdfXTa45jrlNVaHSBY5kHVX8k", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "record_sets": [ - 
{ - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "FJZxSLJnFSS4KJfdZ2yyjwM8_hY", - "dsf_monitor_id": "htWFldFj4R8dLWiG6dbMbDL0EOE", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "FJZxSLJnFSS4KJfdZ2yyjwM8_hY", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "4.2.3.4", - "dsf_record_id": "Es9g_4E_aIoN-tFTStQDNSE8-aY", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "4.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "4.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "FJZxSLJnFSS4KJfdZ2yyjwM8_hY", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "4.2.3.5", - "dsf_record_id": "Mnz7WakCZ91XRKpbcQ6dkZ6d-QI", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - 
"name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "4.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "4.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879665", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "4.2.3.4", - "serve_count": "2" - }, - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "YGakLwcCC22_5rXg2t8XuWL8lqc", - "dsf_monitor_id": "", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "YGakLwcCC22_5rXg2t8XuWL8lqc", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.4", - "dsf_record_id": "aduilHaDNN_8HD8BppL_wVmMerk", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - 
"horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "YGakLwcCC22_5rXg2t8XuWL8lqc", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.5", - "dsf_record_id": "TOn5jK5oq1ZwmkbPpYwPcBUJuVo", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - 
"rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879665", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "default", - "serve_count": "2" - } - ], - "label": "4.2.3.4" - } - ], - "automation": "auto", - "last_monitored": "1487879665", - "pending_change": "", - "eligible": "true", - "rulesets": [ - { - "dsf_ruleset_id": "Eg5aPCgcvdhrCCpqK1zCSd-oE2c", - "response_pools": [], - "criteria": { - "geoip": { - "province": [], - "country": [ - "US" - ], - "region": [] - } - }, - "ordering": "2", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "NA-US", - "criteria_type": "geoip" - } - ], - "label": "4.2.3.4", - "dsf_response_pool_id": "zyoEkzz1znnhQk3J1gcMFpEngeI", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [ - "US" - ], - "region": [] - } - }, - "ordering": "2", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "NA-US:b2c71ae53a5645cc813c51db2b6571c7", - "criteria_type": "geoip" - }, - { - "dsf_ruleset_id": "gi96PlJik9qf36PyNXJPT-5CRM0", - "response_pools": [ - { - "status": "ok", - "rs_chains": [ - { - "dsf_response_pool_id": "0xUYeWq92OrQg6ImuFZAdFz8gOs", - "core": "false", - "status": "ok", - "dsf_record_set_failover_chain_id": "MSuION06EZhgHBjHkBqVhtFTJq8", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "record_sets": [ - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "TeTYCMo1INWcHfs8tTjGScx2uv8", - "dsf_monitor_id": "htWFldFj4R8dLWiG6dbMbDL0EOE", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "TeTYCMo1INWcHfs8tTjGScx2uv8", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "3.2.3.4", - "dsf_record_id": "AltgsqLdUKOXi3ESlr3KLcw-vUs", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 
0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "3.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "3.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "TeTYCMo1INWcHfs8tTjGScx2uv8", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "3.2.3.5", - "dsf_record_id": "IH6_EamKifOs3p4WJWWWX04xQAQ", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 
0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "3.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "3.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879668", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "3.2.3.4", - "serve_count": "2" - }, - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "YI41kUebHBMejXqBcF3BnkjT1bQ", - "dsf_monitor_id": "", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "YI41kUebHBMejXqBcF3BnkjT1bQ", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.4", - "dsf_record_id": "ZgSaaoX-OzHI_FpmID_O6CfSFiQ", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", 
- "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "YI41kUebHBMejXqBcF3BnkjT1bQ", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.5", - "dsf_record_id": "S0NUfNRx-MYClfOkHRR8NCPzLhw", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879668", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "default", - "serve_count": "2" - } - ], - "label": "3.2.3.4" - } - ], - "automation": "auto", - "last_monitored": "1487879668", - "pending_change": "", 
- "eligible": "true", - "rulesets": [ - { - "dsf_ruleset_id": "gi96PlJik9qf36PyNXJPT-5CRM0", - "response_pools": [], - "criteria": { - "geoip": { - "province": [], - "country": [ - "JP" - ], - "region": [] - } - }, - "ordering": "3", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "AS-JP", - "criteria_type": "geoip" - } - ], - "label": "3.2.3.4", - "dsf_response_pool_id": "0xUYeWq92OrQg6ImuFZAdFz8gOs", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [ - "JP" - ], - "region": [] - } - }, - "ordering": "3", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "AS-JP:b2c71ae53a5645cc813c51db2b6571c7", - "criteria_type": "geoip" - }, - { - "dsf_ruleset_id": "lVWDeD6bwuXZtqJu_cwuQJcYVmE", - "response_pools": [ - { - "status": "ok", - "rs_chains": [ - { - "dsf_response_pool_id": "GQ2VYQM9kBPQkLur6IlzZJeCICU", - "core": "false", - "status": "ok", - "dsf_record_set_failover_chain_id": "dYiwBxOAdUjEr638SJ8FycTkolA", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "record_sets": [ - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "ek1O6mp2SnOPxGA4XQfmp9Xkr4o", - "dsf_monitor_id": "htWFldFj4R8dLWiG6dbMbDL0EOE", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "ek1O6mp2SnOPxGA4XQfmp9Xkr4o", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "2.2.3.4", - "dsf_record_id": "s8FCJSXvOch_cHa0wqHPRNIki-U", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - 
}, - "rdata_a": { - "address": "2.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "2.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "ek1O6mp2SnOPxGA4XQfmp9Xkr4o", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "2.2.3.5", - "dsf_record_id": "JXA71fL_4DGdE38A8tIDZ9FnoBU", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "2.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "2.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879670", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "2.2.3.4", - "serve_count": "2" - }, - { - "status": 
"unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "34UoNhEdeOqF61XEW1MsBZ5jyRQ", - "dsf_monitor_id": "", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "34UoNhEdeOqF61XEW1MsBZ5jyRQ", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.4", - "dsf_record_id": "JjeVu2S5kBT6puXAeWyv9GqqV0o", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "34UoNhEdeOqF61XEW1MsBZ5jyRQ", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.5", - "dsf_record_id": "GfOulen8R_E1TwSD0WrUHfiMepk", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": 
"", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879670", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "default", - "serve_count": "2" - } - ], - "label": "2.2.3.4" - } - ], - "automation": "auto", - "last_monitored": "1487879670", - "pending_change": "", - "eligible": "true", - "rulesets": [ - { - "dsf_ruleset_id": "lVWDeD6bwuXZtqJu_cwuQJcYVmE", - "response_pools": [], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [ - "14" - ] - } - }, - "ordering": "4", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "AF", - "criteria_type": "geoip" - } - ], - "label": "2.2.3.4", - "dsf_response_pool_id": "GQ2VYQM9kBPQkLur6IlzZJeCICU", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [ - "14" - ] - } - }, - "ordering": "4", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "AF:b2c71ae53a5645cc813c51db2b6571c7", - "criteria_type": "geoip" - }, - { - "dsf_ruleset_id": "MZFKVbrOa112kb-JyCRHVSmh8NA", - "response_pools": [ - { - "status": "ok", - "rs_chains": [ - { - "dsf_response_pool_id": "KOKo8sVfakgJ1HpqqZ84A7QkGTk", - "core": "false", - "status": "ok", - "dsf_record_set_failover_chain_id": "Vfem7bn-aO_a3l1mhZxYrds-BUg", - "service_id": 
"oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "record_sets": [ - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "EdrNCZ6gM8FCeiXGamxxT39AkkA", - "dsf_monitor_id": "", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "EdrNCZ6gM8FCeiXGamxxT39AkkA", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.4", - "dsf_record_id": "14DpdlaAi81FWR2qB3DW2HYI9YM", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "EdrNCZ6gM8FCeiXGamxxT39AkkA", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.5", - "dsf_record_id": "wlLEe3F4vqEmH-OCcPicDE0K1I0", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - 
"policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879672", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "default", - "serve_count": "2" - } - ], - "label": "default" - } - ], - "automation": "auto", - "last_monitored": "1487879672", - "pending_change": "", - "eligible": "true", - "rulesets": [ - { - "dsf_ruleset_id": "MZFKVbrOa112kb-JyCRHVSmh8NA", - "response_pools": [], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [] - } - }, - "ordering": "5", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "default", - "criteria_type": "always" - } - ], - "label": "default", - "dsf_response_pool_id": "KOKo8sVfakgJ1HpqqZ84A7QkGTk", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [] - } - }, - "ordering": "5", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "other format", - "criteria_type": "always" - }, - { - "dsf_ruleset_id": "MZFKVbrOa112kb-JyCRHVSmh8NA", - "response_pools": [ - { - "status": "ok", - "rs_chains": [], - "automation": "auto", - "last_monitored": "1487879672", - "pending_change": "", - "eligible": "true", - "rulesets": [], - "label": 
"NA-US-FL:norules", - "dsf_response_pool_id": "KOKo8sVfakgJ1HpqqZ84A7QkGTk", - "service_id": "xIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [] - } - }, - "ordering": "5", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "other format", - "criteria_type": "always" - }, - { - "dsf_ruleset_id": "MZFKVbrOa112kb-JyCRHVSmh8NA", - "response_pools": [ - { - "status": "ok", - "rs_chains": [ - { - "dsf_response_pool_id": "KOKo8sVfakgJ1HpqqZ84A7QkGTk", - "core": "false", - "status": "ok", - "dsf_record_set_failover_chain_id": "Vfem7bn-aO_a3l1mhZxYrds-BUg", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "record_sets": [ - { - "status": "unknown", - "rdata_class": "A", - "eligible": "true", - "dsf_record_set_id": "EdrNCZ6gM8FCeiXGamxxT39AkkA", - "dsf_monitor_id": "", - "automation": "auto", - "trouble_count": "0", - "records": [ - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "EdrNCZ6gM8FCeiXGamxxT39AkkA", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.4", - "dsf_record_id": "14DpdlaAi81FWR2qB3DW2HYI9YM", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.4" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - 
"ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.4" - ], - "response_time": 4294967295 - }, - { - "status": "unknown", - "torpidity": 4294967295, - "rdata_class": "A", - "weight": 1, - "eligible": "true", - "dsf_record_set_id": "EdrNCZ6gM8FCeiXGamxxT39AkkA", - "ttl": "300", - "endpoint_up_count": 1, - "label": "", - "automation": "auto", - "master_line": "1.2.3.5", - "dsf_record_id": "wlLEe3F4vqEmH-OCcPicDE0K1I0", - "last_monitored": 0, - "rdata": [ - { - "type": "A", - "data": { - "rdata_kx": { - "preference": 0, - "exchange": "" - }, - "rdata_srv": { - "priority": 0, - "port": 0, - "weight": 0, - "target": "" - }, - "rdata_policy": { - "gui_url": "", - "policy": "", - "rtype": "", - "api_url": "", - "name": "" - }, - "rdata_soa": { - "rname": "", - "retry": 0, - "mname": "", - "minimum": 0, - "refresh": 0, - "expire": 0, - "serial": 0 - }, - "rdata_key": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_ipseckey": { - "public_key": "", - "precedence": 0, - "gatetype": 0, - "algorithm": 0, - "gateway": "" - }, - "rdata_cname": { - "cname": "" - }, - "rdata_loc": { - "horiz_pre": 0, - "altitude": 0, - "longitude": "", - "version": 0, - "vert_pre": 0, - "latitude": "", - "size": 0 - }, - "rdata_spf": { - "txtdata": "" - }, - "rdata_ptr": { - "ptrdname": "" - }, - "rdata_alias": { - "alias": "" - }, - "rdata_ds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_naptr": { - "flags": "", - "preference": 0, - "services": "", - "regexp": "", - "order": 0, - "replacement": "" - }, - "rdata_sshfp": { - "fptype": 0, - "algorithm": 0, - "fingerprint": "" - }, - "rdata_aaaa": { - "address": "" - }, - "rdata_nsap": { - "nsap": "" - }, - "rdata_dhcid": { - "digest": "" - }, - "rdata_dnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - }, - "rdata_cds": { - "keytag": 0, - "digest": "", - "algorithm": 0, - "digtype": 0 - }, - "rdata_txt": { - "txtdata": "" - }, - "rdata_ns": { - "nsdname": "" - }, - "rdata_dname": { - "dname": "" - }, - "rdata_csync": { - "soa_serial": 0, - "flags": "", - "types": "" - }, - "rdata_px": { - "mapx400": "", - "map822": "", - "preference": 0 - }, - "rdata_a": { - "address": "1.2.3.5" - }, - "rdata_cert": { - "tag": 0, - "certificate": "", - "algorithm": 0, - "format": 0 - }, - "rdata_rp": { - "mbox": "", - "txtdname": "" - }, - "rdata_tlsa": { - "cert_usage": 0, - "match_type": 0, - "certificate": "", - "selector": 0 - }, - "rdata_mx": { - "preference": 0, - "exchange": "" - }, - "rdata_cdnskey": { - "protocol": 0, - "flags": 0, - "algorithm": 0, - "public_key": "" - } - }, - "ttl": "" - } - ], - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "endpoints": [ - "1.2.3.5" - ], - "response_time": 4294967295 - } - ], - "fail_count": "1", - "torpidity_max": "0", - "ttl_derived": "300", - "last_monitored": "1487879672", - "ttl": "", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "label": "default", - "serve_count": "2" - } - ], - "label": "default" - } - ], - "automation": "auto", - "last_monitored": "1487879672", - "pending_change": "", - "eligible": "true", - "rulesets": [ - { - "dsf_ruleset_id": "MZFKVbrOa112kb-JyCRHVSmh8NA", - "response_pools": [], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [] - } - }, - "ordering": "5", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "default", - 
"criteria_type": "always" - } - ], - "label": "default", - "dsf_response_pool_id": "KOKo8sVfakgJ1HpqqZ84A7QkGTk", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "core_set_count": "1", - "notifier": "" - } - ], - "criteria": { - "geoip": { - "province": [], - "country": [], - "region": [] - } - }, - "ordering": "5", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "pending_change": "", - "eligible": "", - "label": "default:b2c71ae53a5645cc813c51db2b6571c7", - "criteria_type": "always" - } - ], - "ttl": "300", - "active": "Y", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "nodes": [ - { - "fqdn": "unit.tests", - "zone": "unit.tests" - } - ], - "pending_change": "", - "label": "unit.tests.:A" - }, - "job_id": 3376642606, - "msgs": [ - { - "INFO": "detail: Here is your service", - "LVL": "INFO", - "ERR_CD": null, - "SOURCE": "BLL" - } - ] -} diff --git a/tests/test_octodns_provider_constellix.py b/tests/test_octodns_provider_constellix.py index b35191f..46e87ce 100644 --- a/tests/test_octodns_provider_constellix.py +++ b/tests/test_octodns_provider_constellix.py @@ -2,1895 +2,15 @@ # # - from __future__ import absolute_import, division, print_function, \ unicode_literals -from mock import Mock, PropertyMock, call -from os.path import dirname, join -from requests import HTTPError -from requests_mock import ANY, mock as requests_mock from unittest import TestCase -from octodns.record import Record -from octodns.provider.constellix import \ - ConstellixProvider, ConstellixClientBadRequest -from octodns.provider.yaml import YamlProvider -from octodns.zone import Zone +class TestConstellixShim(TestCase): -class TestConstellixProvider(TestCase): - expected = Zone('unit.tests.', []) - source = YamlProvider('test', join(dirname(__file__), 'config')) - source.populate(expected) - - # Our test suite differs a bit, add our NS and remove the simple one - expected.add_record(Record.new(expected, 'under', { - 'ttl': 3600, - 'type': 'NS', - 'values': [ - 'ns1.unit.tests.', - 'ns2.unit.tests.', - ] - })) - - # Add some ALIAS records - expected.add_record(Record.new(expected, '', { - 'ttl': 1800, - 'type': 'ALIAS', - 'value': 'aname.unit.tests.' - })) - - # Add a dynamic record - expected.add_record(Record.new(expected, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4', - '1.2.3.5' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }, { - 'value': '1.2.3.5', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - })) - - for record in list(expected.records): - if record.name == 'sub' and record._type == 'NS': - expected._remove_record(record) - break - - expected_healthcheck = Zone('unit.tests.', []) - source = YamlProvider('test', join(dirname(__file__), 'config')) - source.populate(expected_healthcheck) - - # Our test suite differs a bit, add our NS and remove the simple one - expected_healthcheck.add_record(Record.new(expected_healthcheck, 'under', { - 'ttl': 3600, - 'type': 'NS', - 'values': [ - 'ns1.unit.tests.', - 'ns2.unit.tests.', - ] - })) - - # Add some ALIAS records - expected_healthcheck.add_record(Record.new(expected_healthcheck, '', { - 'ttl': 1800, - 'type': 'ALIAS', - 'value': 'aname.unit.tests.' 
- })) - - # Add a dynamic record - expected_healthcheck.add_record( - Record.new(expected_healthcheck, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4', - '1.2.3.5' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }, { - 'value': '1.2.3.5', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - 'octodns': { - 'constellix': { - 'healthcheck': { - 'sonar_port': 80, - 'sonar_regions': [ - 'ASIAPAC', - 'EUROPE' - ], - 'sonar_type': 'TCP' - } - } - } - }) - ) - - for record in list(expected_healthcheck.records): - if record.name == 'sub' and record._type == 'NS': - expected_healthcheck._remove_record(record) - break - - expected_healthcheck_world = Zone('unit.tests.', []) - source = YamlProvider('test', join(dirname(__file__), 'config')) - source.populate(expected_healthcheck_world) - - # Our test suite differs a bit, add our NS and remove the simple one - expected_healthcheck_world.add_record( - Record.new(expected_healthcheck_world, 'under', { - 'ttl': 3600, - 'type': 'NS', - 'values': [ - 'ns1.unit.tests.', - 'ns2.unit.tests.', - ] - }) - ) - - # Add some ALIAS records - expected_healthcheck_world.add_record( - Record.new(expected_healthcheck_world, '', { - 'ttl': 1800, - 'type': 'ALIAS', - 'value': 'aname.unit.tests.' - }) - ) - - # Add a dynamic record - expected_healthcheck_world.add_record( - Record.new(expected_healthcheck_world, 'www.dynamic', { - 'ttl': 300, - 'type': 'AAAA', - 'values': [ - '2601:644:500:e210:62f8:1dff:feb8:947a', - '2601:642:500:e210:62f8:1dff:feb8:947a' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '2601:644:500:e210:62f8:1dff:feb8:947a', - 'weight': 1 - }, { - 'value': '2601:642:500:e210:62f8:1dff:feb8:947a', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - 'octodns': { - 'constellix': { - 'healthcheck': { - 'sonar_port': 80, - 'sonar_regions': [ - 'WORLD' - ], - 'sonar_type': 'HTTP' - } - } - } - }) - ) - - for record in list(expected_healthcheck_world.records): - if record.name == 'sub' and record._type == 'NS': - expected_healthcheck_world._remove_record(record) - break - - expected_dynamic = Zone('unit.tests.', []) - source = YamlProvider('test', join(dirname(__file__), 'config')) - source.populate(expected_dynamic) - - # Our test suite differs a bit, add our NS and remove the simple one - expected_dynamic.add_record(Record.new(expected_dynamic, 'under', { - 'ttl': 3600, - 'type': 'NS', - 'values': [ - 'ns1.unit.tests.', - 'ns2.unit.tests.', - ] - })) - - # Add some ALIAS records - expected_dynamic.add_record(Record.new(expected_dynamic, '', { - 'ttl': 1800, - 'type': 'ALIAS', - 'value': 'aname.unit.tests.' 
- })) - - # Add a dynamic record - expected_dynamic.add_record(Record.new(expected_dynamic, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4', - '1.2.3.5' - ], - 'dynamic': { - 'pools': { - 'one': { - 'fallback': 'two', - 'values': [{ - 'value': '1.2.3.6', - 'weight': 1 - }, { - 'value': '1.2.3.7', - 'weight': 1 - }], - }, - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }, { - 'value': '1.2.3.5', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'geos': [ - 'AS', - 'EU-ES', - 'EU-UA', - 'EU-SE', - 'NA-CA-NL', - 'OC' - ], - 'pool': 'one' - }, { - 'pool': 'two', - }], - } - })) - - for record in list(expected_dynamic.records): - if record.name == 'sub' and record._type == 'NS': - expected_dynamic._remove_record(record) - break - - def test_populate(self): - provider = ConstellixProvider('test', 'api', 'secret') - - # Bad auth - with requests_mock() as mock: - mock.get(ANY, status_code=401, - text='{"errors": ["Unable to authenticate token"]}') - - with self.assertRaises(Exception) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals('Unauthorized', str(ctx.exception)) - - with requests_mock() as mock: - mock.get(ANY, status_code=401, - text='{"errors": ["Unable to authenticate token"]}') - - with self.assertRaises(Exception) as ctx: - provider._sonar.agents - self.assertEquals('Unauthorized', str(ctx.exception)) - - # Bad request - with requests_mock() as mock: - mock.get(ANY, status_code=400, - text='{"errors": ["\\"unittests\\" is not ' - 'a valid domain name"]}') - - with self.assertRaises(Exception) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals('\n - "unittests" is not a valid domain name', - str(ctx.exception)) - - with requests_mock() as mock: - mock.get(ANY, status_code=400, - text='error text') - - with self.assertRaises(Exception) as ctx: - provider._sonar.agents - self.assertEquals('\n - error text', - str(ctx.exception)) - - # General error - with requests_mock() as mock: - mock.get(ANY, status_code=502, text='Things caught fire') - - with self.assertRaises(HTTPError) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(502, ctx.exception.response.status_code) - - # Non-existent zone doesn't populate anything - with requests_mock() as mock: - mock.get(ANY, status_code=404, - text='') - - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(set(), zone.records) - - with requests_mock() as mock: - mock.get(ANY, status_code=404, text='') - with self.assertRaises(Exception) as ctx: - provider._sonar.agents - self.assertEquals('Not Found', str(ctx.exception)) - - # Sonar Normal response - provider = ConstellixProvider('test', 'api', 'secret') - with requests_mock() as mock: - mock.get(ANY, status_code=200, text='[]') - agents = provider._sonar.agents - self.assertEquals({}, agents) - agents = provider._sonar.agents - - provider = ConstellixProvider('test', 'api', 'secret', 0.01) - with requests_mock() as mock: - mock.get(ANY, status_code=200, text='[]') - agents = provider._sonar.agents - - provider = ConstellixProvider('test', 'api', 'secret', 1.01) - with requests_mock() as mock: - mock.get(ANY, status_code=200, text='[]') - agents = provider._sonar.agents - - provider = ConstellixProvider('test', 'api', 'secret') - # No diffs == no changes - with requests_mock() as mock: - base = 'https://api.dns.constellix.com/v1' - with open('tests/fixtures/constellix-domains.json') as fh: - mock.get(f'{base}/domains', 
text=fh.read()) - with open('tests/fixtures/constellix-records.json') as fh: - mock.get(f'{base}/domains/123123/records', text=fh.read()) - with open('tests/fixtures/constellix-pools.json') as fh: - mock.get(f'{base}/pools/A', text=fh.read()) - with open('tests/fixtures/constellix-geofilters.json') as fh: - mock.get(f'{base}/geoFilters', text=fh.read()) - - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(17, len(zone.records)) - changes = self.expected_dynamic.changes(zone, provider) - self.assertEquals(0, len(changes)) - - # 2nd populate makes no network calls/all from cache - again = Zone('unit.tests.', []) - provider.populate(again) - self.assertEquals(17, len(again.records)) - - # bust the cache - del provider._zone_records[zone.name] - - def test_apply(self): - provider = ConstellixProvider('test', 'api', 'secret') - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - # non-existent domain, create everything - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808520, - "name": "unit.tests.:www.dynamic:A:two", - }] # pool created in apply - ] - - plan = provider.plan(self.expected) - - # No root NS, no ignored, no excluded, no unsupported - n = len(self.expected.records) - 8 - self.assertEquals(n, len(plan.changes)) - self.assertEquals(n, provider.apply(plan)) - - provider._client._request.assert_has_calls([ - # get all domains to build the cache - call('GET', '/domains'), - # created the domain - call('POST', '/domains', data={'names': ['unit.tests']}) - ]) - - # Check we tried to get our pool - provider._client._request.assert_has_calls([ - # get all pools to build the cache - call('GET', '/pools/A'), - # created the pool - call('POST', '/pools/A', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - "value": "1.2.3.4", - "weight": 1 - }, { - "value": "1.2.3.5", - "weight": 1 - }] - }) - ]) - - # These two checks are broken up so that ordering doesn't break things. - # Python3 doesn't make the calls in a consistent order so different - # things follow the GET / on different runs - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/SRV', data={ - 'roundRobin': [{ - 'priority': 10, - 'weight': 20, - 'value': 'foo-1.unit.tests.', - 'port': 30 - }, { - 'priority': 12, - 'weight': 20, - 'value': 'foo-2.unit.tests.', - 'port': 30 - }], - 'name': '_srv._tcp', - 'ttl': 600, - }), - ]) - - self.assertEquals(22, provider._client._request.call_count) - - provider._client._request.reset_mock() - - provider._client.records = Mock(return_value=[ - { - 'id': 11189897, - 'type': 'A', - 'name': 'www', - 'ttl': 300, - 'recordOption': 'roundRobin', - 'value': [ - '1.2.3.4', - '2.2.3.4', - ] - }, { - 'id': 11189898, - 'type': 'A', - 'name': 'ttl', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [ - '3.2.3.4' - ] - }, { - 'id': 11189899, - 'type': 'ALIAS', - 'name': 'alias', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [{ - 'value': 'aname.unit.tests.' 
- }] - }, { - "id": 1808520, - "type": "A", - "name": "www.dynamic", - "geolocation": None, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808521 - ] - } - ]) - - provider._client.pools = Mock(return_value=[{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - }, - { - "value": "1.2.3.5", - "weight": 1 - } - ] - }]) - - # Domain exists, we don't care about return - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:one" - }] # pool created in apply - ] - - wanted = Zone('unit.tests.', []) - wanted.add_record(Record.new(wanted, 'ttl', { - 'ttl': 300, - 'type': 'A', - 'value': '3.2.3.4' - })) - - wanted.add_record(Record.new(wanted, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - })) - - plan = provider.plan(wanted) - self.assertEquals(4, len(plan.changes)) - self.assertEquals(4, provider.apply(plan)) - - # recreate for update, and deletes for the 2 parts of the other - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/A', data={ - 'roundRobin': [{ - 'value': '3.2.3.4' - }], - 'name': 'ttl', - 'ttl': 300 - }), - call('PUT', '/pools/A/1808521', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - "value": "1.2.3.4", - "weight": 1 - }], - 'id': 1808521, - 'geofilter': 1 - }), - call('DELETE', '/domains/123123/records/A/11189897'), - call('DELETE', '/domains/123123/records/A/11189898'), - call('DELETE', '/domains/123123/records/ANAME/11189899'), - ], any_order=True) - - def test_apply_healthcheck(self): - provider = ConstellixProvider('test', 'api', 'secret') - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - # non-existent domain, create everything - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808520, - "name": "unit.tests.:www.dynamic:A:two", - }] # pool created in apply - ] - - sonar_resp = Mock() - sonar_resp.json = Mock() - type(sonar_resp).headers = PropertyMock(return_value={ - "Location": "http://api.sonar.constellix.com/rest/api/tcp/52906" - }) - sonar_resp.headers = Mock() - provider._sonar._request = Mock(return_value=sonar_resp) - - sonar_resp.json.side_effect = [ - [{ - "id": 1, - "name": "USWAS01", - "label": "Site 1", - "location": "Washington, DC, U.S.A", - "country": "U.S.A", - "region": "ASIAPAC" - }, { - "id": 23, - "name": "CATOR01", - "label": "Site 1", - "location": "Toronto,Canada", - "country": "Canada", - "region": "EUROPE" - }, { - "id": 25, - "name": "CATOR01", - "label": "Site 1", - "location": "Toronto,Canada", - "country": "Canada", - "region": "OCEANIA" - }], # available agents - [{ - "id": 52, - "name": "unit.tests.:www.dynamic:A:two-1.2.3.4" - }], # initial checks - { - "type": 'TCP' - }, # check type - { - "id": 52906, - "name": "unit.tests.:www.dynamic:A:two-1.2.3.4" - }, - { - "id": 52907, - "name": "unit.tests.:www.dynamic:A:two-1.2.3.5" - } - ] - - plan = 
provider.plan(self.expected_healthcheck) - - # No root NS, no ignored, no excluded, no unsupported - n = len(self.expected_healthcheck.records) - 8 - self.assertEquals(n, len(plan.changes)) - self.assertEquals(n, provider.apply(plan)) - - provider._client._request.assert_has_calls([ - # get all domains to build the cache - call('GET', '/domains'), - # created the domain - call('POST', '/domains', data={'names': ['unit.tests']}) - ]) - - # Check we tried to get our pool - provider._client._request.assert_has_calls([ - # get all pools to build the cache - call('GET', '/pools/A'), - # created the pool - call('POST', '/pools/A', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - "value": "1.2.3.4", - "weight": 1, - "checkId": 52906, - "policy": 'followsonar' - }, { - "value": "1.2.3.5", - "weight": 1, - "checkId": 52907, - "policy": 'followsonar' - }] - }) - ]) - - # These two checks are broken up so that ordering doesn't break things. - # Python3 doesn't make the calls in a consistent order so different - # things follow the GET / on different runs - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/SRV', data={ - 'roundRobin': [{ - 'priority': 10, - 'weight': 20, - 'value': 'foo-1.unit.tests.', - 'port': 30 - }, { - 'priority': 12, - 'weight': 20, - 'value': 'foo-2.unit.tests.', - 'port': 30 - }], - 'name': '_srv._tcp', - 'ttl': 600, - }), - ]) - - self.assertEquals(22, provider._client._request.call_count) - - provider._client._request.reset_mock() - - provider._client.records = Mock(return_value=[ - { - 'id': 11189897, - 'type': 'A', - 'name': 'www', - 'ttl': 300, - 'recordOption': 'roundRobin', - 'value': [ - '1.2.3.4', - '2.2.3.4', - ] - }, { - 'id': 11189898, - 'type': 'A', - 'name': 'ttl', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [ - '3.2.3.4' - ] - }, { - 'id': 11189899, - 'type': 'ALIAS', - 'name': 'alias', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [{ - 'value': 'aname.unit.tests.' 
- }] - }, { - "id": 1808520, - "type": "A", - "name": "www.dynamic", - "geolocation": None, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808521 - ] - } - ]) - - provider._client.pools = Mock(return_value=[{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - }, - { - "value": "1.2.3.5", - "weight": 1 - } - ] - }]) - - # Domain exists, we don't care about return - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:one" - }] # pool created in apply - ] - - wanted = Zone('unit.tests.', []) - wanted.add_record(Record.new(wanted, 'ttl', { - 'ttl': 300, - 'type': 'A', - 'value': '3.2.3.4' - })) - - wanted.add_record(Record.new(wanted, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - })) - - plan = provider.plan(wanted) - self.assertEquals(4, len(plan.changes)) - self.assertEquals(4, provider.apply(plan)) - - # recreate for update, and deletes for the 2 parts of the other - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/A', data={ - 'roundRobin': [{ - 'value': '3.2.3.4' - }], - 'name': 'ttl', - 'ttl': 300 - }), - call('PUT', '/pools/A/1808521', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - "value": "1.2.3.4", - "weight": 1 - }], - 'id': 1808521, - 'geofilter': 1 - }), - call('DELETE', '/domains/123123/records/A/11189897'), - call('DELETE', '/domains/123123/records/A/11189898'), - call('DELETE', '/domains/123123/records/ANAME/11189899'), - ], any_order=True) - - def test_apply_healthcheck_world(self): - provider = ConstellixProvider('test', 'api', 'secret') - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - # non-existent domain, create everything - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808520, - "name": "unit.tests.:www.dynamic:A:two", - }] # pool created in apply - ] - - sonar_resp = Mock() - sonar_resp.json = Mock() - type(sonar_resp).headers = PropertyMock(return_value={ - "Location": "http://api.sonar.constellix.com/rest/api/tcp/52906" - }) - sonar_resp.headers = Mock() - provider._sonar._request = Mock(return_value=sonar_resp) - - sonar_resp.json.side_effect = [ - [{ - "id": 1, - "name": "USWAS01", - "label": "Site 1", - "location": "Washington, DC, U.S.A", - "country": "U.S.A", - "region": "ASIAPAC" - }, { - "id": 23, - "name": "CATOR01", - "label": "Site 1", - "location": "Toronto,Canada", - "country": "Canada", - "region": "EUROPE" - }], # available agents - [], # no checks - { - "id": 52906, - "name": "check1" - }, - { - "id": 52907, - "name": "check2" - } - ] - - plan = provider.plan(self.expected_healthcheck_world) - - # No root NS, no ignored, no excluded, no unsupported - n = len(self.expected_healthcheck.records) - 8 - self.assertEquals(n, len(plan.changes)) - self.assertEquals(n, provider.apply(plan)) - - provider._client._request.assert_has_calls([ - # get all domains to 
build the cache - call('GET', '/domains'), - # created the domain - call('POST', '/domains', data={'names': ['unit.tests']}) - ]) - - # Check we tried to get our pool - provider._client._request.assert_has_calls([ - # get all pools to build the cache - call('GET', '/pools/AAAA'), - # created the pool - call('POST', '/pools/AAAA', data={ - 'name': 'unit.tests.:www.dynamic:AAAA:two', - 'type': 'AAAA', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - "value": "2601:642:500:e210:62f8:1dff:feb8:947a", - "weight": 1, - "checkId": 52906, - "policy": 'followsonar' - }, { - "value": "2601:644:500:e210:62f8:1dff:feb8:947a", - "weight": 1, - "checkId": 52907, - "policy": 'followsonar' - }] - }) - ]) - - # These two checks are broken up so that ordering doesn't break things. - # Python3 doesn't make the calls in a consistent order so different - # things follow the GET / on different runs - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/SRV', data={ - 'roundRobin': [{ - 'priority': 10, - 'weight': 20, - 'value': 'foo-1.unit.tests.', - 'port': 30 - }, { - 'priority': 12, - 'weight': 20, - 'value': 'foo-2.unit.tests.', - 'port': 30 - }], - 'name': '_srv._tcp', - 'ttl': 600, - }), - ]) - - self.assertEquals(22, provider._client._request.call_count) - - provider._client._request.reset_mock() - - provider._client.records = Mock(return_value=[ - { - 'id': 11189897, - 'type': 'A', - 'name': 'www', - 'ttl': 300, - 'recordOption': 'roundRobin', - 'value': [ - '1.2.3.4', - '2.2.3.4', - ] - }, { - 'id': 11189898, - 'type': 'A', - 'name': 'ttl', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [ - '3.2.3.4' - ] - }, { - 'id': 11189899, - 'type': 'ALIAS', - 'name': 'alias', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [{ - 'value': 'aname.unit.tests.' 
- }] - }, { - "id": 1808520, - "type": "A", - "name": "www.dynamic", - "geolocation": None, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808521 - ] - } - ]) - - provider._client.pools = Mock(return_value=[{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - }, - { - "value": "1.2.3.5", - "weight": 1 - } - ] - }]) - - # Domain exists, we don't care about return - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:one" - }] # pool created in apply - ] - - wanted = Zone('unit.tests.', []) - wanted.add_record(Record.new(wanted, 'ttl', { - 'ttl': 300, - 'type': 'A', - 'value': '3.2.3.4' - })) - - wanted.add_record(Record.new(wanted, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - })) - - plan = provider.plan(wanted) - self.assertEquals(4, len(plan.changes)) - self.assertEquals(4, provider.apply(plan)) - - # recreate for update, and deletes for the 2 parts of the other - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/A', data={ - 'roundRobin': [{ - 'value': '3.2.3.4' - }], - 'name': 'ttl', - 'ttl': 300 - }), - call('PUT', '/pools/A/1808521', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - "value": "1.2.3.4", - "weight": 1 - }], - 'id': 1808521, - 'geofilter': 1 - }), - call('DELETE', '/domains/123123/records/A/11189897'), - call('DELETE', '/domains/123123/records/A/11189898'), - call('DELETE', '/domains/123123/records/ANAME/11189899'), - ], any_order=True) - - def test_apply_dynamic(self): - provider = ConstellixProvider('test', 'api', 'secret') - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - # non-existent domain, create everything - resp.json.side_effect = [ - [], # no domains returned during populate - [{ - 'id': 123123, - 'name': 'unit.tests' - }], # domain created in apply - [], # No pools returned during populate - [{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:one" - }], # pool created in apply - [], # no geofilters returned during populate - [{ - "id": 5303, - "name": "unit.tests.:www.dynamic:A:one", - "filterRulesLimit": 100, - "geoipContinents": ["AS", "OC"], - "geoipCountries": ["ES", "SE", "UA"], - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode": "NL" - } - ] - }], # geofilters created in applly - [{ - "id": 1808520, - "name": "unit.tests.:www.dynamic:A:two", - }], # pool created in apply - { - 'id': 123123, - 'name': 'unit.tests', - 'hasGeoIP': False - }, # domain listed for enabling geo - [] # enabling geo - ] - - plan = provider.plan(self.expected_dynamic) - - # No root NS, no ignored, no excluded, no unsupported - n = len(self.expected_dynamic.records) - 8 - self.assertEquals(n, len(plan.changes)) - self.assertEquals(n, provider.apply(plan)) - - provider._client._request.assert_has_calls([ - # get all domains to build the cache - call('GET', '/domains'), - # created the domain - call('POST', '/domains', data={'names': ['unit.tests']}) - ]) -# - # Check we tried to get our pool - 
provider._client._request.assert_has_calls([ - call('GET', '/pools/A'), - call('POST', '/pools/A', data={ - 'name': 'unit.tests.:www.dynamic:A:one', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - 'value': '1.2.3.6', - 'weight': 1 - }, { - 'value': '1.2.3.7', - 'weight': 1}] - }), - call('GET', '/geoFilters'), - call('POST', '/geoFilters', data={ - 'filterRulesLimit': 100, - 'name': 'unit.tests.:www.dynamic:A:one', - 'geoipContinents': ['AS', 'OC'], - 'geoipCountries': ['ES', 'SE', 'UA'], - 'regions': [{ - 'continentCode': 'NA', - 'countryCode': 'CA', - 'regionCode': 'NL'}] - }), - call('POST', '/pools/A', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }, { - 'value': '1.2.3.5', - 'weight': 1}] - }) - ]) - - # These two checks are broken up so that ordering doesn't break things. - # Python3 doesn't make the calls in a consistent order so different - # things follow the GET / on different runs - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/SRV', data={ - 'roundRobin': [{ - 'priority': 10, - 'weight': 20, - 'value': 'foo-1.unit.tests.', - 'port': 30 - }, { - 'priority': 12, - 'weight': 20, - 'value': 'foo-2.unit.tests.', - 'port': 30 - }], - 'name': '_srv._tcp', - 'ttl': 600, - }), - ]) - - self.assertEquals(28, provider._client._request.call_count) - - provider._client._request.reset_mock() - - provider._client.records = Mock(return_value=[ - { - 'id': 11189897, - 'type': 'A', - 'name': 'www', - 'ttl': 300, - 'recordOption': 'roundRobin', - 'value': [ - '1.2.3.4', - '2.2.3.4', - ] - }, { - 'id': 11189898, - 'type': 'A', - 'name': 'ttl', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [ - '3.2.3.4' - ] - }, { - 'id': 11189899, - 'type': 'ALIAS', - 'name': 'alias', - 'ttl': 600, - 'recordOption': 'roundRobin', - 'value': [{ - 'value': 'aname.unit.tests.' 
- }] - }, { - "id": 1808520, - "type": "A", - "name": "www.dynamic", - "geolocation": { - "geoipFilter": 1 - }, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808521 - ] - }, { - "id": 1808521, - "type": "A", - "name": "www.dynamic", - "geolocation": { - "geoipFilter": 5303 - }, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808522 - ] - } - ]) - - provider._client.pools = Mock(return_value=[ - { - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - }, - { - "value": "1.2.3.5", - "weight": 1 - } - ] - }, - { - "id": 1808522, - "name": "unit.tests.:www.dynamic:A:one", - "type": "A", - "values": [ - { - "value": "1.2.3.6", - "weight": 1 - }, - { - "value": "1.2.3.7", - "weight": 1 - } - ] - } - ]) - - provider._client.geofilters = Mock(return_value=[ - { - "id": 5303, - "name": "unit.tests.:www.dynamic:A:one", - "filterRulesLimit": 100, - "geoipContinents": ["AS", "OC"], - "geoipCountries": ["ES", "SE", "UA"], - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode": "NL" - } - ] - } - ]) - - # Domain exists, we don't care about return - resp.json.side_effect = [ - [], - [], - [], - [], - { - 'id': 123123, - 'name': 'unit.tests', - 'hasGeoIP': True - } # domain listed for enabling geo - ] - - wanted = Zone('unit.tests.', []) - wanted.add_record(Record.new(wanted, 'ttl', { - 'ttl': 300, - 'type': 'A', - 'value': '3.2.3.4' - })) - - wanted.add_record(Record.new(wanted, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4' - ], - 'dynamic': { - 'pools': { - 'one': { - 'fallback': 'two', - 'values': [{ - 'value': '1.2.3.6', - 'weight': 1 - }, { - 'value': '1.2.3.7', - 'weight': 1 - }], - }, - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'geos': [ - 'AS', - 'EU-ES', - 'EU-UA', - 'EU-SE', - 'NA-CA-NL', - 'OC' - ], - 'pool': 'one' - }, { - 'pool': 'two', - }], - }, - })) - - plan = provider.plan(wanted) - self.assertEquals(4, len(plan.changes)) - self.assertEquals(4, provider.apply(plan)) - - # recreate for update, and deletes for the 2 parts of the other - provider._client._request.assert_has_calls([ - call('POST', '/domains/123123/records/A', data={ - 'roundRobin': [{ - 'value': '3.2.3.4' - }], - 'name': 'ttl', - 'ttl': 300 - }), - - call('DELETE', '/domains/123123/records/A/1808521'), - call('DELETE', '/geoFilters/5303'), - call('DELETE', '/pools/A/1808522'), - call('DELETE', '/domains/123123/records/A/1808520'), - call('DELETE', '/pools/A/1808521'), - call('DELETE', '/domains/123123/records/ANAME/11189899'), - - call('PUT', '/pools/A/1808522', data={ - 'name': 'unit.tests.:www.dynamic:A:one', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [ - {'value': '1.2.3.6', 'weight': 1}, - {'value': '1.2.3.7', 'weight': 1}], - 'id': 1808522, - 'geofilter': 5303 - }), - - call('PUT', '/geoFilters/5303', data={ - 'filterRulesLimit': 100, - 'name': 'unit.tests.:www.dynamic:A:one', - 'geoipContinents': ['AS', 'OC'], - 'geoipCountries': ['ES', 'SE', 'UA'], - 'regions': [{ - 'continentCode': 'NA', - 'countryCode': 'CA', - 'regionCode': 'NL'}], - 'id': 5303 - }), - - call('PUT', '/pools/A/1808521', data={ - 'name': 'unit.tests.:www.dynamic:A:two', - 'type': 'A', - 'numReturn': 1, - 'minAvailableFailover': 1, - 'ttl': 300, - 'values': [{'value': '1.2.3.4', 'weight': 1}], - 'id': 1808521, - 'geofilter': 1 - }), - - call('GET', '/domains/123123'), - call('POST', 
'/domains/123123/records/A', data={ - 'name': 'www.dynamic', - 'ttl': 300, - 'pools': [1808522], - 'recordOption': 'pools', - 'geolocation': { - 'geoipUserRegion': [5303] - } - }), - - call('POST', '/domains/123123/records/A', data={ - 'name': 'www.dynamic', - 'ttl': 300, - 'pools': [1808522], - 'recordOption': 'pools', - 'geolocation': { - 'geoipUserRegion': [5303] - } - }) - ], any_order=True) - - def test_dynamic_record_failures(self): - provider = ConstellixProvider('test', 'api', 'secret') - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - # Let's handle some failures for pools - first if it's not a simple - # weighted pool - we'll be OK as we assume a weight of 1 for all - # entries - provider._client._request.reset_mock() - provider._client.records = Mock(return_value=[ - { - "id": 1808520, - "type": "A", - "name": "www.dynamic", - "geolocation": None, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808521 - ] - } - ]) - - provider._client.pools = Mock(return_value=[{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - } - ] - }]) - - provider._client.geofilters = Mock(return_value=[]) - - wanted = Zone('unit.tests.', []) - - resp.json.side_effect = [ - ['{}'], - ['{}'], - ] - wanted.add_record(Record.new(wanted, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 'values': [ - '1.2.3.4' - ], - 'dynamic': { - 'pools': { - 'two': { - 'values': [{ - 'value': '1.2.3.4' - }], - }, - }, - 'rules': [{ - 'pool': 'two', - }], - }, - })) - - plan = provider.plan(wanted) - self.assertIsNone(plan) - - def test_dynamic_record_updates(self): - provider = ConstellixProvider('test', 'api', 'secret') - - # Constellix API can return an error if you try and update a pool and - # don't change anything, so let's test we handle it silently - - provider._client.records = Mock(return_value=[ - { - "id": 1808520, - "type": "A", - "name": "www.dynamic", - "geolocation": { - "geoipFilter": 1 - }, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808521 - ] - }, { - "id": 1808521, - "type": "A", - "name": "www.dynamic", - "geolocation": { - "geoipFilter": 5303 - }, - "recordOption": "pools", - "ttl": 300, - "value": [], - "pools": [ - 1808522 - ] - } - ]) - - provider._client.pools = Mock(return_value=[ - { - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - }, - { - "value": "1.2.3.5", - "weight": 1 - } - ] - }, - { - "id": 1808522, - "name": "unit.tests.:www.dynamic:A:one", - "type": "A", - "values": [ - { - "value": "1.2.3.6", - "weight": 1 - }, - { - "value": "1.2.3.7", - "weight": 1 - } - ] - } - ]) - - provider._client.geofilters = Mock(return_value=[ - { - "id": 6303, - "name": "some.other", - "filterRulesLimit": 100, - "createdTs": "2021-08-19T14:47:47Z", - "modifiedTs": "2021-08-19T14:47:47Z", - "geoipContinents": ["AS", "OC"], - "geoipCountries": ["ES", "SE", "UA"], - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode": "NL" - } - ] - }, { - "id": 5303, - "name": "unit.tests.:www.dynamic:A:one", - "filterRulesLimit": 100, - "geoipContinents": ["AS", "OC"], - "geoipCountries": ["ES", "SE", "UA"], - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode": "NL" - } - ] - } - ]) - - wanted = Zone('unit.tests.', []) - - wanted.add_record(Record.new(wanted, 'www.dynamic', { - 'ttl': 300, - 'type': 'A', - 
'values': [ - '1.2.3.4' - ], - 'dynamic': { - 'pools': { - 'one': { - 'fallback': 'two', - 'values': [{ - 'value': '1.2.3.6', - 'weight': 1 - }, { - 'value': '1.2.3.7', - 'weight': 1 - }], - }, - 'two': { - 'values': [{ - 'value': '1.2.3.4', - 'weight': 1 - }], - }, - }, - 'rules': [{ - 'geos': [ - 'AS', - 'EU-ES', - 'EU-UA', - 'EU-SE', - 'OC' - ], - 'pool': 'one' - }, { - 'pool': 'two', - }], - }, - })) - - # Try an error we can handle - with requests_mock() as mock: - mock.get( - "https://api.dns.constellix.com/v1/domains", - status_code=200, - text='[{"id": 1234, "name": "unit.tests", "hasGeoIP": true}]') - mock.get( - "https://api.dns.constellix.com/v1/domains/1234", - status_code=200, - text='{"id": 1234, "name": "unit.tests", "hasGeoIP": true}') - mock.delete(ANY, status_code=200, - text='{}') - mock.put("https://api.dns.constellix.com/v1/pools/A/1808521", - status_code=400, - text='{"errors": [\"no changes to save\"]}') - mock.put("https://api.dns.constellix.com/v1/pools/A/1808522", - status_code=400, - text='{"errors": [\"no changes to save\"]}') - mock.put("https://api.dns.constellix.com/v1/geoFilters/5303", - status_code=400, - text='{"errors": [\"no changes to save\"]}') - mock.post(ANY, status_code=200, - text='[{"id": 1234}]') - - plan = provider.plan(wanted) - self.assertEquals(1, len(plan.changes)) - self.assertEquals(1, provider.apply(plan)) - - provider._client.geofilters = Mock(return_value=[ - { - "id": 5303, - "name": "unit.tests.:www.dynamic:A:one", - "filterRulesLimit": 100, - "regions": [ - { - "continentCode": "NA", - "countryCode": "CA", - "regionCode": "NL" - } - ] - } - ]) - - plan = provider.plan(wanted) - self.assertEquals(1, len(plan.changes)) - self.assertEquals(1, provider.apply(plan)) - - provider._client.geofilters = Mock(return_value=[ - { - "id": 5303, - "name": "unit.tests.:www.dynamic:A:one", - "filterRulesLimit": 100, - "geoipContinents": ["AS", "OC"], - } - ]) - - plan = provider.plan(wanted) - self.assertEquals(1, len(plan.changes)) - self.assertEquals(1, provider.apply(plan)) - - # Now what happens if an error happens that we can't handle - # geofilter case - with requests_mock() as mock: - mock.get( - "https://api.dns.constellix.com/v1/domains", - status_code=200, - text='[{"id": 1234, "name": "unit.tests", "hasGeoIP": true}]') - mock.get( - "https://api.dns.constellix.com/v1/domains/1234", - status_code=200, - text='{"id": 1234, "name": "unit.tests", "hasGeoIP": true}') - mock.delete(ANY, status_code=200, - text='{}') - mock.put("https://api.dns.constellix.com/v1/pools/A/1808521", - status_code=400, - text='{"errors": [\"no changes to save\"]}') - mock.put("https://api.dns.constellix.com/v1/pools/A/1808522", - status_code=400, - text='{"errors": [\"no changes to save\"]}') - mock.put("https://api.dns.constellix.com/v1/geoFilters/5303", - status_code=400, - text='{"errors": [\"generic error\"]}') - mock.post(ANY, status_code=200, - text='[{"id": 1234}]') - - plan = provider.plan(wanted) - self.assertEquals(1, len(plan.changes)) - with self.assertRaises(ConstellixClientBadRequest): - provider.apply(plan) - - # Now what happens if an error happens that we can't handle - with requests_mock() as mock: - mock.get( - "https://api.dns.constellix.com/v1/domains", - status_code=200, - text='[{"id": 1234, "name": "unit.tests", "hasGeoIP": true}]') - mock.get( - "https://api.dns.constellix.com/v1/domains/1234", - status_code=200, - text='{"id": 1234, "name": "unit.tests", "hasGeoIP": true}') - mock.delete(ANY, status_code=200, - text='{}') - 
mock.put("https://api.dns.constellix.com/v1/pools/A/1808521", - status_code=400, - text='{"errors": [\"generic error\"]}') - mock.put("https://api.dns.constellix.com/v1/pools/A/1808522", - status_code=400, - text='{"errors": [\"generic error\"]}') - mock.put("https://api.dns.constellix.com/v1/geoFilters/5303", - status_code=400, - text='{"errors": [\"generic error\"]}') - mock.post(ANY, status_code=200, - text='[{"id": 1234}]') - - plan = provider.plan(wanted) - self.assertEquals(1, len(plan.changes)) - with self.assertRaises(ConstellixClientBadRequest): - provider.apply(plan) - - def test_pools_that_are_notfound(self): - provider = ConstellixProvider('test', 'api', 'secret') - - provider._client.pools = Mock(return_value=[{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - } - ] - }]) - - self.assertIsNone(provider._client.pool_by_id('A', 1)) - self.assertIsNone(provider._client.pool('A', 'foobar')) - - def test_pools_are_cached_correctly(self): - provider = ConstellixProvider('test', 'api', 'secret') - - provider._client.pools = Mock(return_value=[{ - "id": 1808521, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - } - ] - }]) - - found = provider._client.pool('A', 'unit.tests.:www.dynamic:A:two') - self.assertIsNotNone(found) - - not_found = provider._client.pool('AAAA', - 'unit.tests.:www.dynamic:A:two') - self.assertIsNone(not_found) - - provider._client.pools = Mock(return_value=[{ - "id": 42, - "name": "unit.tests.:www.dynamic:A:two", - "type": "A", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - } - ] - }, { - "id": 451, - "name": "unit.tests.:www.dynamic:A:two", - "type": "AAAA", - "values": [ - { - "value": "1.2.3.4", - "weight": 1 - } - ] - }]) - - a_pool = provider._client.pool('A', 'unit.tests.:www.dynamic:A:two') - self.assertEquals(42, a_pool['id']) - - aaaa_pool = provider._client.pool('AAAA', - 'unit.tests.:www.dynamic:A:two') - self.assertEquals(451, aaaa_pool['id']) + def test_missing(self): + with self.assertRaises(ModuleNotFoundError): + from octodns.provider.constellix import ConstellixProvider + ConstellixProvider diff --git a/tests/test_octodns_provider_digitalocean.py b/tests/test_octodns_provider_digitalocean.py index dca7ccc..56ca965 100644 --- a/tests/test_octodns_provider_digitalocean.py +++ b/tests/test_octodns_provider_digitalocean.py @@ -2,273 +2,15 @@ # # - from __future__ import absolute_import, division, print_function, \ unicode_literals -from mock import Mock, call -from os.path import dirname, join -from requests import HTTPError -from requests_mock import ANY, mock as requests_mock from unittest import TestCase -from octodns.record import Record -from octodns.provider.digitalocean import DigitalOceanClientNotFound, \ - DigitalOceanProvider -from octodns.provider.yaml import YamlProvider -from octodns.zone import Zone +class TestDigitalOceanShim(TestCase): -class TestDigitalOceanProvider(TestCase): - expected = Zone('unit.tests.', []) - source = YamlProvider('test', join(dirname(__file__), 'config')) - source.populate(expected) - - # Our test suite differs a bit, add our NS and remove the simple one - expected.add_record(Record.new(expected, 'under', { - 'ttl': 3600, - 'type': 'NS', - 'values': [ - 'ns1.unit.tests.', - 'ns2.unit.tests.', - ] - })) - for record in list(expected.records): - if record.name == 'sub' and record._type == 'NS': - expected._remove_record(record) - break - - def 
test_populate(self): - provider = DigitalOceanProvider('test', 'token') - - # Bad auth - with requests_mock() as mock: - mock.get(ANY, status_code=401, - text='{"id":"unauthorized",' - '"message":"Unable to authenticate you."}') - - with self.assertRaises(Exception) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals('Unauthorized', str(ctx.exception)) - - # General error - with requests_mock() as mock: - mock.get(ANY, status_code=502, text='Things caught fire') - - with self.assertRaises(HTTPError) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(502, ctx.exception.response.status_code) - - # Non-existent zone doesn't populate anything - with requests_mock() as mock: - mock.get(ANY, status_code=404, - text='{"id":"not_found","message":"The resource you ' - 'were accessing could not be found."}') - - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(set(), zone.records) - - # No diffs == no changes - with requests_mock() as mock: - base = 'https://api.digitalocean.com/v2/domains/unit.tests/' \ - 'records?page=' - with open('tests/fixtures/digitalocean-page-1.json') as fh: - mock.get(f'{base}1', text=fh.read()) - with open('tests/fixtures/digitalocean-page-2.json') as fh: - mock.get(f'{base}2', text=fh.read()) - - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(14, len(zone.records)) - changes = self.expected.changes(zone, provider) - self.assertEquals(0, len(changes)) - - # 2nd populate makes no network calls/all from cache - again = Zone('unit.tests.', []) - provider.populate(again) - self.assertEquals(14, len(again.records)) - - # bust the cache - del provider._zone_records[zone.name] - - def test_apply(self): - provider = DigitalOceanProvider('test', 'token') - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - domain_after_creation = { - "domain_records": [{ - "id": 11189874, - "type": "NS", - "name": "@", - "data": "ns1.digitalocean.com", - "priority": None, - "port": None, - "ttl": 3600, - "weight": None, - "flags": None, - "tag": None - }, { - "id": 11189875, - "type": "NS", - "name": "@", - "data": "ns2.digitalocean.com", - "priority": None, - "port": None, - "ttl": 3600, - "weight": None, - "flags": None, - "tag": None - }, { - "id": 11189876, - "type": "NS", - "name": "@", - "data": "ns3.digitalocean.com", - "priority": None, - "port": None, - "ttl": 3600, - "weight": None, - "flags": None, - "tag": None - }, { - "id": 11189877, - "type": "A", - "name": "@", - "data": "192.0.2.1", - "priority": None, - "port": None, - "ttl": 3600, - "weight": None, - "flags": None, - "tag": None - }], - "links": {}, - "meta": { - "total": 4 - } - } - - # non-existent domain, create everything - resp.json.side_effect = [ - DigitalOceanClientNotFound, # no zone in populate - DigitalOceanClientNotFound, # no domain during apply - domain_after_creation - ] - plan = provider.plan(self.expected) - - # No root NS, no ignored, no excluded, no unsupported - n = len(self.expected.records) - 10 - self.assertEquals(n, len(plan.changes)) - self.assertEquals(n, provider.apply(plan)) - self.assertFalse(plan.exists) - - provider._client._request.assert_has_calls([ - # created the domain - call('POST', '/domains', data={'ip_address': '192.0.2.1', - 'name': 'unit.tests'}), - # get all records in newly created zone - call('GET', '/domains/unit.tests/records', {'page': 1}), - # delete the initial A record - call('DELETE', 
'/domains/unit.tests/records/11189877'), - # created at least some of the record with expected data - call('POST', '/domains/unit.tests/records', data={ - 'data': '1.2.3.4', - 'name': '@', - 'ttl': 300, 'type': 'A'}), - call('POST', '/domains/unit.tests/records', data={ - 'data': '1.2.3.5', - 'name': '@', - 'ttl': 300, 'type': 'A'}), - call('POST', '/domains/unit.tests/records', data={ - 'data': 'ca.unit.tests', - 'flags': 0, 'name': '@', - 'tag': 'issue', - 'ttl': 3600, 'type': 'CAA'}), - call('POST', '/domains/unit.tests/records', data={ - 'name': '_imap._tcp', - 'weight': 0, - 'data': '.', - 'priority': 0, - 'ttl': 600, - 'type': 'SRV', - 'port': 0 - }), - call('POST', '/domains/unit.tests/records', data={ - 'name': '_pop3._tcp', - 'weight': 0, - 'data': '.', - 'priority': 0, - 'ttl': 600, - 'type': 'SRV', - 'port': 0 - }), - call('POST', '/domains/unit.tests/records', data={ - 'name': '_srv._tcp', - 'weight': 20, - 'data': 'foo-1.unit.tests.', - 'priority': 10, - 'ttl': 600, - 'type': 'SRV', - 'port': 30 - }), - ]) - self.assertEquals(26, provider._client._request.call_count) - - provider._client._request.reset_mock() - - # delete 1 and update 1 - provider._client.records = Mock(return_value=[ - { - 'id': 11189897, - 'name': 'www', - 'data': '1.2.3.4', - 'ttl': 300, - 'type': 'A', - }, - { - 'id': 11189898, - 'name': 'www', - 'data': '2.2.3.4', - 'ttl': 300, - 'type': 'A', - }, - { - 'id': 11189899, - 'name': 'ttl', - 'data': '3.2.3.4', - 'ttl': 600, - 'type': 'A', - } - ]) - - # Domain exists, we don't care about return - resp.json.side_effect = ['{}'] - - wanted = Zone('unit.tests.', []) - wanted.add_record(Record.new(wanted, 'ttl', { - 'ttl': 300, - 'type': 'A', - 'value': '3.2.3.4' - })) - - plan = provider.plan(wanted) - self.assertTrue(plan.exists) - self.assertEquals(2, len(plan.changes)) - self.assertEquals(2, provider.apply(plan)) - # recreate for update, and delete for the 2 parts of the other - provider._client._request.assert_has_calls([ - call('POST', '/domains/unit.tests/records', data={ - 'data': '3.2.3.4', - 'type': 'A', - 'name': 'ttl', - 'ttl': 300 - }), - call('DELETE', '/domains/unit.tests/records/11189899'), - call('DELETE', '/domains/unit.tests/records/11189897'), - call('DELETE', '/domains/unit.tests/records/11189898') - ], any_order=True) + def test_missing(self): + with self.assertRaises(ModuleNotFoundError): + from octodns.provider.digitalocean import DigitalOceanProvider + DigitalOceanProvider diff --git a/tests/test_octodns_provider_dnsmadeeasy.py b/tests/test_octodns_provider_dnsmadeeasy.py index e0b21a6..968ae30 100644 --- a/tests/test_octodns_provider_dnsmadeeasy.py +++ b/tests/test_octodns_provider_dnsmadeeasy.py @@ -2,227 +2,15 @@ # # - from __future__ import absolute_import, division, print_function, \ unicode_literals -from mock import Mock, call -from os.path import dirname, join -from requests import HTTPError -from requests_mock import ANY, mock as requests_mock from unittest import TestCase -from octodns.record import Record -from octodns.provider.dnsmadeeasy import DnsMadeEasyClientNotFound, \ - DnsMadeEasyProvider -from octodns.provider.yaml import YamlProvider -from octodns.zone import Zone -import json +class TestDnsMadeEasyShim(TestCase): - -class TestDnsMadeEasyProvider(TestCase): - expected = Zone('unit.tests.', []) - source = YamlProvider('test', join(dirname(__file__), 'config')) - source.populate(expected) - - # Our test suite differs a bit, add our NS and remove the simple one - expected.add_record(Record.new(expected, 'under', { - 
'ttl': 3600, - 'type': 'NS', - 'values': [ - 'ns1.unit.tests.', - 'ns2.unit.tests.', - ] - })) - - # Add some ALIAS records - expected.add_record(Record.new(expected, '', { - 'ttl': 1800, - 'type': 'ALIAS', - 'value': 'aname.unit.tests.' - })) - - for record in list(expected.records): - if record.name == 'sub' and record._type == 'NS': - expected._remove_record(record) - break - - def test_populate(self): - provider = DnsMadeEasyProvider('test', 'api', 'secret') - - # Bad auth - with requests_mock() as mock: - mock.get(ANY, status_code=401, - text='{"error": ["API key not found"]}') - - with self.assertRaises(Exception) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals('Unauthorized', str(ctx.exception)) - - # Bad request - with requests_mock() as mock: - mock.get(ANY, status_code=400, - text='{"error": ["Rate limit exceeded"]}') - - with self.assertRaises(Exception) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals('\n - Rate limit exceeded', str(ctx.exception)) - - # General error - with requests_mock() as mock: - mock.get(ANY, status_code=502, text='Things caught fire') - - with self.assertRaises(HTTPError) as ctx: - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(502, ctx.exception.response.status_code) - - # Non-existent zone doesn't populate anything - with requests_mock() as mock: - mock.get(ANY, status_code=404, - text='') - - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(set(), zone.records) - - # No diffs == no changes - with requests_mock() as mock: - base = 'https://api.dnsmadeeasy.com/V2.0/dns/managed' - with open('tests/fixtures/dnsmadeeasy-domains.json') as fh: - mock.get(f'{base}/', text=fh.read()) - with open('tests/fixtures/dnsmadeeasy-records.json') as fh: - mock.get(f'{base}/123123/records', text=fh.read()) - - zone = Zone('unit.tests.', []) - provider.populate(zone) - self.assertEquals(14, len(zone.records)) - changes = self.expected.changes(zone, provider) - self.assertEquals(0, len(changes)) - - # 2nd populate makes no network calls/all from cache - again = Zone('unit.tests.', []) - provider.populate(again) - self.assertEquals(14, len(again.records)) - - # bust the cache - del provider._zone_records[zone.name] - - def test_apply(self): - # Create provider with sandbox enabled - provider = DnsMadeEasyProvider('test', 'api', 'secret', True) - - resp = Mock() - resp.json = Mock() - provider._client._request = Mock(return_value=resp) - - with open('tests/fixtures/dnsmadeeasy-domains.json') as fh: - domains = json.load(fh) - - # non-existent domain, create everything - resp.json.side_effect = [ - DnsMadeEasyClientNotFound, # no zone in populate - DnsMadeEasyClientNotFound, # no domain during apply - domains - ] - plan = provider.plan(self.expected) - - # No root NS, no ignored, no excluded, no unsupported - n = len(self.expected.records) - 10 - self.assertEquals(n, len(plan.changes)) - self.assertEquals(n, provider.apply(plan)) - - provider._client._request.assert_has_calls([ - # created the domain - call('POST', '/', data={'name': 'unit.tests'}), - # get all domains to build the cache - call('GET', '/'), - # created at least some of the record with expected data - call('POST', '/123123/records', data={ - 'type': 'A', - 'name': '', - 'value': '1.2.3.4', - 'ttl': 300}), - call('POST', '/123123/records', data={ - 'type': 'A', - 'name': '', - 'value': '1.2.3.5', - 'ttl': 300}), - call('POST', '/123123/records', data={ - 'type': 'ANAME', - 
'name': '', - 'value': 'aname.unit.tests.', - 'ttl': 1800}), - call('POST', '/123123/records', data={ - 'name': '', - 'value': 'ca.unit.tests', - 'issuerCritical': 0, 'caaType': 'issue', - 'ttl': 3600, 'type': 'CAA'}), - call('POST', '/123123/records', data={ - 'name': '_srv._tcp', - 'weight': 20, - 'value': 'foo-1.unit.tests.', - 'priority': 10, - 'ttl': 600, - 'type': 'SRV', - 'port': 30 - }), - ]) - self.assertEquals(26, provider._client._request.call_count) - - provider._client._request.reset_mock() - - # delete 1 and update 1 - provider._client.records = Mock(return_value=[ - { - 'id': 11189897, - 'name': 'www', - 'value': '1.2.3.4', - 'ttl': 300, - 'type': 'A', - }, - { - 'id': 11189898, - 'name': 'www', - 'value': '2.2.3.4', - 'ttl': 300, - 'type': 'A', - }, - { - 'id': 11189899, - 'name': 'ttl', - 'value': '3.2.3.4', - 'ttl': 600, - 'type': 'A', - } - ]) - - # Domain exists, we don't care about return - resp.json.side_effect = ['{}'] - - wanted = Zone('unit.tests.', []) - wanted.add_record(Record.new(wanted, 'ttl', { - 'ttl': 300, - 'type': 'A', - 'value': '3.2.3.4' - })) - - plan = provider.plan(wanted) - self.assertEquals(2, len(plan.changes)) - self.assertEquals(2, provider.apply(plan)) - - # recreate for update, and deletes for the 2 parts of the other - provider._client._request.assert_has_calls([ - call('POST', '/123123/records', data={ - 'value': '3.2.3.4', - 'type': 'A', - 'name': 'ttl', - 'ttl': 300 - }), - call('DELETE', '/123123/records/11189899'), - call('DELETE', '/123123/records/11189897'), - call('DELETE', '/123123/records/11189898') - ], any_order=True) + def test_missing(self): + with self.assertRaises(ModuleNotFoundError): + from octodns.provider.dnsmadeeasy import DnsMadeEasyProvider + DnsMadeEasyProvider diff --git a/tests/test_octodns_provider_dyn.py b/tests/test_octodns_provider_dyn.py index 4072947..07fcc7b 100644 --- a/tests/test_octodns_provider_dyn.py +++ b/tests/test_octodns_provider_dyn.py @@ -5,2651 +5,12 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from dyn.tm.errors import DynectGetError -from dyn.tm.services.dsf import DSFResponsePool -from json import loads -from mock import MagicMock, call, patch from unittest import TestCase -from octodns.record import Create, Delete, Record, Update -from octodns.provider.base import Plan -from octodns.provider.dyn import DynProvider, _CachingDynZone, DSFMonitor, \ - _dynamic_value_sort_key -from octodns.zone import Zone -from helpers import SimpleProvider +class TestDynShim(TestCase): - -class _DummyPool(object): - - def __init__(self, response_pool_id): - self.response_pool_id = response_pool_id - self.deleted = False - - def delete(self): - self.deleted = True - - -class TestDynProvider(TestCase): - expected = Zone('unit.tests.', []) - for name, data in ( - ('', { - 'type': 'A', - 'ttl': 300, - 'values': ['1.2.3.4'] - }), - ('cname', { - 'type': 'CNAME', - 'ttl': 301, - 'value': 'unit.tests.' - }), - ('', { - 'type': 'MX', - 'ttl': 302, - 'values': [{ - 'preference': 10, - 'exchange': 'smtp-1.unit.tests.' - }, { - 'preference': 20, - 'exchange': 'smtp-2.unit.tests.' 
- }] - }), - ('naptr', { - 'type': 'NAPTR', - 'ttl': 303, - 'values': [{ - 'order': 100, - 'preference': 101, - 'flags': 'U', - 'service': 'SIP+D2U', - 'regexp': '!^.*$!sip:info@foo.example.com!', - 'replacement': '.', - }, { - 'order': 200, - 'preference': 201, - 'flags': 'U', - 'service': 'SIP+D2U', - 'regexp': '!^.*$!sip:info@bar.example.com!', - 'replacement': '.', - }] - }), - ('sub', { - 'type': 'NS', - 'ttl': 3600, - 'values': ['ns3.p10.dynect.net.', 'ns3.p10.dynect.net.'], - }), - ('ptr', { - 'type': 'PTR', - 'ttl': 304, - 'value': 'xx.unit.tests.' - }), - ('spf', { - 'type': 'SPF', - 'ttl': 305, - 'values': ['v=spf1 ip4:192.168.0.1/16-all', 'v=spf1 -all'], - }), - ('', { - 'type': 'SSHFP', - 'ttl': 306, - 'value': { - 'algorithm': 1, - 'fingerprint_type': 1, - 'fingerprint': 'bf6b6825d2977c511a475bbefb88aad54a92ac73', - } - }), - ('_srv._tcp', { - 'type': 'SRV', - 'ttl': 307, - 'values': [{ - 'priority': 11, - 'weight': 12, - 'port': 10, - 'target': 'foo-1.unit.tests.' - }, { - 'priority': 21, - 'weight': 22, - 'port': 20, - 'target': 'foo-2.unit.tests.' - }]}), - ('', { - 'type': 'CAA', - 'ttl': 308, - 'values': [{ - 'flags': 0, - 'tag': 'issue', - 'value': 'ca.unit.tests' - }]})): - expected.add_record(Record.new(expected, name, data)) - - @classmethod - def setUpClass(self): - # Get the DynectSession creation out of the way so that tests can - # ignore it - with patch('dyn.core.SessionEngine.execute', - return_value={'status': 'success'}): - provider = DynProvider('test', 'cust', 'user', 'pass') - provider._check_dyn_sess() - - def setUp(self): - # Flush our zone to ensure we start fresh - _CachingDynZone.flush_zone(self.expected.name[:-1]) - - @patch('dyn.core.SessionEngine.execute') - def test_populate_non_existent(self, execute_mock): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Test Zone create - execute_mock.side_effect = [ - DynectGetError('foo'), - ] - got = Zone('unit.tests.', []) - provider.populate(got) - execute_mock.assert_has_calls([ - call('/Zone/unit.tests/', 'GET', {}), - ]) - self.assertEquals(set(), got.records) - - @patch('dyn.core.SessionEngine.execute') - def test_populate(self, execute_mock): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Test Zone create - execute_mock.side_effect = [ - # get Zone - {'data': {}}, - # get_all_records - {'data': { - 'a_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'address': '1.2.3.4'}, - 'record_id': 1, - 'record_type': 'A', - 'ttl': 300, - 'zone': 'unit.tests', - }], - 'cname_records': [{ - 'fqdn': 'cname.unit.tests', - 'rdata': {'cname': 'unit.tests.'}, - 'record_id': 2, - 'record_type': 'CNAME', - 'ttl': 301, - 'zone': 'unit.tests', - }], - 'ns_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'nsdname': 'ns1.p10.dynect.net.'}, - 'record_id': 254597562, - 'record_type': 'NS', - 'service_class': '', - 'ttl': 3600, - 'zone': 'unit.tests' - }, { - 'fqdn': 'unit.tests', - 'rdata': {'nsdname': 'ns2.p10.dynect.net.'}, - 'record_id': 254597563, - 'record_type': 'NS', - 'service_class': '', - 'ttl': 3600, - 'zone': 'unit.tests' - }, { - 'fqdn': 'unit.tests', - 'rdata': {'nsdname': 'ns3.p10.dynect.net.'}, - 'record_id': 254597564, - 'record_type': 'NS', - 'service_class': '', - 'ttl': 3600, - 'zone': 'unit.tests' - }, { - 'fqdn': 'unit.tests', - 'rdata': {'nsdname': 'ns4.p10.dynect.net.'}, - 'record_id': 254597565, - 'record_type': 'NS', - 'service_class': '', - 'ttl': 3600, - 'zone': 'unit.tests' - }, { - 'fqdn': 'sub.unit.tests', - 'rdata': {'nsdname': 'ns3.p10.dynect.net.'}, - 'record_id': 
254597564, - 'record_type': 'NS', - 'service_class': '', - 'ttl': 3600, - 'zone': 'unit.tests' - }, { - 'fqdn': 'sub.unit.tests', - 'rdata': {'nsdname': 'ns3.p10.dynect.net.'}, - 'record_id': 254597564, - 'record_type': 'NS', - 'service_class': '', - 'ttl': 3600, - 'zone': 'unit.tests' - }], - 'mx_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'exchange': 'smtp-1.unit.tests.', - 'preference': 10}, - 'record_id': 3, - 'record_type': 'MX', - 'ttl': 302, - 'zone': 'unit.tests', - }, { - 'fqdn': 'unit.tests', - 'rdata': {'exchange': 'smtp-2.unit.tests.', - 'preference': 20}, - 'record_id': 4, - 'record_type': 'MX', - 'ttl': 302, - 'zone': 'unit.tests', - }], - 'naptr_records': [{ - 'fqdn': 'naptr.unit.tests', - 'rdata': {'flags': 'U', - 'order': 100, - 'preference': 101, - 'regexp': '!^.*$!sip:info@foo.example.com!', - 'replacement': '.', - 'services': 'SIP+D2U'}, - 'record_id': 5, - 'record_type': 'MX', - 'ttl': 303, - 'zone': 'unit.tests', - }, { - 'fqdn': 'naptr.unit.tests', - 'rdata': {'flags': 'U', - 'order': 200, - 'preference': 201, - 'regexp': '!^.*$!sip:info@bar.example.com!', - 'replacement': '.', - 'services': 'SIP+D2U'}, - 'record_id': 6, - 'record_type': 'MX', - 'ttl': 303, - 'zone': 'unit.tests', - }], - 'ptr_records': [{ - 'fqdn': 'ptr.unit.tests', - 'rdata': {'ptrdname': 'xx.unit.tests.'}, - 'record_id': 7, - 'record_type': 'PTR', - 'ttl': 304, - 'zone': 'unit.tests', - }], - 'soa_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'txtdata': 'ns1.p16.dynect.net. ' - 'hostmaster.unit.tests. 4 3600 600 604800 1800'}, - 'record_id': 99, - 'record_type': 'SOA', - 'ttl': 299, - 'zone': 'unit.tests', - }], - 'spf_records': [{ - 'fqdn': 'spf.unit.tests', - 'rdata': {'txtdata': 'v=spf1 ip4:192.168.0.1/16-all'}, - 'record_id': 8, - 'record_type': 'SPF', - 'ttl': 305, - 'zone': 'unit.tests', - }, { - 'fqdn': 'spf.unit.tests', - 'rdata': {'txtdata': 'v=spf1 -all'}, - 'record_id': 8, - 'record_type': 'SPF', - 'ttl': 305, - 'zone': 'unit.tests', - }], - 'sshfp_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'algorithm': 1, - 'fingerprint': - 'bf6b6825d2977c511a475bbefb88aad54a92ac73', - 'fptype': 1}, - 'record_id': 9, - 'record_type': 'SSHFP', - 'ttl': 306, - 'zone': 'unit.tests', - }], - 'srv_records': [{ - 'fqdn': '_srv._tcp.unit.tests', - 'rdata': {'port': 10, - 'priority': 11, - 'target': 'foo-1.unit.tests.', - 'weight': 12}, - 'record_id': 10, - 'record_type': 'SRV', - 'ttl': 307, - 'zone': 'unit.tests', - }, { - 'fqdn': '_srv._tcp.unit.tests', - 'rdata': {'port': 20, - 'priority': 21, - 'target': 'foo-2.unit.tests.', - 'weight': 22}, - 'record_id': 11, - 'record_type': 'SRV', - 'ttl': 307, - 'zone': 'unit.tests', - }], - 'caa_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'flags': 0, - 'tag': 'issue', - 'value': 'ca.unit.tests'}, - 'record_id': 12, - 'record_type': 'cAA', - 'ttl': 308, - 'zone': 'unit.tests', - }], - }} - ] - got = Zone('unit.tests.', []) - provider.populate(got) - execute_mock.assert_has_calls([ - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}) - ]) - changes = self.expected.changes(got, SimpleProvider()) - self.assertEquals([], changes) - - @patch('dyn.core.SessionEngine.execute') - def test_sync(self, execute_mock): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Test Zone create - execute_mock.side_effect = [ - # No such zone, during populate - DynectGetError('foo'), - # No such zone, during sync - DynectGetError('foo'), - # get empty Zone - {'data': {}}, - # get zone we can modify & 
delete with - {'data': { - # A top-level to delete - 'a_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'address': '1.2.3.4'}, - 'record_id': 1, - 'record_type': 'A', - 'ttl': 30, - 'zone': 'unit.tests', - }, { - 'fqdn': 'a.unit.tests', - 'rdata': {'address': '2.3.4.5'}, - 'record_id': 2, - 'record_type': 'A', - 'ttl': 30, - 'zone': 'unit.tests', - }], - # A node to delete - 'cname_records': [{ - 'fqdn': 'cname.unit.tests', - 'rdata': {'cname': 'unit.tests.'}, - 'record_id': 3, - 'record_type': 'CNAME', - 'ttl': 30, - 'zone': 'unit.tests', - }], - # A record to leave alone - 'ptr_records': [{ - 'fqdn': 'ptr.unit.tests', - 'rdata': {'ptrdname': 'xx.unit.tests.'}, - 'record_id': 4, - 'record_type': 'PTR', - 'ttl': 30, - 'zone': 'unit.tests', - }], - # A record to modify - 'srv_records': [{ - 'fqdn': '_srv._tcp.unit.tests', - 'rdata': {'port': 10, - 'priority': 11, - 'target': 'foo-1.unit.tests.', - 'weight': 12}, - 'record_id': 5, - 'record_type': 'SRV', - 'ttl': 30, - 'zone': 'unit.tests', - }, { - 'fqdn': '_srv._tcp.unit.tests', - 'rdata': {'port': 20, - 'priority': 21, - 'target': 'foo-2.unit.tests.', - 'weight': 22}, - 'record_id': 6, - 'record_type': 'SRV', - 'ttl': 30, - 'zone': 'unit.tests', - }], - }} - ] - - # No existing records, create all - with patch('dyn.tm.zones.Zone.add_record') as add_mock: - with patch('dyn.tm.zones.Zone._update') as update_mock: - plan = provider.plan(self.expected) - update_mock.assert_not_called() - provider.apply(plan) - update_mock.assert_called() - self.assertFalse(plan.exists) - add_mock.assert_called() - # Once for each dyn record (8 Records, 2 of which have dual values) - self.assertEquals(15, len(add_mock.call_args_list)) - execute_mock.assert_has_calls([call('/Zone/unit.tests/', 'GET', {}), - call('/Zone/unit.tests/', 'GET', {})]) - self.assertEquals(10, len(plan.changes)) - - execute_mock.reset_mock() - - # Delete one and modify another - new = Zone('unit.tests.', []) - for name, data in ( - ('a', { - 'type': 'A', - 'ttl': 30, - 'value': '2.3.4.5' - }), - ('ptr', { - 'type': 'PTR', - 'ttl': 30, - 'value': 'xx.unit.tests.' - }), - ('_srv._tcp', { - 'type': 'SRV', - 'ttl': 30, - 'values': [{ - 'priority': 31, - 'weight': 12, - 'port': 10, - 'target': 'foo-1.unit.tests.' - }, { - 'priority': 21, - 'weight': 22, - 'port': 20, - 'target': 'foo-2.unit.tests.' 
- }]})): - new.add_record(Record.new(new, name, data)) - - with patch('dyn.tm.zones.Zone.add_record') as add_mock: - with patch('dyn.tm.records.DNSRecord.delete') as delete_mock: - with patch('dyn.tm.zones.Zone._update') as update_mock: - plan = provider.plan(new) - provider.apply(plan) - update_mock.assert_called() - self.assertTrue(plan.exists) - # we expect 4 deletes, 2 from actual deletes and 2 from - # updates which delete and recreate - self.assertEquals(4, len(delete_mock.call_args_list)) - # the 2 (re)creates - self.assertEquals(2, len(add_mock.call_args_list)) - execute_mock.assert_has_calls([ - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}) - ]) - self.assertEquals(3, len(plan.changes)) - - -class TestDynProviderGeo(TestCase): - - with open('./tests/fixtures/dyn-traffic-director-get.json') as fh: - traffic_director_response = loads(fh.read()) - - @property - def traffic_directors_response(self): - return { - 'data': [{ - 'active': 'Y', - 'label': 'unit.tests.:A', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '2ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }, { - 'active': 'Y', - 'label': 'some.other.:A', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '3ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }, { - 'active': 'Y', - 'label': 'other format', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '4ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }] - } - - # Doing this as a property so that we get a fresh copy each time, dyn's - # client lib messes with the return value and prevents it from working on - # subsequent uses otherwise - @property - def records_response(self): - return { - 'data': { - 'a_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'address': '1.2.3.4'}, - 'record_id': 1, - 'record_type': 'A', - 'ttl': 301, - 'zone': 'unit.tests', - }], - } - } - - monitor_id = '42a' - monitors_response = { - 'data': [{ - 'active': 'Y', - 'agent_scheme': 'geo', - 'dsf_monitor_id': monitor_id, - 'endpoints': [], - 'label': 'unit.tests.:A', - 'notifier': [], - 'expected': '', - 'header': 'User-Agent: Dyn Monitor', - 'host': 'unit.tests', - 'path': '/_dns', - 'port': '443', - 'timeout': '10', - 'probe_interval': '60', - 'protocol': 'HTTPS', - 'response_count': '2', - 'retries': '2', - 'services': ['12311'] - }, { - 'active': 'Y', - 'agent_scheme': 'geo', - 'dsf_monitor_id': 'b52', - 'endpoints': [], - 'label': 'old-label.unit.tests.', - 'notifier': [], - 'expected': '', - 'header': 'User-Agent: Dyn Monitor', - 'host': 'old-label.unit.tests', - 'path': '/_dns', - 'port': '443', - 'timeout': '10', - 'probe_interval': '60', - 'protocol': 'HTTPS', - 'response_count': '2', - 'retries': '2', - 'services': ['12312'] - }], - 'job_id': 3376281406, - 'msgs': [{ - 'ERR_CD': None, - 'INFO': 'DSFMonitor_get: Here are your monitors', - 'LVL': 'INFO', - 'SOURCE': 'BLL' - }], - 'status': 'success' - } - - expected_geo = Zone('unit.tests.', []) - geo_record = Record.new(expected_geo, '', { - 'geo': { - 'AF': ['2.2.3.4', '2.2.3.5'], - 'AS-JP': ['3.2.3.4', '3.2.3.5'], - 'NA-US': ['4.2.3.4', '4.2.3.5'], - 'NA-US-CA': ['5.2.3.4', '5.2.3.5'] - }, - 'ttl': 300, - 'type': 'A', - 'values': ['1.2.3.4', '1.2.3.5'], - }) - expected_geo.add_record(geo_record) - expected_regular = Zone('unit.tests.', []) - regular_record = Record.new(expected_regular, '', { - 'ttl': 301, - 'type': 'A', - 'value': '1.2.3.4', - }) - expected_regular.add_record(regular_record) - - def 
setUp(self): - # Flush our zone to ensure we start fresh - _CachingDynZone.flush_zone('unit.tests') - - @patch('dyn.core.SessionEngine.execute') - def test_traffic_directors(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', True) - # short-circuit session checking - provider._dyn_sess = True - provider.log.warn = MagicMock() - - # no tds - mock.side_effect = [{'data': []}] - self.assertEquals({}, provider.traffic_directors) - - # a supported td and an ignored one - response = { - 'data': [{ - 'active': 'Y', - 'label': 'unit.tests.:A', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '2ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }, { - 'active': 'Y', - 'label': 'geo.unit.tests.:A', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '3ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }, { - 'active': 'Y', - 'label': 'something else', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '4ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }], - 'job_id': 3376164583, - 'status': 'success' - } - mock.side_effect = [response] - # first make sure that we get the empty version from cache - self.assertEquals({}, provider.traffic_directors) - # reach in and bust the cache - provider._traffic_directors = None - tds = provider.traffic_directors - self.assertEquals(set(['unit.tests.', 'geo.unit.tests.']), - set(tds.keys())) - self.assertEquals(['A'], list(tds['unit.tests.'].keys())) - self.assertEquals(['A'], list(tds['geo.unit.tests.'].keys())) - provider.log.warn.assert_called_with("Unsupported TrafficDirector " - "'%s'", 'something else') - - @patch('dyn.core.SessionEngine.execute') - def test_traffic_director_monitor(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', True) - # short-circuit session checking - provider._dyn_sess = True - existing = Zone('unit.tests.', []) - - # no monitors, will try and create - geo_monitor_id = '42x' - mock.side_effect = [self.monitors_response, { - 'data': { - 'active': 'Y', - 'dsf_monitor_id': geo_monitor_id, - 'endpoints': [], - 'label': 'geo.unit.tests.:A', - 'notifier': '', - 'expected': '', - 'header': 'User-Agent: Dyn Monitor', - 'host': 'geo.unit.tests.', - 'path': '/_dns', - 'port': '443', - 'timeout': '10', - 'probe_interval': '60', - 'protocol': 'HTTPS', - 'response_count': '2', - 'retries': '2' - }, - 'job_id': 3376259461, - 'msgs': [{'ERR_CD': None, - 'INFO': 'add: Here is the new monitor', - 'LVL': 'INFO', - 'SOURCE': 'BLL'}], - 'status': 'success' - }] - - # ask for a monitor that doesn't exist - record = Record.new(existing, 'geo', { - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4', - 'octodns': { - 'healthcheck': { - 'host': 'foo.bar', - 'path': '/_ready' - } - } - }) - monitor = provider._traffic_director_monitor(record) - self.assertEquals(geo_monitor_id, monitor.dsf_monitor_id) - # should see a request for the list and a create - mock.assert_has_calls([ - call('/DSFMonitor/', 'GET', {'detail': 'Y'}), - call('/DSFMonitor/', 'POST', { - 'retries': 2, - 'protocol': 'HTTPS', - 'response_count': 2, - 'label': 'geo.unit.tests.:A', - 'probe_interval': 60, - 'active': 'Y', - 'options': { - 'path': '/_ready', - 'host': 'foo.bar', - 'header': 'User-Agent: Dyn Monitor', - 'port': 443, - 'timeout': 10 - } - }) - ]) - # created monitor is now cached - self.assertTrue('geo.unit.tests.:A' in - provider._traffic_director_monitors) - # pre-existing one is there too - self.assertTrue('unit.tests.:A' in - 
provider._traffic_director_monitors) - - # now ask for a monitor that does exist - record = Record.new(existing, '', { - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4' - }) - mock.reset_mock() - monitor = provider._traffic_director_monitor(record) - self.assertEquals(self.monitor_id, monitor.dsf_monitor_id) - # should have resulted in no calls b/c exists & we've cached the list - mock.assert_not_called() - - # and finally for a monitor that exists, but with a differing config - record = Record.new(existing, '', { - 'octodns': { - 'healthcheck': { - 'host': 'bleep.bloop', - 'path': '/_nope', - 'protocol': 'HTTP', - 'port': 8080, - } - }, - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4' - }) - mock.reset_mock() - mock.side_effect = [{ - 'data': { - 'active': 'Y', - 'dsf_monitor_id': self.monitor_id, - 'endpoints': [], - 'label': 'unit.tests.:A', - 'notifier': '', - 'expected': '', - 'header': 'User-Agent: Dyn Monitor', - 'host': 'bleep.bloop', - 'path': '/_nope', - 'port': '8080', - 'timeout': '10', - 'probe_interval': '60', - 'protocol': 'HTTP', - 'response_count': '2', - 'retries': '2' - }, - 'job_id': 3376259461, - 'msgs': [{'ERR_CD': None, - 'INFO': 'add: Here is the new monitor', - 'LVL': 'INFO', - 'SOURCE': 'BLL'}], - 'status': 'success' - }] - monitor = provider._traffic_director_monitor(record) - self.assertEquals(self.monitor_id, monitor.dsf_monitor_id) - # should have resulted an update - mock.assert_has_calls([ - call('/DSFMonitor/42a/', 'PUT', { - 'protocol': 'HTTP', - 'options': { - 'path': '/_nope', - 'host': 'bleep.bloop', - 'header': 'User-Agent: Dyn Monitor', - 'port': 8080, - 'timeout': 10 - } - }) - ]) - # cached monitor should have been updated - self.assertTrue('unit.tests.:A' in - provider._traffic_director_monitors) - monitor = provider._traffic_director_monitors['unit.tests.:A'] - self.assertEquals('bleep.bloop', monitor.host) - self.assertEquals('/_nope', monitor.path) - self.assertEquals('HTTP', monitor.protocol) - self.assertEquals('8080', monitor.port) - - # test upgrading an old label - record = Record.new(existing, 'old-label', { - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4' - }) - mock.reset_mock() - mock.side_effect = [{ - 'data': { - 'active': 'Y', - 'dsf_monitor_id': self.monitor_id, - 'endpoints': [], - 'label': 'old-label.unit.tests.:A', - 'notifier': '', - 'expected': '', - 'header': 'User-Agent: Dyn Monitor', - 'host': 'old-label.unit.tests', - 'path': '/_dns', - 'port': '443', - 'timeout': '10', - 'probe_interval': '60', - 'protocol': 'HTTPS', - 'response_count': '2', - 'retries': '2' - }, - 'job_id': 3376259461, - 'msgs': [{'ERR_CD': None, - 'INFO': 'add: Here is the new monitor', - 'LVL': 'INFO', - 'SOURCE': 'BLL'}], - 'status': 'success' - }] - monitor = provider._traffic_director_monitor(record) - self.assertEquals(self.monitor_id, monitor.dsf_monitor_id) - # should have resulted an update - mock.assert_has_calls([ - call('/DSFMonitor/b52/', 'PUT', { - 'label': 'old-label.unit.tests.:A' - }) - ]) - # cached monitor should have been updated - self.assertTrue('old-label.unit.tests.:A' in - provider._traffic_director_monitors) - - @patch('dyn.core.SessionEngine.execute') - def test_extra_changes(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', True) - # short-circuit session checking - provider._dyn_sess = True - - mock.side_effect = [self.monitors_response] - - # non-geo - desired = Zone('unit.tests.', []) - record = Record.new(desired, '', { - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4', - }) - desired.add_record(record) - 
extra = provider._extra_changes(desired=desired, - changes=[Create(record)]) - self.assertEquals(0, len(extra)) - - # in changes, noop - desired = Zone('unit.tests.', []) - record = Record.new(desired, '', { - 'geo': { - 'NA': ['1.2.3.4'], - }, - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4', - }) - desired.add_record(record) - extra = provider._extra_changes(desired=desired, - changes=[Create(record)]) - self.assertEquals(0, len(extra)) - - # no diff, no extra - extra = provider._extra_changes(desired=desired, changes=[]) - self.assertEquals(0, len(extra)) - - # monitors should have been fetched now - mock.assert_called_once() - - # diff in healthcheck, gets extra - desired = Zone('unit.tests.', []) - record = Record.new(desired, '', { - 'geo': { - 'NA': ['1.2.3.4'], - }, - 'octodns': { - 'healthcheck': { - 'host': 'foo.bar', - 'path': '/_ready' - } - }, - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4', - }) - desired.add_record(record) - extra = provider._extra_changes(desired=desired, changes=[]) - self.assertEquals(1, len(extra)) - extra = extra[0] - self.assertIsInstance(extra, Update) - self.assertEquals(record, extra.record) - - # missing health check - desired = Zone('unit.tests.', []) - record = Record.new(desired, 'geo', { - 'geo': { - 'NA': ['1.2.3.4'], - }, - 'ttl': 60, - 'type': 'A', - 'value': '1.2.3.4', - }) - desired.add_record(record) - extra = provider._extra_changes(desired=desired, changes=[]) - self.assertEquals(1, len(extra)) - extra = extra[0] - self.assertIsInstance(extra, Update) - self.assertEquals(record, extra.record) - - @patch('dyn.core.SessionEngine.execute') - def test_populate_traffic_directors_empty(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # empty all around - mock.side_effect = [ - # get traffic directors - {'data': []}, - # get zone - {'data': {}}, - # get records - {'data': {}}, - ] - got = Zone('unit.tests.', []) - provider.populate(got) - self.assertEquals(0, len(got.records)) - mock.assert_has_calls([ - call('/DSF/', 'GET', {'detail': 'Y'}), - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}), - ]) - - @patch('dyn.core.SessionEngine.execute') - def test_populate_traffic_directors_td(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - got = Zone('unit.tests.', []) - zone_name = got.name[:-1] - # only traffic director - mock.side_effect = [ - # get traffic directors - self.traffic_directors_response, - # get the first td's nodes - {'data': [{'fqdn': zone_name, 'zone': zone_name}]}, - # get traffic director, b/c ^ matches - self.traffic_director_response, - # get the next td's nodes, not a match - {'data': [{'fqdn': 'other', 'zone': 'other'}]}, - # get zone - {'data': {}}, - # get records - {'data': {}}, - ] - provider.populate(got) - self.assertEquals(1, len(got.records)) - self.assertFalse(self.expected_geo.changes(got, provider)) - mock.assert_has_calls([ - call('/DSF/', 'GET', {'detail': 'Y'}), - call('/DSFNode/2ERWXQNsb_IKG2YZgYqkPvk0PBM', 'GET', {}), - call('/DSF/2ERWXQNsb_IKG2YZgYqkPvk0PBM/', 'GET', - {'pending_changes': 'Y'}), - call('/DSFNode/3ERWXQNsb_IKG2YZgYqkPvk0PBM', 'GET', {}), - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}) - ]) - - @patch('dyn.core.SessionEngine.execute') - def test_populate_traffic_directors_regular(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - 
traffic_directors_enabled=True) - - # only regular - mock.side_effect = [ - # get traffic directors - {'data': []}, - # get zone - {'data': {}}, - # get records - self.records_response - ] - got = Zone('unit.tests.', []) - provider.populate(got) - self.assertEquals(1, len(got.records)) - self.assertFalse(self.expected_regular.changes(got, provider)) - mock.assert_has_calls([ - call('/DSF/', 'GET', {'detail': 'Y'}), - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}), - ]) - - @patch('dyn.core.SessionEngine.execute') - def test_populate_traffic_directors_both(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # both traffic director and regular, regular is ignored - mock.side_effect = [ - # get traffic directors - self.traffic_directors_response, - # grab its nodes, matches - {'data': [{'fqdn': 'unit.tests', 'zone': 'unit.tests'}]}, - # get traffic director b/c match - self.traffic_director_response, - # grab next td's nodes, not a match - {'data': [{'fqdn': 'other', 'zone': 'other'}]}, - # get zone - {'data': {}}, - # get records - self.records_response - ] - got = Zone('unit.tests.', []) - provider.populate(got) - self.assertEquals(1, len(got.records)) - self.assertFalse(self.expected_geo.changes(got, provider)) - mock.assert_has_calls([ - call('/DSF/', 'GET', {'detail': 'Y'}), - call('/DSFNode/2ERWXQNsb_IKG2YZgYqkPvk0PBM', 'GET', {}), - call('/DSF/2ERWXQNsb_IKG2YZgYqkPvk0PBM/', 'GET', - {'pending_changes': 'Y'}), - call('/DSFNode/3ERWXQNsb_IKG2YZgYqkPvk0PBM', 'GET', {}), - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}) - ]) - - @patch('dyn.core.SessionEngine.execute') - def test_populate_traffic_director_busted(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - busted_traffic_director_response = { - "status": "success", - "data": { - "notifiers": [], - "rulesets": [], - "ttl": "300", - "active": "Y", - "service_id": "oIRZ4lM-W64NUelJGuzuVziZ4MI", - "nodes": [{ - "fqdn": "unit.tests", - "zone": "unit.tests" - }], - "pending_change": "", - "label": "unit.tests.:A" - }, - "job_id": 3376642606, - "msgs": [{ - "INFO": "detail: Here is your service", - "LVL": "INFO", - "ERR_CD": None, - "SOURCE": "BLL" - }] - } - # busted traffic director - mock.side_effect = [ - # get traffic directors - self.traffic_directors_response, - {'data': [{'fqdn': 'unit.tests', 'zone': 'unit.tests'}]}, - # get traffic director - busted_traffic_director_response, - {'data': [{'fqdn': 'other', 'zone': 'other'}]}, - # get zone - {'data': {}}, - # get records - {'data': {}}, - ] - got = Zone('unit.tests.', []) - provider.populate(got) - self.assertEquals(1, len(got.records)) - # we expect a change here for the record, the values aren't important, - # so just compare set contents (which does name and type) - self.assertEquals(self.expected_geo.records, got.records) - mock.assert_has_calls([ - call('/DSF/', 'GET', {'detail': 'Y'}), - call('/DSFNode/2ERWXQNsb_IKG2YZgYqkPvk0PBM', 'GET', {}), - call('/DSF/2ERWXQNsb_IKG2YZgYqkPvk0PBM/', 'GET', - {'pending_changes': 'Y'}), - call('/DSFNode/3ERWXQNsb_IKG2YZgYqkPvk0PBM', 'GET', {}), - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}) - ]) - - @patch('dyn.core.SessionEngine.execute') - def test_apply_traffic_director(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - 
traffic_directors_enabled=True) - - # stubbing these out to avoid a lot of messy mocking, they'll be tested - # individually, we'll check for expected calls - provider._mod_geo_Create = MagicMock() - provider._mod_geo_Update = MagicMock() - provider._mod_geo_Delete = MagicMock() - provider._mod_Create = MagicMock() - provider._mod_Update = MagicMock() - provider._mod_Delete = MagicMock() - - # busted traffic director - mock.side_effect = [ - # get zone - {'data': {}}, - # accept publish - {'data': {}}, - ] - desired = Zone('unit.tests.', []) - geo = self.geo_record - regular = self.regular_record - - changes = [ - Create(geo), - Create(regular), - Update(geo, geo), - Update(regular, regular), - Delete(geo), - Delete(regular), - ] - plan = Plan(None, desired, changes, True) - provider._apply(plan) - mock.assert_has_calls([ - call('/Zone/unit.tests/', 'GET', {}), - call('/Zone/unit.tests/', 'PUT', {'publish': True}) - ]) - # should have seen 1 call to each - provider._mod_geo_Create.assert_called_once() - provider._mod_geo_Update.assert_called_once() - provider._mod_geo_Delete.assert_called_once() - provider._mod_Create.assert_called_once() - provider._mod_Update.assert_called_once() - provider._mod_Delete.assert_called_once() - - @patch('dyn.core.SessionEngine.execute') - def test_mod_geo_create(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # will be tested separately - provider._mod_geo_rulesets = MagicMock() - - mock.side_effect = [ - # create traffic director - self.traffic_director_response, - # get traffic directors - self.traffic_directors_response - ] - provider._mod_geo_Create(None, Create(self.geo_record)) - # td now lives in cache - self.assertTrue('A' in provider.traffic_directors['unit.tests.']) - # should have seen 1 gen call - provider._mod_geo_rulesets.assert_called_once() - - def test_mod_geo_update_geo_geo(self): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # update of an existing td - - # pre-populate the cache with our mock td - provider._traffic_directors = { - 'unit.tests.': { - 'A': 42, - } - } - # mock _mod_geo_rulesets - provider._mod_geo_rulesets = MagicMock() - - geo = self.geo_record - change = Update(geo, geo) - provider._mod_geo_Update(None, change) - # still in cache - self.assertTrue('A' in provider.traffic_directors['unit.tests.']) - # should have seen 1 gen call - provider._mod_geo_rulesets.assert_called_once_with(42, change) - - @patch('dyn.core.SessionEngine.execute') - def test_mod_geo_update_geo_regular(self, _): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # convert a td to a regular record - - provider._mod_Create = MagicMock() - provider._mod_geo_Delete = MagicMock() - - change = Update(self.geo_record, self.regular_record) - provider._mod_geo_Update(42, change) - # should have seen a call to create the new regular record - provider._mod_Create.assert_called_once_with(42, change) - # should have seen a call to delete the old td record - provider._mod_geo_Delete.assert_called_once_with(42, change) - - @patch('dyn.core.SessionEngine.execute') - def test_mod_geo_update_regular_geo(self, _): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # convert a regular record to a td - - provider._mod_geo_Create = MagicMock() - provider._mod_Delete = MagicMock() - - change = Update(self.regular_record, self.geo_record) - provider._mod_geo_Update(42, change) - # 
should have seen a call to create the new geo record - provider._mod_geo_Create.assert_called_once_with(42, change) - # should have seen a call to delete the old regular record - provider._mod_Delete.assert_called_once_with(42, change) - - @patch('dyn.core.SessionEngine.execute') - def test_mod_geo_delete(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - td_mock = MagicMock() - provider._traffic_directors = { - 'unit.tests.': { - 'A': td_mock, - } - } - provider._mod_geo_Delete(None, Delete(self.geo_record)) - # delete called - td_mock.delete.assert_called_once() - # removed from cache - self.assertFalse('A' in provider.traffic_directors['unit.tests.']) - - @patch('dyn.tm.services.DSFResponsePool.create') - def test_find_or_create_geo_pool(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - td = 42 - - # no candidates cache miss, so create - values = ['1.2.3.4', '1.2.3.5'] - pool = provider._find_or_create_geo_pool(td, [], 'default', 'A', - values) - self.assertIsInstance(pool, DSFResponsePool) - self.assertEquals(1, len(pool.rs_chains)) - records = pool.rs_chains[0].record_sets[0].records - self.assertEquals(values, [r.address for r in records]) - mock.assert_called_once_with(td) - - # cache hit, use the one we just created - mock.reset_mock() - pools = [pool] - cached = provider._find_or_create_geo_pool(td, pools, 'default', 'A', - values) - self.assertEquals(pool, cached) - mock.assert_not_called() - - # cache miss, non-matching label - mock.reset_mock() - miss = provider._find_or_create_geo_pool(td, pools, 'NA-US-CA', 'A', - values) - self.assertNotEquals(pool, miss) - self.assertEquals('NA-US-CA', miss.label) - mock.assert_called_once_with(td) - - # cache miss, matching label, mis-matching values - mock.reset_mock() - values = ['2.2.3.4.', '2.2.3.5'] - miss = provider._find_or_create_geo_pool(td, pools, 'default', 'A', - values) - self.assertNotEquals(pool, miss) - mock.assert_called_once_with(td) - - @patch('dyn.tm.services.DSFRuleset.add_response_pool') - @patch('dyn.tm.services.DSFRuleset.create') - # just lets us ignore the pool.create calls - @patch('dyn.tm.services.DSFResponsePool.create') - def test_mod_geo_rulesets_create(self, _, ruleset_create_mock, - add_response_pool_mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - td_mock = MagicMock() - td_mock._rulesets = [] - provider._traffic_director_monitor = MagicMock() - provider._find_or_create_geo_pool = MagicMock() - - td_mock.all_response_pools = [] - - provider._find_or_create_geo_pool.side_effect = [ - _DummyPool('default'), - _DummyPool(1), - _DummyPool(2), - _DummyPool(3), - _DummyPool(4), - ] - - change = Create(self.geo_record) - provider._mod_geo_rulesets(td_mock, change) - ruleset_create_mock.assert_has_calls(( - call(td_mock, index=0), - call(td_mock, index=0), - call(td_mock, index=0), - call(td_mock, index=0), - call(td_mock, index=0), - )) - add_response_pool_mock.assert_has_calls(( - # default - call('default'), - # first geo and it's fallback - call(1), - call('default', index=999), - # 2nd geo and it's fallback - call(2), - call('default', index=999), - # 3nd geo and it's fallback - call(3), - call('default', index=999), - # 4th geo and it's 2 levels of fallback - call(4), - call(3, index=999), - call('default', index=999), - )) - - # have to patch the place it's imported into, not where it lives - @patch('octodns.provider.dyn.get_response_pool') - 
@patch('dyn.tm.services.DSFRuleset.add_response_pool') - @patch('dyn.tm.services.DSFRuleset.create') - # just lets us ignore the pool.create calls - @patch('dyn.tm.services.DSFResponsePool.create') - def test_mod_geo_rulesets_existing(self, _, ruleset_create_mock, - add_response_pool_mock, - get_response_pool_mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - ruleset_mock = MagicMock() - ruleset_mock.response_pools = [_DummyPool(3)] - - td_mock = MagicMock() - td_mock._rulesets = [ - ruleset_mock, - ] - provider._traffic_director_monitor = MagicMock() - provider._find_or_create_geo_pool = MagicMock() - - unused_pool = _DummyPool('unused') - td_mock.all_response_pools = \ - ruleset_mock.response_pools + [unused_pool] - get_response_pool_mock.return_value = unused_pool - - provider._find_or_create_geo_pool.side_effect = [ - _DummyPool('default'), - _DummyPool(1), - _DummyPool(2), - ruleset_mock.response_pools[0], - _DummyPool(4), - ] - - change = Create(self.geo_record) - provider._mod_geo_rulesets(td_mock, change) - ruleset_create_mock.assert_has_calls(( - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - )) - add_response_pool_mock.assert_has_calls(( - # default - call('default'), - # first geo and it's fallback - call(1), - call('default', index=999), - # 2nd geo and it's fallback - call(2), - call('default', index=999), - # 3nd geo, from existing, and it's fallback - call(3), - call('default', index=999), - # 4th geo and it's 2 levels of fallback - call(4), - call(3, index=999), - call('default', index=999), - )) - # unused poll should have been deleted - self.assertTrue(unused_pool.deleted) - # old ruleset ruleset should be deleted, it's pool will have been - # reused - ruleset_mock.delete.assert_called_once() - - -class TestDynProviderAlias(TestCase): - expected = Zone('unit.tests.', []) - for name, data in ( - ('', { - 'type': 'ALIAS', - 'ttl': 300, - 'value': 'www.unit.tests.' 
- }), - ('www', { - 'type': 'A', - 'ttl': 300, - 'values': ['1.2.3.4'] - })): - expected.add_record(Record.new(expected, name, data)) - - def setUp(self): - # Flush our zone to ensure we start fresh - _CachingDynZone.flush_zone(self.expected.name[:-1]) - - @patch('dyn.core.SessionEngine.execute') - def test_populate(self, execute_mock): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Test Zone create - execute_mock.side_effect = [ - # get Zone - {'data': {}}, - # get_all_records - {'data': { - 'a_records': [{ - 'fqdn': 'www.unit.tests', - 'rdata': {'address': '1.2.3.4'}, - 'record_id': 1, - 'record_type': 'A', - 'ttl': 300, - 'zone': 'unit.tests', - }], - 'alias_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'alias': 'www.unit.tests.'}, - 'record_id': 2, - 'record_type': 'ALIAS', - 'ttl': 300, - 'zone': 'unit.tests', - }], - }} - ] - got = Zone('unit.tests.', []) - provider.populate(got) - execute_mock.assert_has_calls([ - call('/Zone/unit.tests/', 'GET', {}), - call('/AllRecord/unit.tests/unit.tests./', 'GET', {'detail': 'Y'}) - ]) - changes = self.expected.changes(got, SimpleProvider()) - self.assertEquals([], changes) - - @patch('dyn.core.SessionEngine.execute') - def test_sync(self, execute_mock): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Test Zone create - execute_mock.side_effect = [ - # No such zone, during populate - DynectGetError('foo'), - # No such zone, during sync - DynectGetError('foo'), - # get empty Zone - {'data': {}}, - # get zone we can modify & delete with - {'data': { - # A top-level to delete - 'a_records': [{ - 'fqdn': 'www.unit.tests', - 'rdata': {'address': '1.2.3.4'}, - 'record_id': 1, - 'record_type': 'A', - 'ttl': 300, - 'zone': 'unit.tests', - }], - # A node to delete - 'alias_records': [{ - 'fqdn': 'unit.tests', - 'rdata': {'alias': 'www.unit.tests.'}, - 'record_id': 2, - 'record_type': 'ALIAS', - 'ttl': 300, - 'zone': 'unit.tests', - }], - }} - ] - - # No existing records, create all - with patch('dyn.tm.zones.Zone.add_record') as add_mock: - with patch('dyn.tm.zones.Zone._update') as update_mock: - plan = provider.plan(self.expected) - update_mock.assert_not_called() - provider.apply(plan) - update_mock.assert_called() - add_mock.assert_called() - # Once for each dyn record - self.assertEquals(2, len(add_mock.call_args_list)) - execute_mock.assert_has_calls([call('/Zone/unit.tests/', 'GET', {}), - call('/Zone/unit.tests/', 'GET', {})]) - self.assertEquals(2, len(plan.changes)) - - -# Need a class that doesn't do all the "real" stuff, but gets our monkey -# patching -class DummyDSFMonitor(DSFMonitor): - - def __init__(self, host=None, path=None, protocol=None, port=None, - options_host=None, options_path=None, options_protocol=None, - options_port=None): - # not calling super on purpose - self._host = host - self._path = path - self._protocol = protocol - self._port = port - if options_host: - self._options = { - 'host': options_host, - 'path': options_path, - 'protocol': options_protocol, - 'port': options_port, - } - else: - self._options = None - - -class TestDSFMonitorMonkeyPatching(TestCase): - - def test_host(self): - monitor = DummyDSFMonitor(host='host.com', path='/path', - protocol='HTTP', port=8080) - self.assertEquals('host.com', monitor.host) - self.assertEquals('/path', monitor.path) - self.assertEquals('HTTP', monitor.protocol) - self.assertEquals(8080, monitor.port) - - monitor = DummyDSFMonitor(options_host='host.com', - options_path='/path', - options_protocol='HTTP', options_port=8080) - 
self.assertEquals('host.com', monitor.host) - self.assertEquals('/path', monitor.path) - - monitor.host = 'other.com' - self.assertEquals('other.com', monitor.host) - monitor.path = '/other-path' - self.assertEquals('/other-path', monitor.path) - monitor.protocol = 'HTTPS' - self.assertEquals('HTTPS', monitor.protocol) - monitor.port = 8081 - self.assertEquals(8081, monitor.port) - - monitor = DummyDSFMonitor() - monitor.host = 'other.com' - self.assertEquals('other.com', monitor.host) - monitor = DummyDSFMonitor() - monitor.path = '/other-path' - self.assertEquals('/other-path', monitor.path) - monitor.protocol = 'HTTP' - self.assertEquals('HTTP', monitor.protocol) - monitor.port = 8080 - self.assertEquals(8080, monitor.port) - - # Just to exercise the _options init - monitor = DummyDSFMonitor() - monitor.protocol = 'HTTP' - self.assertEquals('HTTP', monitor.protocol) - monitor = DummyDSFMonitor() - monitor.port = 8080 - self.assertEquals(8080, monitor.port) - - -class DummyRecord(object): - - def __init__(self, address, weight, ttl): - self.address = address - self.weight = weight - self.ttl = ttl - - -class DummyRecordSets(object): - - def __init__(self, records): - self.records = records - - -class DummyRsChains(object): - - def __init__(self, records): - self.record_sets = [DummyRecordSets(records)] - - -class DummyResponsePool(object): - - def __init__(self, label, records=[]): - self.label = label - if records: - self.rs_chains = [DummyRsChains(records)] - else: - self.rs_chains = [] - - def refresh(self): - pass - - -class DummyRuleset(object): - - def __init__(self, label, response_pools=[], - criteria_type='always', criteria={}): - self.label = label - self.response_pools = response_pools - self.criteria_type = criteria_type - self.criteria = criteria - - -class DummyTrafficDirector(object): - - def __init__(self, zone_name, rulesets=[], response_pools=[], ttl=42): - self.label = 'dummy:abcdef1234567890' - self.rulesets = rulesets - self.all_response_pools = response_pools - self.ttl = ttl - self.nodes = [{'zone': zone_name[:-1]}] - - -class TestDynProviderDynamic(TestCase): - - def test_value_for_address(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - class DummyRecord(object): - - def __init__(self, address, weight): - self.address = address - self.weight = weight - - record = DummyRecord('1.2.3.4', 32) - self.assertEquals({ - 'value': record.address, - 'weight': record.weight, - }, provider._value_for_A('A', record)) - - record = DummyRecord('2601:644:500:e210:62f8:1dff:feb8:947a', 32) - self.assertEquals({ - 'value': record.address, - 'weight': record.weight, - }, provider._value_for_AAAA('AAAA', record)) - - def test_value_for_CNAME(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - class DummyRecord(object): - - def __init__(self, cname, weight): - self.cname = cname - self.weight = weight - - record = DummyRecord('foo.unit.tests.', 32) - self.assertEquals({ - 'value': record.cname, - 'weight': record.weight, - }, provider._value_for_CNAME('CNAME', record)) - - def test_populate_dynamic_pools(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Empty data, empty returns - default, pools = provider._populate_dynamic_pools('A', [], []) - self.assertEquals({}, default) - self.assertEquals({}, pools) - - records_a = [DummyRecord('1.2.3.4', 32, 60)] - default_a = DummyResponsePool('default', records_a) - - # Just a default A - response_pools = [default_a] - default, pools = provider._populate_dynamic_pools('A', [], - 
response_pools) - self.assertEquals({ - 'ttl': 60, - 'type': 'A', - 'values': ['1.2.3.4'], - }, default) - self.assertEquals({}, pools) - - multi_a = [ - DummyRecord('1.2.3.5', 42, 90), - DummyRecord('1.2.3.6', 43, 90), - DummyRecord('1.2.3.7', 44, 90), - ] - example_a = DummyResponsePool('example', multi_a) - - # Just a named pool - response_pools = [example_a] - default, pools = provider._populate_dynamic_pools('A', [], - response_pools) - self.assertEquals({}, default) - self.assertEquals({ - 'example': { - 'values': [{ - 'value': '1.2.3.5', - 'weight': 42, - }, { - 'value': '1.2.3.6', - 'weight': 43, - }, { - 'value': '1.2.3.7', - 'weight': 44, - }], - }, - }, pools) - - # Named pool that shows up twice - response_pools = [example_a, example_a] - default, pools = provider._populate_dynamic_pools('A', [], - response_pools) - self.assertEquals({}, default) - self.assertEquals({ - 'example': { - 'values': [{ - 'value': '1.2.3.5', - 'weight': 42, - }, { - 'value': '1.2.3.6', - 'weight': 43, - }, { - 'value': '1.2.3.7', - 'weight': 44, - }], - }, - }, pools) - - # Default & named - response_pools = [example_a, default_a, example_a] - default, pools = provider._populate_dynamic_pools('A', [], - response_pools) - self.assertEquals({ - 'ttl': 60, - 'type': 'A', - 'values': ['1.2.3.4'], - }, default) - self.assertEquals({ - 'example': { - 'values': [{ - 'value': '1.2.3.5', - 'weight': 42, - }, { - 'value': '1.2.3.6', - 'weight': 43, - }, { - 'value': '1.2.3.7', - 'weight': 44, - }], - }, - }, pools) - - # empty rs_chains doesn't cause an example, just ignores - empty_a = DummyResponsePool('empty') - response_pools = [empty_a] - default, pools = provider._populate_dynamic_pools('A', [], - response_pools) - self.assertEquals({}, default) - self.assertEquals({}, pools) - - def test_populate_dynamic_rules(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Empty - rulesets = [] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([], rules) - - # default: is ignored - rulesets = [DummyRuleset('default:')] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([], rules) - - # No ResponsePools in RuleSet, ignored - rulesets = [DummyRuleset('0:abcdefg')] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([], rules) - - # ResponsePool, no fallback - rulesets = [DummyRuleset('0:abcdefg', [ - DummyResponsePool('some-pool') - ])] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([{ - 'pool': 'some-pool', - }], rules) - - # ResponsePool, with dfault fallback (ignored) - rulesets = [DummyRuleset('0:abcdefg', [ - DummyResponsePool('some-pool'), - DummyResponsePool('default'), - ])] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([{ - 'pool': 'some-pool', - }], rules) - - # ResponsePool, with fallback - rulesets = [DummyRuleset('0:abcdefg', [ - DummyResponsePool('some-pool'), - DummyResponsePool('some-fallback'), - ])] - pools = { - 'some-pool': {}, - } - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([{ - 'pool': 'some-pool', - }], rules) - # fallback has been installed - self.assertEquals({ - 'some-pool': { - 'fallback': 'some-fallback', - } - }, pools) - - # Unsupported criteria_type (ignored) - rulesets = [DummyRuleset('0:abcdefg', [ - DummyResponsePool('some-pool') - ], 'unsupported')] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, 
pools) - self.assertEquals([], rules) - - # Geo Continent/Region - response_pools = [DummyResponsePool('some-pool')] - criteria = { - 'geoip': { - 'country': ['US'], - 'province': ['or'], - 'region': [14], - }, - } - ruleset = DummyRuleset('0:abcdefg', response_pools, - 'geoip', criteria) - rulesets = [ruleset] - pools = {} - rules = provider._populate_dynamic_rules(rulesets, pools) - self.assertEquals([{ - 'geos': ['AF', 'NA-US', 'NA-US-OR'], - 'pool': 'some-pool', - }], rules) - - def test_populate_dynamic_traffic_director(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - fqdn = 'dynamic.unit.tests.' - - multi_a = [ - DummyRecord('1.2.3.5', 1, 90), - DummyRecord('1.2.3.6', 1, 90), - DummyRecord('1.2.3.7', 1, 90), - ] - default_response_pool = DummyResponsePool('default', multi_a) - pool1_response_pool = DummyResponsePool('pool1', multi_a) - rulesets = [ - DummyRuleset('default', [default_response_pool]), - DummyRuleset('0:abcdef', [pool1_response_pool], 'geoip', { - 'geoip': { - 'country': ['US'], - 'province': ['or'], - 'region': [14], - }, - }), - ] - zone = Zone('unit.tests.', []) - td = DummyTrafficDirector(zone.name, rulesets, - [default_response_pool, pool1_response_pool]) - record = provider._populate_dynamic_traffic_director(zone, fqdn, 'A', - td, rulesets, - True) - self.assertTrue(record) - self.assertEquals('A', record._type) - self.assertEquals(90, record.ttl) - self.assertEquals([ - '1.2.3.5', - '1.2.3.6', - '1.2.3.7', - ], record.values) - self.assertTrue('pool1' in record.dynamic.pools) - self.assertEquals({ - 'fallback': None, - 'values': [{ - 'value': '1.2.3.5', - 'weight': 1, - 'status': 'obey', - }, { - 'value': '1.2.3.6', - 'weight': 1, - 'status': 'obey', - }, { - 'value': '1.2.3.7', - 'weight': 1, - 'status': 'obey', - }] - }, record.dynamic.pools['pool1'].data) - self.assertEquals(2, len(record.dynamic.rules)) - self.assertEquals({ - 'pool': 'default', - }, record.dynamic.rules[0].data) - self.assertEquals({ - 'pool': 'pool1', - 'geos': ['AF', 'NA-US', 'NA-US-OR'], - }, record.dynamic.rules[1].data) - - # Hack into the provider and create a fake list of traffic directors - provider._traffic_directors = { - 'dynamic.unit.tests.': { - 'A': td, - } - } - zone = Zone('unit.tests.', []) - records = provider._populate_traffic_directors(zone, lenient=True) - self.assertEquals(1, len(records)) - - def test_dynamic_records_for_A(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Empty - records = provider._dynamic_records_for_A([], {}) - self.assertEquals([], records) - - # Basic - values = [{ - 'value': '1.2.3.4', - }, { - 'value': '1.2.3.5', - 'weight': 42, - }] - records = provider._dynamic_records_for_A(values, {}) - self.assertEquals(2, len(records)) - record = records[0] - self.assertEquals('1.2.3.4', record.address) - self.assertEquals(1, record.weight) - record = records[1] - self.assertEquals('1.2.3.5', record.address) - self.assertEquals(42, record.weight) - - # With extras - records = provider._dynamic_records_for_A(values, { - 'automation': 'manual', - 'eligible': True, - }) - self.assertEquals(2, len(records)) - record = records[0] - self.assertEquals('1.2.3.4', record.address) - self.assertEquals(1, record.weight) - self.assertEquals('manual', record._automation) - self.assertTrue(record.eligible) - - def test_dynamic_records_for_AAAA(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Empty - records = provider._dynamic_records_for_AAAA([], {}) - self.assertEquals([], records) - - # Basic - values = [{ - 
'value': '2601:644:500:e210:62f8:1dff:feb8:947a', - }, { - 'value': '2601:644:500:e210:62f8:1dff:feb8:947b', - 'weight': 42, - }] - records = provider._dynamic_records_for_AAAA(values, {}) - self.assertEquals(2, len(records)) - record = records[0] - self.assertEquals('2601:644:500:e210:62f8:1dff:feb8:947a', - record.address) - self.assertEquals(1, record.weight) - record = records[1] - self.assertEquals('2601:644:500:e210:62f8:1dff:feb8:947b', - record.address) - self.assertEquals(42, record.weight) - - # With extras - records = provider._dynamic_records_for_AAAA(values, { - 'automation': 'manual', - 'eligible': True, - }) - self.assertEquals(2, len(records)) - record = records[0] - self.assertEquals('2601:644:500:e210:62f8:1dff:feb8:947a', - record.address) - self.assertEquals(1, record.weight) - self.assertEquals('manual', record._automation) - self.assertTrue(record.eligible) - - def test_dynamic_records_for_CNAME(self): - provider = DynProvider('test', 'cust', 'user', 'pass') - - # Empty - records = provider._dynamic_records_for_CNAME([], {}) - self.assertEquals([], records) - - # Basic - values = [{ - 'value': 'target-1.unit.tests.', - }, { - 'value': 'target-2.unit.tests.', - 'weight': 42, - }] - records = provider._dynamic_records_for_CNAME(values, {}) - self.assertEquals(2, len(records)) - record = records[0] - self.assertEquals('target-1.unit.tests.', record.cname) - self.assertEquals(1, record.weight) - record = records[1] - self.assertEquals('target-2.unit.tests.', record.cname) - self.assertEquals(42, record.weight) - - # With extras - records = provider._dynamic_records_for_CNAME(values, { - 'automation': 'manual', - 'eligible': True, - }) - self.assertEquals(2, len(records)) - record = records[0] - self.assertEquals('target-1.unit.tests.', record.cname) - self.assertEquals(1, record.weight) - self.assertEquals('manual', record._automation) - self.assertTrue(record.eligible) - - def test_dynamic_value_sort_key(self): - values = [{ - 'value': '1.2.3.1', - }, { - 'value': '1.2.3.27', - }, { - 'value': '1.2.3.127', - }, { - 'value': '1.2.3.2', - }] - - self.assertEquals([{ - 'value': '1.2.3.1', - }, { - 'value': '1.2.3.127', - }, { - 'value': '1.2.3.2', - }, { - 'value': '1.2.3.27', - }], sorted(values, key=_dynamic_value_sort_key)) - - @patch('dyn.tm.services.DSFResponsePool.create') - def test_find_or_create_dynamic_pools(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass') - - td = 42 - label = 'foo' - values = [{ - 'value': '1.2.3.1', - }, { - 'value': '1.2.3.127', - }, { - 'value': '1.2.3.2', - }, { - 'value': '1.2.3.27', - }] - - # A Pool with no existing pools, will create - pools = [] - pool = provider._find_or_create_dynamic_pool(td, pools, label, 'A', - values) - self.assertIsInstance(pool, DSFResponsePool) - self.assertEquals(1, len(pool.rs_chains)) - self.assertEquals(1, len(pool.rs_chains[0].record_sets)) - records = pool.rs_chains[0].record_sets[0].records - self.assertEquals(4, len(records)) - self.assertEquals([v['value'] for v in values], - [r.address for r in records]) - self.assertEquals([1 for r in records], [r.weight for r in records]) - mock.assert_called_once_with(td) - - # Ask for the pool we created above and include it in the canidate list - mock.reset_mock() - pools = [pool] - cached = provider._find_or_create_dynamic_pool(td, pools, label, 'A', - values) - self.assertEquals(pool, cached) - mock.assert_not_called() - - # Invalid candidate pool, still finds the valid one that's there too - mock.reset_mock() - invalid = 
DSFResponsePool(label, rs_chains=[]) - pools = [invalid, pool] - cached = provider._find_or_create_dynamic_pool(td, pools, label, 'A', - values) - self.assertEquals(pool, cached) - mock.assert_not_called() - - # Ask for a pool with a different label, should create a new one - mock.reset_mock() - pools = [pool] - other = provider._find_or_create_dynamic_pool(td, pools, 'other', 'A', - values) - self.assertEquals('other', other.label) - mock.assert_called_once_with(td) - - # Ask for a pool that matches label-wise, but has different values - values = [{ - 'value': '1.2.3.44', - }] - mock.reset_mock() - pools = [pool] - new = provider._find_or_create_dynamic_pool(td, pools, label, 'A', - values) - self.assertEquals(label, new.label) - self.assertEquals(1, len(new.rs_chains)) - self.assertEquals(1, len(new.rs_chains[0].record_sets)) - records = new.rs_chains[0].record_sets[0].records - self.assertEquals(1, len(records)) - self.assertEquals([v['value'] for v in values], - [r.address for r in records]) - self.assertEquals([1 for r in records], [r.weight for r in records]) - mock.assert_called_once_with(td) - - zone = Zone('unit.tests.', []) - dynamic_a_record = Record.new(zone, '', { - 'dynamic': { - 'pools': { - 'one': { - 'values': [{ - 'value': '3.3.3.3', - }], - }, - 'two': { - # Testing out of order value sorting here - 'values': [{ - 'value': '5.5.5.5', - }, { - 'value': '4.4.4.4', - }], - }, - 'three': { - 'fallback': 'two', - 'values': [{ - 'weight': 10, - 'value': '4.4.4.4', - }, { - 'weight': 12, - 'value': '5.5.5.5', - }], - }, - }, - 'rules': [{ - 'geos': ['AF', 'EU', 'AS-JP'], - 'pool': 'three', - }, { - 'geos': ['NA-US-CA'], - 'pool': 'two', - }, { - 'pool': 'one', - }], - }, - 'type': 'A', - 'ttl': 60, - 'values': [ - '1.1.1.1', - '2.2.2.2', - ], - }) - geo_a_record = Record.new(zone, '', { - 'geo': { - 'AF': ['2.2.3.4', '2.2.3.5'], - 'AS-JP': ['3.2.3.4', '3.2.3.5'], - 'NA-US': ['4.2.3.4', '4.2.3.5'], - 'NA-US-CA': ['5.2.3.4', '5.2.3.5'] - }, - 'ttl': 300, - 'type': 'A', - 'values': ['1.2.3.4', '1.2.3.5'], - }) - regular_a_record = Record.new(zone, '', { - 'ttl': 301, - 'type': 'A', - 'value': '1.2.3.4', - }) - dynamic_cname_record = Record.new(zone, 'www', { - 'dynamic': { - 'pools': { - 'one': { - 'values': [{ - 'value': 'target-0.unit.tests.', - }], - }, - 'two': { - # Testing out of order value sorting here - 'values': [{ - 'value': 'target-1.unit.tests.', - }, { - 'value': 'target-2.unit.tests.', - }], - }, - 'three': { - 'values': [{ - 'weight': 10, - 'value': 'target-3.unit.tests.', - }, { - 'weight': 12, - 'value': 'target-4.unit.tests.', - }], - }, - }, - 'rules': [{ - 'geos': ['AF', 'EU', 'AS-JP'], - 'pool': 'three', - }, { - 'geos': ['NA-US-CA'], - 'pool': 'two', - }, { - 'pool': 'one', - }], - }, - 'type': 'CNAME', - 'ttl': 60, - 'value': 'target.unit.tests.', - }) - - dynamic_fallback_loop = Record.new(zone, '', { - 'dynamic': { - 'pools': { - 'one': { - 'values': [{ - 'value': '3.3.3.3', - }], - }, - 'two': { - # Testing out of order value sorting here - 'fallback': 'three', - 'values': [{ - 'value': '5.5.5.5', - }, { - 'value': '4.4.4.4', - }], - }, - 'three': { - 'fallback': 'two', - 'values': [{ - 'weight': 10, - 'value': '4.4.4.4', - }, { - 'weight': 12, - 'value': '5.5.5.5', - }], - }, - }, - 'rules': [{ - 'geos': ['AF', 'EU', 'AS-JP'], - 'pool': 'three', - }, { - 'geos': ['NA-US-CA'], - 'pool': 'two', - }, { - 'pool': 'one', - }], - }, - 'type': 'A', - 'ttl': 60, - 'values': [ - '1.1.1.1', - '2.2.2.2', - ], - }, lenient=True) - - 
@patch('dyn.tm.services.DSFRuleset.add_response_pool') - @patch('dyn.tm.services.DSFRuleset.create') - # just lets us ignore the pool.create calls - @patch('dyn.tm.services.DSFResponsePool.create') - def test_mod_dynamic_rulesets_create_CNAME(self, _, ruleset_create_mock, - add_response_pool_mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - td_mock = MagicMock() - td_mock._rulesets = [] - provider._traffic_director_monitor = MagicMock() - provider._find_or_create_dynamic_pool = MagicMock() - - td_mock.all_response_pools = [] - - provider._find_or_create_dynamic_pool.side_effect = [ - _DummyPool('default'), - _DummyPool('one'), - _DummyPool('two'), - _DummyPool('three'), - ] - - change = Create(self.dynamic_cname_record) - provider._mod_dynamic_rulesets(td_mock, change) - add_response_pool_mock.assert_has_calls(( - # default - call('default'), - # first dynamic and it's fallback - call('one'), - call('default', index=999), - # 2nd dynamic and it's fallback - call('three'), - call('default', index=999), - # 3nd dynamic and it's fallback - call('two'), - call('default', index=999), - )) - ruleset_create_mock.assert_has_calls(( - call(td_mock, index=0), - call(td_mock, index=0), - call(td_mock, index=0), - call(td_mock, index=0), - )) - - # have to patch the place it's imported into, not where it lives - @patch('octodns.provider.dyn.get_response_pool') - @patch('dyn.tm.services.DSFRuleset.add_response_pool') - @patch('dyn.tm.services.DSFRuleset.create') - # just lets us ignore the pool.create calls - @patch('dyn.tm.services.DSFResponsePool.create') - def test_mod_dynamic_rulesets_existing(self, _, ruleset_create_mock, - add_response_pool_mock, - get_response_pool_mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - ruleset_mock = MagicMock() - ruleset_mock.response_pools = [_DummyPool('three')] - - td_mock = MagicMock() - td_mock._rulesets = [ - ruleset_mock, - ] - provider._traffic_director_monitor = MagicMock() - provider._find_or_create_dynamic_pool = MagicMock() - # Matching ttl - td_mock.ttl = self.dynamic_a_record.ttl - - unused_pool = _DummyPool('unused') - td_mock.all_response_pools = \ - ruleset_mock.response_pools + [unused_pool] - get_response_pool_mock.return_value = unused_pool - - provider._find_or_create_dynamic_pool.side_effect = [ - _DummyPool('default'), - _DummyPool('one'), - _DummyPool('two'), - ruleset_mock.response_pools[0], - ] - - change = Create(self.dynamic_a_record) - provider._mod_dynamic_rulesets(td_mock, change) - add_response_pool_mock.assert_has_calls(( - # default - call('default'), - # first dynamic and it's fallback - call('one'), - call('default', index=999), - # 2nd dynamic and it's fallback - call('three'), - call('default', index=999), - # 3nd dynamic, from existing, and it's fallback - call('two'), - call('three', index=999), - call('default', index=999), - )) - ruleset_create_mock.assert_has_calls(( - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - )) - # unused poll should have been deleted - self.assertTrue(unused_pool.deleted) - # old ruleset ruleset should be deleted, it's pool will have been - # reused - ruleset_mock.delete.assert_called_once() - - # have to patch the place it's imported into, not where it lives - @patch('octodns.provider.dyn.get_response_pool') - @patch('dyn.tm.services.DSFRuleset.add_response_pool') - @patch('dyn.tm.services.DSFRuleset.create') - # just lets us ignore the 
pool.create calls - @patch('dyn.tm.services.DSFResponsePool.create') - def test_mod_dynamic_rulesets_fallback_loop(self, _, ruleset_create_mock, - add_response_pool_mock, - get_response_pool_mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - ruleset_mock = MagicMock() - ruleset_mock.response_pools = [_DummyPool('three')] - - td_mock = MagicMock() - td_mock._rulesets = [ - ruleset_mock, - ] - provider._traffic_director_monitor = MagicMock() - provider._find_or_create_dynamic_pool = MagicMock() - # Matching ttl - td_mock.ttl = self.dynamic_fallback_loop.ttl - - unused_pool = _DummyPool('unused') - td_mock.all_response_pools = \ - ruleset_mock.response_pools + [unused_pool] - get_response_pool_mock.return_value = unused_pool - - provider._find_or_create_dynamic_pool.side_effect = [ - _DummyPool('default'), - _DummyPool('one'), - _DummyPool('two'), - ruleset_mock.response_pools[0], - ] - - change = Create(self.dynamic_fallback_loop) - provider._mod_dynamic_rulesets(td_mock, change) - add_response_pool_mock.assert_has_calls(( - # default - call('default'), - # first dynamic and it's fallback - call('one'), - call('default', index=999), - # 2nd dynamic and it's fallback (no loop) - call('three'), - call('two', index=999), - call('default', index=999), - # 3nd dynamic and it's fallback (no loop) - call('two'), - call('three', index=999), - call('default', index=999), - )) - ruleset_create_mock.assert_has_calls(( - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - call(td_mock, index=2), - )) - # unused poll should have been deleted - self.assertTrue(unused_pool.deleted) - # old ruleset ruleset should be deleted, it's pool will have been - # reused - ruleset_mock.delete.assert_called_once() - - with open('./tests/fixtures/dyn-traffic-director-get.json') as fh: - traffic_director_response = loads(fh.read()) - - @property - def traffic_directors_response(self): - return { - 'data': [{ - 'active': 'Y', - 'label': 'unit.tests.:A', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '2ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }, { - 'active': 'Y', - 'label': 'some.other.:A', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '3ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }, { - 'active': 'Y', - 'label': 'other format', - 'nodes': [], - 'notifiers': [], - 'pending_change': '', - 'rulesets': [], - 'service_id': '4ERWXQNsb_IKG2YZgYqkPvk0PBM', - 'ttl': '300' - }] - } - - @patch('dyn.core.SessionEngine.execute') - def test_mod_dynamic_create(self, mock): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # will be tested separately - provider._mod_dynamic_rulesets = MagicMock() - - mock.side_effect = [ - # create traffic director - self.traffic_director_response, - # get traffic directors - self.traffic_directors_response - ] - provider._mod_dynamic_Create(None, Create(self.dynamic_a_record)) - # td now lives in cache - self.assertTrue('A' in provider.traffic_directors['unit.tests.']) - # should have seen 1 gen call - provider._mod_dynamic_rulesets.assert_called_once() - - def test_mod_dynamic_update_dynamic_dynamic(self): - provider = DynProvider('test', 'cust', 'user', 'pass', - traffic_directors_enabled=True) - - # update of an existing dynamic td - - # pre-populate the cache with our mock td - provider._traffic_directors = { - 'unit.tests.': { - 'A': 42, - } - } - # mock _mod_dynamic_rulesets - 
        provider._mod_dynamic_rulesets = MagicMock()
-
-        dyn = self.dynamic_a_record
-        change = Update(dyn, dyn)
-        provider._mod_dynamic_Update(None, change)
-        # still in cache
-        self.assertTrue('A' in provider.traffic_directors['unit.tests.'])
-        # should have seen 1 gen call
-        provider._mod_dynamic_rulesets.assert_called_once_with(42, change)
-
-    @patch('dyn.core.SessionEngine.execute')
-    def test_mod_dynamic_update_dynamic_geo(self, _):
-        provider = DynProvider('test', 'cust', 'user', 'pass',
-                               traffic_directors_enabled=True)
-
-        # convert a dynamic td to a geo record
-
-        provider._mod_geo_Update = MagicMock()
-
-        change = Update(self.dynamic_a_record, self.geo_a_record)
-        provider._mod_dynamic_Update(42, change)
-        # should have seen a call to create the new geo record
-        provider._mod_geo_Update.assert_called_once_with(42, change)
-
-    @patch('dyn.core.SessionEngine.execute')
-    def test_mod_dynamic_update_dynamic_regular(self, _):
-        provider = DynProvider('test', 'cust', 'user', 'pass',
-                               traffic_directors_enabled=True)
-
-        # convert a dynamic td to a regular record
-
-        provider._mod_Create = MagicMock()
-        provider._mod_dynamic_Delete = MagicMock()
-
-        change = Update(self.dynamic_a_record, self.regular_a_record)
-        provider._mod_dynamic_Update(42, change)
-        # should have seen a call to create the new regular record
-        provider._mod_Create.assert_called_once_with(42, change)
-        # should have seen a call to delete the old td record
-        provider._mod_dynamic_Delete.assert_called_once_with(42, change)
-
-    @patch('dyn.core.SessionEngine.execute')
-    def test_mod_dynamic_update_geo_dynamic(self, _):
-        provider = DynProvider('test', 'cust', 'user', 'pass',
-                               traffic_directors_enabled=True)
-
-        # convert a geo record to a dynamic td
-
-        # pre-populate the cache with our mock td
-        provider._traffic_directors = {
-            'unit.tests.': {
-                'A': 42,
-            }
-        }
-        # mock _mod_dynamic_rulesets
-        provider._mod_dynamic_rulesets = MagicMock()
-
-        change = Update(self.geo_a_record, self.dynamic_a_record)
-        provider._mod_dynamic_Update(None, change)
-        # still in cache
-        self.assertTrue('A' in provider.traffic_directors['unit.tests.'])
-        # should have seen 1 gen call
-        provider._mod_dynamic_rulesets.assert_called_once_with(42, change)
-
-    @patch('dyn.core.SessionEngine.execute')
-    def test_mod_dynamic_update_regular_dynamic(self, _):
-        provider = DynProvider('test', 'cust', 'user', 'pass',
-                               traffic_directors_enabled=True)
-
-        # convert a regular record to a dynamic td
-
-        provider._mod_dynamic_Create = MagicMock()
-        provider._mod_Delete = MagicMock()
-
-        change = Update(self.regular_a_record, self.dynamic_a_record)
-        provider._mod_dynamic_Update(42, change)
-        # should have seen a call to create the new geo record
-        provider._mod_dynamic_Create.assert_called_once_with(42, change)
-        # should have seen a call to delete the old regular record
-        provider._mod_Delete.assert_called_once_with(42, change)
-
-    @patch('dyn.core.SessionEngine.execute')
-    def test_mod_dynamic_delete(self, mock):
-        provider = DynProvider('test', 'cust', 'user', 'pass',
-                               traffic_directors_enabled=True)
-
-        td_mock = MagicMock()
-        provider._traffic_directors = {
-            'unit.tests.': {
-                'A': td_mock,
-            }
-        }
-        provider._mod_dynamic_Delete(None, Delete(self.dynamic_a_record))
-        # delete called
-        td_mock.delete.assert_called_once()
-        # removed from cache
-        self.assertFalse('A' in provider.traffic_directors['unit.tests.'])
-
-    @patch('dyn.core.SessionEngine.execute')
-    def test_apply_traffic_directors_dynamic(self, mock):
-        provider = DynProvider('test', 'cust', 'user', 'pass',
-                               traffic_directors_enabled=True)
-
-        # will be tested separately
-        provider._mod_dynamic_Create = MagicMock()
-
-        changes = [Create(self.dynamic_a_record)]
-        provider._apply_traffic_directors(self.zone, changes, None)
-        provider._mod_dynamic_Create.assert_called_once()
+    def test_missing(self):
+        with self.assertRaises(ModuleNotFoundError):
+            from octodns.provider.dyn import DynProvider
+            DynProvider