autodns: Moar fixes and stuff after tests on the battlefield

Alexandre Aubin 2021-09-14 18:04:02 +02:00
parent 0a21be694c
commit d5e366511a
4 changed files with 169 additions and 101 deletions


@@ -645,6 +645,12 @@ domain:
             full: --dry-run
             help: Only display what's to be pushed
             action: store_true
+        --autoremove:
+            help: Also autoremove records which are stale or not part of the recommended configuration
+            action: store_true
+        --purge:
+            help: Delete all records
+            action: store_true
     cert:
         subcategory_help: Manage domain certificates
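For context, a minimal sketch of how these new flags reach the push logic through the Python wrapper changed at the end of this diff; the domain name and values below are placeholders, not part of the commit.

# Hypothetical usage sketch; "example.tld" is a placeholder domain.
import yunohost.dns

# --dry-run --autoremove : only display what would be pushed, including deletions of stale records
print(yunohost.dns.domain_registrar_push("example.tld", dry_run=True, autoremove=True, purge=False))

# --purge --dry-run : preview a full wipe (the code below forces autoremove=True when purge is set)
print(yunohost.dns.domain_registrar_push("example.tld", dry_run=True, autoremove=False, purge=True))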


@@ -4,7 +4,8 @@
 redact = true

 [aliyun.auth_secret]
-type = "password"
+type = "string"
+redact = true

 [aurora]
 [aurora.auth_api_key]
@@ -12,7 +13,8 @@
 redact = true

 [aurora.auth_secret_key]
-type = "password"
+type = "string"
+redact = true

 [azure]
 [azure.auth_client_id]
@@ -20,7 +22,8 @@
 redact = true

 [azure.auth_client_secret]
-type = "password"
+type = "string"
+redact = true

 [azure.auth_tenant_id]
 type = "string"
@@ -215,7 +218,8 @@
 redact = true

 [exoscale.auth_secret]
-type = "password"
+type = "string"
+redact = true

 [gandi]
 [gandi.auth_token]
@@ -233,7 +237,8 @@
 redact = true

 [gehirn.auth_secret]
-type = "password"
+type = "string"
+redact = true

 [glesys]
 [glesys.auth_username]
@@ -250,7 +255,8 @@
 redact = true

 [godaddy.auth_secret]
-type = "password"
+type = "string"
+redact = true

 [googleclouddns]
 [goggleclouddns.auth_service_account_info]
@@ -415,7 +421,8 @@
 redact = true

 [netcup.auth_api_password]
-type = "password"
+type = "string"
+redact = true

 [nfsn]
 [nfsn.auth_username]
@@ -550,7 +557,8 @@
 redact = true

 [route53.auth_access_secret]
-type = "password"
+type = "string"
+redact = true

 [route53.private_zone]
 type = "string"
@@ -575,7 +583,8 @@
 redact = true

 [sakuracloud.auth_secret]
-type = "password"
+type = "string"
+redact = true

 [softlayer]
 [softlayer.auth_username]
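These credential fields end up in the lexicon configuration built by the registrar push code further down in this diff. A rough sketch of the resulting structure, with placeholder values and assuming the gandi provider:

# Rough sketch (placeholder values): each provider's auth_* fields become the
# per-provider credentials dict that is handed to lexicon.
registrar = "gandi"
registrar_credentials = {"auth_token": "placeholder-token"}

base_config = {
    "provider_name": registrar,   # e.g. "gandi"
    "domain": "example.tld",
    registrar: registrar_credentials,
}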


@@ -26,6 +26,7 @@
 import os
 import re
 import time
+from difflib import SequenceMatcher
 from collections import OrderedDict

 from moulinette import m18n, Moulinette
@@ -515,7 +516,7 @@ def _get_registrar_config_section(domain):

 @is_unit_operation()
-def domain_registrar_push(operation_logger, domain, dry_run=False):
+def domain_registrar_push(operation_logger, domain, dry_run=False, autoremove=False, purge=False):
     """
     Send DNS records to the previously-configured registrar of the domain.
     """

@@ -527,15 +528,15 @@ def domain_registrar_push(operation_logger, domain, dry_run=False):

     settings = domain_config_get(domain, key='dns.registrar')

-    registrar_id = settings["dns.registrar.registrar"].get("value")
+    registrar = settings["dns.registrar.registrar"].get("value")

-    if not registrar_id or registrar_id == "yunohost":
+    if not registrar or registrar in ["None", "yunohost"]:
         raise YunohostValidationError("registrar_push_not_applicable", domain=domain)

     registrar_credentials = {
         k.split('.')[-1]: v["value"]
         for k, v in settings.items()
         if k != "dns.registrar.registar"
     }

     if not all(registrar_credentials.values()):
@@ -547,11 +548,12 @@ def domain_registrar_push(operation_logger, domain, dry_run=False):
     for records in _build_dns_conf(domain).values():
         for record in records:

-            # Make sure we got "absolute" values instead of @
-            name = f"{record['name']}.{domain}" if record["name"] != "@" else f".{domain}"
+            # Make sure the name is a FQDN
+            name = f"{record['name']}.{domain}" if record["name"] != "@" else f"{domain}"
             type_ = record["type"]
             content = record["value"]

+            # Make sure the content is also a FQDN (with trailing . ?)
             if content == "@" and record["type"] == "CNAME":
                 content = domain + "."

@@ -568,37 +570,48 @@ def domain_registrar_push(operation_logger, domain, dry_run=False):
     # And yet, it is still not done/merged
     wanted_records = [record for record in wanted_records if record["type"] != "CAA"]

+    if purge:
+        wanted_records = []
+        autoremove = True
+
     # Construct the base data structure to use lexicon's API.
     base_config = {
-        "provider_name": registrar_id,
+        "provider_name": registrar,
         "domain": domain,
-        registrar_id: registrar_credentials
+        registrar: registrar_credentials
     }

-    # Fetch all types present in the generated records
-    current_records = []
-
-    # Get unique types present in the generated records
-    types = ["A", "AAAA", "MX", "TXT", "CNAME", "SRV"]
-
-    for key in types:
-        print("fetching type: " + key)
-
-        fetch_records_for_type = {
-            "action": "list",
-            "type": key,
-        }
-        query = (
-            LexiconConfigResolver()
-            .with_dict(dict_object=base_config)
-            .with_dict(dict_object=fetch_records_for_type)
-        )
-        current_records.extend(LexiconClient(query).execute())
+    # Ugly hack to be able to fetch all record types at once:
+    # we initialize a LexiconClient with type: dummytype,
+    # then trigger ourselves the authentication + list_records
+    # instead of calling .execute()
+    query = (
+        LexiconConfigResolver()
+        .with_dict(dict_object=base_config)
+        .with_dict(dict_object={"action": "list", "type": "dummytype"})
+    )
+    # current_records.extend(
+    client = LexiconClient(query)
+    client.provider.authenticate()
+    current_records = client.provider.list_records()
+
+    # Keep only records for relevant types: A, AAAA, MX, TXT, CNAME, SRV
+    relevant_types = ["A", "AAAA", "MX", "TXT", "CNAME", "SRV"]
+    current_records = [r for r in current_records if r["type"] in relevant_types]

     # Ignore records which are for a higher-level domain
     # i.e. we don't care about the records for domain.tld when pushing yuno.domain.tld
     current_records = [r for r in current_records if r['name'].endswith(f'.{domain}')]

+    for record in current_records:
+
+        # Try to get rid of weird stuff like ".domain.tld" or "@.domain.tld"
+        record["name"] = record["name"].strip("@").strip(".")
+
+        # Some API return '@' in content and we shall convert it to absolute/fqdn
+        record["content"] = record["content"].replace('@.', domain + ".").replace('@', domain + ".")
+
     # Step 0 : Get the list of unique (type, name)
     # And compare the current and wanted records
     #
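A standalone sketch of the normalization loop just above, with made-up records illustrating the ".domain.tld", "@.domain.tld" and "@" cases mentioned in the comments (the sample values are invented):

# Illustration only: sample records are hypothetical.
domain = "yuno.example.tld"

raw = [
    {"name": "@." + domain, "type": "A", "content": "203.0.113.1"},
    {"name": "." + domain, "type": "TXT", "content": "v=spf1 mx -all"},
    {"name": "xmpp-upload." + domain, "type": "CNAME", "content": "@"},
]

for record in raw:
    # Get rid of weird stuff like ".domain.tld" or "@.domain.tld"
    record["name"] = record["name"].strip("@").strip(".")
    # Some APIs return '@' in content; convert it to an absolute FQDN
    record["content"] = record["content"].replace('@.', domain + ".").replace('@', domain + ".")

print([r["name"] for r in raw])
# ['yuno.example.tld', 'yuno.example.tld', 'xmpp-upload.yuno.example.tld']
print(raw[2]["content"])
# 'yuno.example.tld.'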
@@ -622,105 +635,145 @@ def domain_registrar_push(operation_logger, domain, dry_run=False):
         comparison[(record["type"], record["name"])]["wanted"].append(record)

     for type_and_name, records in comparison.items():

         #
         # Step 1 : compute a first "diff" where we remove records which are the same on both sides
         # NB / FIXME? : in all this we ignore the TTL value for now...
         #
-        diff = {"current": [], "wanted": []}
-        current_contents = [r["content"] for r in records["current"]]
         wanted_contents = [r["content"] for r in records["wanted"]]
+        current_contents = [r["content"] for r in records["current"]]

-        print("--------")
-        print(type_and_name)
-        print(current_contents)
-        print(wanted_contents)
-
-        for record in records["current"]:
-            if record["content"] not in wanted_contents:
-                diff["current"].append(record)
-        for record in records["wanted"]:
-            if record["content"] not in current_contents:
-                diff["wanted"].append(record)
+        current = [r for r in records["current"] if r["content"] not in wanted_contents]
+        wanted = [r for r in records["wanted"] if r["content"] not in current_contents]

         #
         # Step 2 : simple case: 0 or 1 record on one side, 0 or 1 on the other
         # -> either nothing do (0/0) or a creation (0/1) or a deletion (1/0), or an update (1/1)
         #
-        if len(diff["current"]) == 0 and len(diff["wanted"]) == 0:
+        if len(current) == 0 and len(wanted) == 0:
             # No diff, nothing to do
             continue

-        if len(diff["current"]) == 1 and len(diff["wanted"]) == 0:
-            changes["delete"].append(diff["current"][0])
+        if len(current) == 1 and len(wanted) == 0:
+            changes["delete"].append(current[0])
             continue

-        if len(diff["current"]) == 0 and len(diff["wanted"]) == 1:
-            changes["create"].append(diff["wanted"][0])
+        if len(current) == 0 and len(wanted) == 1:
+            changes["create"].append(wanted[0])
             continue
         #
-        if len(diff["current"]) == 1 and len(diff["wanted"]) == 1:
-            diff["current"][0]["content"] = diff["wanted"][0]["content"]
-            changes["update"].append(diff["current"][0])
+        if len(current) == 1 and len(wanted) == 1:
+            current[0]["old_content"] = current[0]["content"]
+            current[0]["content"] = wanted[0]["content"]
+            changes["update"].append(current[0])
             continue

         #
-        # Step 3 : N record on one side, M on the other, watdo # FIXME
+        # Step 3 : N record on one side, M on the other
         #
-        for record in diff["wanted"]:
-            print(f"Dunno watdo with {type_and_name} : {record['content']}")
-        for record in diff["current"]:
-            print(f"Dunno watdo with {type_and_name} : {record['content']}")
+        # Fuzzy matching strategy:
+        # For each wanted record, try to find a current record which looks like the wanted one
+        # -> if found, trigger an update
+        # -> if no match found, trigger a create
+        #
+        for record in wanted:
+
+            def likeliness(r):
+                # We compute this only on the first 100 chars, to have a high value even for completely different DKIM keys
+                return SequenceMatcher(None, r["content"][:100], record["content"][:100]).ratio()
+
+            matches = sorted(current, key=lambda r: likeliness(r), reverse=True)
+            if matches and likeliness(matches[0]) > 0.50:
+                match = matches[0]
+                # Remove the match from 'current' so that it's not added to the removed stuff later
+                current.remove(match)
+                match["old_content"] = match["content"]
+                match["content"] = record["content"]
+                changes["update"].append(match)
+            else:
+                changes["create"].append(record)
+
+        #
+        # For all other remaining current records:
+        # -> trigger deletions
+        #
+        for record in current:
+            changes["delete"].append(record)
+
+    def human_readable_record(action, record):
+        name = record["name"]
+        name = name.strip(".")
+        name = name.replace('.' + domain, "")
+        name = name.replace(domain, "@")
+        name = name[:20]
+        t = record["type"]
+        if action in ["create", "update"]:
+            old_content = record.get("old_content", "(None)")[:30]
+            new_content = record.get("content", "(None)")[:30]
+        else:
+            new_content = record.get("old_content", "(None)")[:30]
+            old_content = record.get("content", "(None)")[:30]

+        return f'{name:>20} [{t:^5}] {old_content:^30} -> {new_content:^30}'

     if dry_run:
-        return {"changes": changes}
+        out = []
+        for action in ["delete", "create", "update"]:
+            out.append("\n" + action + ":\n")
+            for record in changes[action]:
+                out.append(human_readable_record(action, record))
+        return '\n'.join(out)

     operation_logger.start()

     # Push the records
-    for record in dns_conf:
-
-        # For each record, first check if one record exists for the same (type, name) couple
-        # TODO do not push if local and distant records are exactly the same ?
-        type_and_name = (record["type"], record["name"])
-        already_exists = any((r["type"], r["name"]) == type_and_name
-                             for r in current_remote_records)
-
-        # Finally, push the new record or update the existing one
-        record_to_push = {
-            "action": "update" if already_exists else "create",
-            "type": record["type"],
-            "name": record["name"],
-            "content": record["value"],
-            "ttl": record["ttl"],
-        }
-
-        # FIXME Removed TTL, because it doesn't work with Gandi.
-        # See https://github.com/AnalogJ/lexicon/issues/726 (similar issue)
-        # But I think there is another issue with Gandi. Or I'm misusing the API...
-        if base_config["provider_name"] == "gandi":
-            del record_to_push["ttl"]
-
-        print("pushed_record:", record_to_push)
-
-        # FIXME FIXME FIXME: if a matching record already exists multiple time,
-        # the current code crashes (at least on OVH) ... we need to provide a specific identifier to update
-        query = (
-            LexiconConfigResolver()
-            .with_dict(dict_object=base_config)
-            .with_dict(dict_object=record_to_push)
-        )
-
-        print(query)
-        print(query.__dict__)
-        results = LexiconClient(query).execute()
-        print("results:", results)
-        # print("Failed" if results == False else "Ok")
-
-        # FIXME FIXME FIXME : if one create / update crash, it shouldn't block everything
-
-    # FIXME : is it possible to push multiple create/update request at once ?
+    for action in ["delete", "create", "update"]:
+
+        if action == "delete" and not autoremove:
+            continue
+
+        for record in changes[action]:
+
+            record["action"] = action
+
+            # Apparently Lexicon yields us some 'id' during fetch
+            # But wants 'identifier' during push ...
+            if "id" in record:
+                record["identifier"] = record["id"]
+                del record["id"]
+
+            if "old_content" in record:
+                del record["old_content"]
+
+            if registrar == "godaddy":
+                if record["name"] == domain:
+                    record["name"] = "@." + record["name"]
+                if record["type"] in ["MX", "SRV"]:
+                    logger.warning(f"Pushing {record['type']} records is not properly supported by Lexicon/Godaddy.")
+                    continue
+
+            # FIXME Removed TTL, because it doesn't work with Gandi.
+            # See https://github.com/AnalogJ/lexicon/issues/726 (similar issue)
+            # But I think there is another issue with Gandi. Or I'm misusing the API...
+            if registrar == "gandi":
+                del record["ttl"]
+
+            logger.info(action + " : " + human_readable_record(action, record))
+
+            query = (
+                LexiconConfigResolver()
+                .with_dict(dict_object=base_config)
+                .with_dict(dict_object=record)
+            )
+
+            try:
+                result = LexiconClient(query).execute()
+            except Exception as e:
+                logger.error(f"Failed to {action} record {record['type']}/{record['name']} : {e}")
+            else:
+                if result:
+                    logger.success("Done!")
+                else:
+                    logger.error("Uhoh!?")
+
+    # FIXME : implement a system to properly report what worked and what did not at the end of the command..


 # def domain_config_fetch(domain, key, value):
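To make the fuzzy matching above more concrete, here is a small self-contained sketch of the likeliness-based choice between update and create; the record contents are invented examples, not values from this commit:

# Illustration only: invented record contents.
from difflib import SequenceMatcher

wanted = {"type": "TXT", "name": "example.tld", "content": "v=spf1 a mx -all"}
current = [
    {"type": "TXT", "name": "example.tld", "content": "v=spf1 mx -all"},
    {"type": "TXT", "name": "example.tld", "content": "some-unrelated-verification-token"},
]

def likeliness(r):
    # Same trick as above: compare only the first 100 chars of the contents
    return SequenceMatcher(None, r["content"][:100], wanted["content"][:100]).ratio()

best = max(current, key=likeliness)
if likeliness(best) > 0.50:
    # Close enough: treat it as an update of the existing record
    print("update:", best["content"], "->", wanted["content"])
else:
    # Nothing similar enough: a brand new record would be created
    print("create:", wanted["content"])

The 100-character cap echoes the DKIM comment in the diff: it keeps the ratio from being dragged down by long stretches of differing key material when two records are otherwise the same kind of thing.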


@@ -491,6 +491,6 @@ def domain_dns_suggest(domain):
     return yunohost.dns.domain_dns_suggest(domain)


-def domain_dns_push(domain, dry_run):
+def domain_dns_push(domain, dry_run, autoremove, purge):
     import yunohost.dns
-    return yunohost.dns.domain_registrar_push(domain, dry_run)
+    return yunohost.dns.domain_registrar_push(domain, dry_run, autoremove, purge)