transip-client/transip_client/main.py
sonny eb74ac4f40
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
Update typing
2025-01-05 17:06:04 +01:00

161 lines
4.4 KiB
Python

import base64
import json
import logging
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Generator
import requests
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
from cryptography.hazmat.primitives.hashes import SHA512
logger = logging.getLogger(__name__)
def _get_ip(service: str) -> str:
    """Return the current public IP address as reported by *service*.

    Args:
        service: URL of an IP-echo service that returns the caller's IP
            as the plain-text response body.

    Returns:
        The IP address string, with surrounding whitespace removed.

    Raises:
        OSError: when the HTTP request fails or returns an error status.
    """
    try:
        response = requests.get(service, timeout=10)
        response.raise_for_status()
    except requests.RequestException as e:
        raise OSError(f"Unable to retrieve current IP from {service}") from e
    # Most IP-echo services terminate the body with a newline; strip it so
    # the value compares cleanly against DNS entry "content" fields.
    return response.text.strip()
def _get_token(private_key_path: str, login: str, api_url: str) -> str:
    """Request a fresh bearer token from the TransIP auth endpoint.

    The prepared request body is signed with the account's RSA private
    key (PKCS#1 v1.5 padding, SHA-512 digest) and the base64 signature is
    sent in the ``Signature`` header, as the TransIP API requires.

    Args:
        private_key_path: Path to the PEM-encoded RSA private key file.
        login: TransIP account name.
        api_url: Base URL of the TransIP REST API.

    Returns:
        The token string from the auth response.

    Raises:
        requests.HTTPError: when the auth endpoint returns an error status.
    """
    request = requests.Request(
        "POST",
        f"{api_url}/auth",
        json={
            "login": login,
            # Millisecond timestamp doubles as a monotonically increasing nonce.
            "nonce": str(int(time.time() * 1000)),
            "read_only": False,
            "expiration_time": "30 minutes",
            "label": "Custom token",
            "global_key": True,
        },
    )
    prepped_request = request.prepare()
    with open(private_key_path, "rb") as file:
        private_key = serialization.load_pem_private_key(
            file.read(), password=None, backend=default_backend()
        )
    # Sign the exact serialized body that will be sent over the wire.
    signature = base64.b64encode(
        private_key.sign(prepped_request.body, PKCS1v15(), SHA512())
    )
    prepped_request.headers["Signature"] = signature.decode("ascii")
    with requests.Session() as session:
        response = session.send(prepped_request)
    # raise_for_status() is already a no-op on 2xx responses, so the
    # previous `if not response.ok` guard around it was redundant.
    response.raise_for_status()
    return response.json()["token"]
def _get_domain(domain: str, token: str, api_url: str) -> requests.Response:
    """Fetch the DNS entries for *domain* from the TransIP API.

    Args:
        domain: Domain name to query.
        token: Bearer token for authentication.
        api_url: Base URL of the TransIP REST API.

    Returns:
        The raw :class:`requests.Response`; status checking is the caller's job.
    """
    headers = {"Authorization": f"Bearer {token}"}
    # timeout keeps a stalled connection from hanging a worker thread
    # forever (consistent with _get_ip).
    return requests.get(
        f"{api_url}/domains/{domain}/dns", headers=headers, timeout=10
    )
def _get_domain_data(domains: list[str], token: str, api_url: str) -> Generator[dict, None, None]:
    """Yield DNS data for each domain, fetched concurrently.

    Each yielded dict is the API response body augmented with a
    ``"domain"`` key. Domains whose request fails with an HTTP error are
    logged and skipped rather than aborting the whole run.
    """
    with ThreadPoolExecutor(max_workers=10) as pool:
        pending = {
            pool.submit(_get_domain, name, token, api_url): name
            for name in domains
        }
        for done in as_completed(pending):
            name = pending[done]
            try:
                result = done.result()
                result.raise_for_status()
            except requests.HTTPError:
                logger.exception(f"Failed retrieving information for {name}")
            else:
                yield {"domain": name, **result.json()}
def _update_domain(
    domain: str, payload: dict, api_url: str, token: str
) -> requests.Response:
    """PUT the updated DNS entries for *domain* to the TransIP API.

    Args:
        domain: Domain name to update.
        payload: Body of the form ``{"dnsEntries": [...]}``.
        api_url: Base URL of the TransIP REST API.
        token: Bearer token for authentication.

    Returns:
        The raw :class:`requests.Response`; status checking is the caller's job.
    """
    headers = {"Authorization": f"Bearer {token}"}
    # `json=` serializes the payload AND sets `Content-Type: application/json`;
    # the previous `data=json.dumps(payload)` sent a JSON body without that
    # header. timeout added for consistency with the other requests.
    return requests.put(
        f"{api_url}/domains/{domain}/dns", json=payload, headers=headers, timeout=10
    )
def _update_domains(
    updated_domains: dict, api_url: str, token: str, read_only: bool
) -> None:
    """Concurrently push DNS updates for each changed domain.

    Args:
        updated_domains: Mapping of domain name -> ``{"dnsEntries": [...]}``.
        api_url: Base URL of the TransIP REST API.
        token: Bearer token for authentication.
        read_only: When True, perform no writes at all (dry-run mode).
    """
    if read_only:
        return
    with ThreadPoolExecutor(max_workers=10) as executor:
        futures = {
            executor.submit(_update_domain, domain, entries, api_url, token): domain
            for domain, entries in updated_domains.items()
        }
        for future in as_completed(futures):
            domain = futures[future]
            try:
                # result() re-raises worker exceptions: previously it was
                # called outside the try, so connection errors/timeouts
                # escaped and aborted the whole loop instead of being
                # logged per-domain. RequestException covers HTTPError too.
                future.result().raise_for_status()
            except requests.RequestException:
                logger.exception(f"Unable to update domain {domain}")
                continue
            logger.info(f"Updated domain {domain}")
def detect(
    domains: list[str],
    service: str,
    credentials: tuple[str, str],
    token: str,
    api_url: str,
    read_only: bool,
) -> None:
    """Point every DNS entry of the given domains at the current public IP.

    Fetches the current IP from *service*, mints a fresh token from
    *credentials* when both parts are present (otherwise the supplied
    *token* is used as-is), rewrites each DNS entry's ``content`` to the
    IP, and pushes only the domains whose entries actually changed.
    """
    ip = _get_ip(service)
    if all(credentials):
        token = _get_token(*credentials, api_url)
    updated_domains = {}
    for data in _get_domain_data(domains, token, api_url):
        entries = data["dnsEntries"]
        # Rewrite every entry's content; keep the rest of each record intact.
        rewritten = [{**entry, "content": ip} for entry in entries]
        if rewritten != entries:
            updated_domains[data["domain"]] = {"dnsEntries": rewritten}
    _update_domains(updated_domains, api_url, token, read_only)