diff --git a/cloudflare-ddns.py b/cloudflare-ddns.py
index 1d56863..abf8362 100755
--- a/cloudflare-ddns.py
+++ b/cloudflare-ddns.py
@@ -15,7 +15,8 @@
 import threading
 import time
 
 import requests
-
+from bs4 import BeautifulSoup
+
 CONFIG_PATH = os.environ.get('CONFIG_PATH', os.getcwd())
@@ -49,6 +50,46 @@ def deleteEntries(type):
                     "DELETE", option)
                 print("🗑️ Deleted stale record " + identifier)
 
+# Scrape the public IPv4/IPv6 addresses from a router status page instead
+# of asking ipify. Returns the same {"ipv4"/"ipv6": {"type", "ip"}} shape
+# that getIPs() produces, so the rest of the script can consume either source.
+def scrapeIPs():
+    global scrape_url
+    global scrape_ipv4_label
+    global scrape_ipv6_label
+    global ipv4_enabled
+    global ipv6_enabled
+
+    ips = {}
+
+    # A timeout stops an unresponsive status page from hanging the update loop forever.
+    response = requests.get(scrape_url, timeout=30)
+    if response.ok and response.content:
+        soup = BeautifulSoup(response.content, "html.parser")
+        # find() returns None when the label is absent, and find_next_sibling()
+        # can too; guard both instead of crashing with AttributeError on an
+        # unexpected page layout. An empty cell is skipped rather than recorded.
+        if ipv4_enabled:
+            label = soup.find('th', string=scrape_ipv4_label)
+            cell = label.find_next_sibling("td") if label else None
+            if cell:
+                ipv4_address = cell.text.strip()
+                if ipv4_address:
+                    ips["ipv4"] = {
+                        "type": "A",
+                        "ip": ipv4_address
+                    }
+        if ipv6_enabled:
+            label = soup.find('th', string=scrape_ipv6_label)
+            cell = label.find_next_sibling("td") if label else None
+            if cell:
+                ipv6_address = cell.text.strip()
+                if ipv6_address:
+                    ips["ipv6"] = {
+                        "type": "AAAA",
+                        "ip": ipv6_address
+                    }
+    return ips
 
 def getIPs():
     a = None
@@ -56,6 +97,11 @@ def getIPs():
     global ipv4_enabled
     global ipv6_enabled
     global purgeUnknownRecords
+    global scrape_url
+
+    if scrape_url is not None:
+        return scrapeIPs()
+
     if ipv4_enabled:
         try:
             a = requests.get(
@@ -253,6 +299,8 @@ def updateIPs(ips):
     ipv4_enabled = True
     ipv6_enabled = True
     purgeUnknownRecords = False
+    scrape_url = None
+
 
     if sys.version_info < (3, 5):
         raise Exception("🐍 This script requires Python 3.5+")
@@ -274,6 +322,21 @@
         ipv4_enabled = True
         ipv6_enabled = True
         print("⚙️ Individually disable IPv4 or IPv6 with new config.json options. Read more about it here: https://github.com/timothymiller/cloudflare-ddns/blob/master/README.md")
+    try:
+        scrape_url = config["scrape_url"]
+        # Labels are only required for the record types that are enabled.
+        if ipv4_enabled:
+            scrape_ipv4_label = config["scrape_ipv4_label"]
+        if ipv6_enabled:
+            scrape_ipv6_label = config["scrape_ipv6_label"]
+    except KeyError:
+        # Missing keys simply disable scraping; a bare except here would
+        # also swallow KeyboardInterrupt/SystemExit.
+        scrape_url = None
+        scrape_ipv4_label = None
+        scrape_ipv6_label = None
+        print("⚙️ No config detected for 'scrape_url', 'scrape_ipv[4|6]_label' - defaulting to ipify")
+
     try:
         purgeUnknownRecords = config["purgeUnknownRecords"]
     except:
diff --git a/requirements.txt b/requirements.txt
index 077c95d..1f527e4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,2 @@
-requests==2.31.0
\ No newline at end of file
+requests==2.31.0
+beautifulsoup4==4.12.3