class TimeDelta:
    """A normalised duration broken into days, hours, minutes and seconds.

    Any component may be ``None`` (treated as 0) — this happens when the
    value comes from a missing JSON key such as ``UpgradeTimeH``.
    Components are normalised on construction, e.g. ``minutes=90``
    becomes ``hours=1, minutes=30``.
    """

    def __init__(self, days=0, hours=0, minutes=0, seconds=0):
        # ``or 0`` guards against None values from absent JSON fields.
        total = (
            (days or 0) * 86400
            + (hours or 0) * 3600
            + (minutes or 0) * 60
            + (seconds or 0)
        )
        # Normalise into canonical day/hour/minute/second components.
        self.days, remainder = divmod(total, 86400)
        self.hours, remainder = divmod(remainder, 3600)
        self.minutes, self.seconds = divmod(remainder, 60)

    def total_seconds(self):
        """Returns the total number of seconds in the time object.

        Computed from the current attribute values rather than a value
        cached at construction, so callers who mutate ``days``/``hours``/
        ``minutes``/``seconds`` afterwards still get a consistent answer
        (a cached ``_total_seconds`` would go stale).

        Returns
        -------
        int
            The number of seconds
        """
        return ((self.days * 24 + self.hours) * 60 + self.minutes) * 60 + self.seconds
def get_direct_apk_url(version_suffix="2"):
    """
    Skips to the latest APK download page and returns a working intermediate download URL.
    This link prompts the browser or urllib to download the actual APK.

    Parameters
    ----------
    version_suffix: str
        Variant suffix appended to the version in the download-page URL
        (APKMirror publishes several variants per release).

    Returns
    -------
    str
        Absolute URL of the intermediate download page.

    Raises
    ------
    Exception
        If the release link, version number or download button cannot be
        found in the scraped HTML.
    """
    session = requests.Session()
    session.headers.update(HEADERS)

    print("[*] Fetching main Clash of Clans page...")
    # Timeouts stop the updater hanging forever on a stalled connection;
    # raise_for_status surfaces HTTP errors instead of scraping an error page.
    resp = session.get(COC_PAGE, timeout=30)
    resp.raise_for_status()
    soup = BeautifulSoup(resp.text, "html.parser")

    release_link = soup.select_one("div.appRow a.downloadLink")
    if not release_link:
        raise Exception("ERROR: No release link found on APKMirror home page")

    release_page_url = APK_MIRROR_BASE + release_link.get("href")
    print(f"[+] Latest release page: {release_page_url}")

    # The release URL embeds the version, e.g. ".../clash-of-clans-16-0-4-release/".
    version_match = re.search(r"clash-of-clans-([\d-]+)-release", release_page_url)
    if not version_match:
        raise Exception("ERROR: Could not extract version number from release URL")

    # Keep only the first three numeric segments (major-minor-patch).
    version_num = "-".join(version_match.group(1).split("-")[:3])

    # Construct the direct variant page
    download_page = (
        f"{release_page_url}clash-of-clans-{version_num}-{version_suffix}"
        "-android-apk-download/"
    )
    print(f"[+] Variant download page: {download_page}")

    variant_page = session.get(download_page, timeout=30)
    variant_page.raise_for_status()
    variant_soup = BeautifulSoup(variant_page.text, "html.parser")

    dl_button = variant_soup.select_one("a.downloadButton")
    if not dl_button:
        # Message fixed to match the "ERROR: " prefix used everywhere else.
        raise Exception("ERROR: Download button not found on variant page")

    intermediate_url = APK_MIRROR_BASE + dl_button.get("href")
    print(f"[+] Final download link (intermediate, triggers download): {intermediate_url}")
    return intermediate_url
def get_fingerprint():
    """Download the latest Clash of Clans APK and extract the asset fingerprint.

    Resolves the APKMirror download page via ``get_direct_apk_url``, scrapes
    the direct download link, streams the APK to a temporary ``apk.zip`` and
    reads the ``sha`` field from ``assets/fingerprint.json`` inside it. The
    temporary file is always removed, even on failure.

    Returns
    -------
    str
        The ``sha`` fingerprint string from ``assets/fingerprint.json``.

    Raises
    ------
    Exception
        If the direct download link cannot be found, the response is not an
        APK, or the downloaded file is not a valid ZIP archive.
    """
    # NOTE(review): the module-level ``APK_URL = get_direct_apk_url()`` makes a
    # network request at import time and is unused here — consider removing it.
    apk_url = get_direct_apk_url()
    print(f"[+] Getting download page: {apk_url}")

    # create a session to handle cookies and redirects between the page
    # fetch and the actual file download
    session = requests.Session()
    session.headers.update({
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
    })

    # get the download page; timeout + status check so a stalled or failing
    # request does not hang or get scraped as if it were the real page
    resp = session.get(apk_url, timeout=30)
    resp.raise_for_status()
    soup = BeautifulSoup(resp.text, 'html.parser')

    # find the direct download link
    download_link = soup.select_one('p:-soup-contains("Your download will start") a')
    if not download_link:
        raise Exception("ERROR: Could not find direct download link on page")

    # get the relative URL and make it absolute
    relative_url = download_link.get('href')
    direct_url = f"https://www.apkmirror.com{relative_url}"
    print(f"[+] Found direct download URL: {direct_url}")

    # download the APK using the direct URL
    print("[+] Downloading APK file...")
    response = session.get(direct_url, stream=True, timeout=60)
    response.raise_for_status()

    # an HTML error page would fail this check; real APKs come back as an
    # application/* content type
    if not response.headers.get('content-type', '').startswith('application/'):
        raise Exception("ERROR: Response is not an APK file")

    try:
        # stream to disk so large APKs do not have to fit in memory; the
        # write happens inside try/finally so a partial download is cleaned up
        with open("apk.zip", "wb") as f:
            for chunk in response.iter_content(chunk_size=8192):
                if chunk:
                    f.write(chunk)

        # unzip and extract fingerprint.json
        try:
            with zipfile.ZipFile("apk.zip", "r") as zf:
                with zf.open("assets/fingerprint.json") as fp:
                    fingerprint = json.loads(fp.read())["sha"]
            print(f"[+] Successfully extracted fingerprint: {fingerprint}")
        except zipfile.BadZipFile as exc:
            # chain the original error for easier debugging
            raise Exception("ERROR: Downloaded file is not a valid APK (ZIP) file") from exc
    finally:
        # clean up the APK file in every case
        if os.path.exists("apk.zip"):
            os.remove("apk.zip")

    return fingerprint