SUPPORTED_METADATA_VERSION = 1
+assert sys.version_info >= (3, 5), "Python >= 3.5 is required."
+
# accepts {<file-path>: <file-content>}
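+# For illustration only (hypothetical path and truncated content):
+#   profiles = {"snapshots/targets/ath79/generic/profiles.json": '{"metadata_version": 1, ...}'}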
def merge_profiles(profiles, download_url):
title.get("vendor", ""), title["model"], title.get("variant", "")
).strip()
- def add_profile(id, target, profile, code=None):
+ def add_profile(path, id, target, profile, code=None):
images = []
for image in profile["images"]:
images.append({"name": image["name"], "type": image["type"]})
        for entry in profile["titles"]:
            title = get_title(entry)
if len(title) == 0:
- sys.stderr.write(f"Empty title. Skip title in {path}\n")
+ sys.stderr.write(
+ "Empty title. Skip title for {} in {}\n".format(id, path)
+ )
continue
output["models"][title] = {"id": id, "target": target, "images": images}
if obj["metadata_version"] != SUPPORTED_METADATA_VERSION:
sys.stderr.write(
- f"{path} has unsupported metadata version: {obj['metadata_version']} => skip\n"
+ "{} has unsupported metadata version: {} => skip\n".format(
+ path, obj["metadata_version"]
+ )
)
continue
try:
if "profiles" in obj:
for id in obj["profiles"]:
- add_profile(id, obj.get("target"), obj["profiles"][id], code)
+ add_profile(path, id, obj.get("target"), obj["profiles"][id], code)
else:
- add_profile(obj["id"], obj["target"], obj, code)
+ add_profile(path, obj["id"], obj["target"], obj, code)
except json.decoder.JSONDecodeError as e:
- sys.stderr.write(f"Skip {path}\n {e}\n")
+ sys.stderr.write("Skip {}\n {}\n".format(path, e))
except KeyError as e:
- sys.stderr.write(f"Abort on {path}\n Missing key {e}\n")
+ sys.stderr.write("Abort on {}\n Missing key {}\n".format(path, e))
exit(1)
return output
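+# Sketch of the merged result: the "models" entries follow add_profile()
+# above; all concrete values here are illustrative, not taken from real data:
+#   {"models": {"Vendor Model v1": {"id": "...", "target": "...", "images": [...]}}}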
def update_config(config_path, versions):
content = ""
- with open(config_path, "r") as file:
+ with open(str(config_path), "r") as file:
content = file.read()
- content = re.sub("versions:[\\s]*{[^}]*}", f"versions: {versions}", content)
- with open(config_path, "w+") as file:
+ content = re.sub("versions:[\\s]*{[^}]*}", "versions: {}".format(versions), content)
+ with open(str(config_path), "w+") as file:
file.write(content)
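+# Example of the substitution, assuming a hypothetical release: with
+# versions == {"21.02.0": "data/21.02.0/overview.json"}, the block in
+# config.js becomes
+#   versions: {'21.02.0': 'data/21.02.0/overview.json'}
+# (str.format() renders the dict with Python-style single quotes, which
+# JavaScript accepts for string literals).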
def scrape(args):
url = args.domain
selector_path = args.selector
- config_path = f"{selector_path}/config.js"
- data_path = f"{selector_path}/data"
+ config_path = "{}/config.js".format(selector_path)
+ data_path = "{}/data".format(selector_path)
versions = {}
def handle_release(target):
profiles = {}
- with urllib.request.urlopen(f"{target}/?json") as file:
+ with urllib.request.urlopen("{}/?json".format(target)) as file:
array = json.loads(file.read().decode("utf-8"))
for profile in filter(lambda x: x.endswith("/profiles.json"), array):
- with urllib.request.urlopen(f"{target}/{profile}") as file:
- profiles[f"{target}/{profile}"] = file.read()
+ with urllib.request.urlopen("{}/{}".format(target, profile)) as file:
+ profiles["{}/{}".format(target, profile)] = file.read().decode(
+ "utf-8"
+ )
return profiles
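+    # handle_release() assumes the "?json" endpoint returns a JSON array of
+    # file paths below the target directory, e.g. (illustrative):
+    #   ["ath79/generic/profiles.json", "ath79/generic/sha256sums"]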
if not os.path.isfile(config_path):
- print(f"file not found: {config_path}")
+ print("file not found: {}".format(config_path))
exit(1)
# fetch release URLs
    with urllib.request.urlopen(url) as infile:
        for path in re.findall(r"href=[\"']?([^'\" >]+)", str(infile.read())):
if not path.startswith("/") and path.endswith("targets/"):
release = path.strip("/").split("/")[-2]
- download_url = f"{url}/{path}/{{target}}"
+ download_url = "{}/{}/{{target}}".format(url, path)
- profiles = handle_release(f"{url}/{path}")
+ profiles = handle_release("{}/{}".format(url, path))
output = merge_profiles(profiles, download_url)
if len(output) > 0:
- Path(f"{data_path}/{release}").mkdir(parents=True, exist_ok=True)
+ os.makedirs("{}/{}".format(data_path, release), exist_ok=True)
# write overview.json
- with open(f"{data_path}/{release}/overview.json", "w") as outfile:
+ with open(
+ "{}/{}/overview.json".format(data_path, release), "w"
+ ) as outfile:
if args.formatted:
json.dump(output, outfile, indent=" ", sort_keys=True)
else:
json.dump(output, outfile, sort_keys=True)
- versions[release.upper()] = f"data/{release}/overview.json"
+ versions[release.upper()] = "data/{}/overview.json".format(release)
update_config(config_path, versions)
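+# Hypothetical invocation (script name and selector path are examples):
+#   python3 collect.py scrape https://downloads.openwrt.org www/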
def scrape_wget(args):
url = args.domain
selector_path = args.selector
- config_path = f"{selector_path}/config.js"
- data_path = f"{selector_path}/data"
+ config_path = "{}/config.js".format(selector_path)
+ data_path = "{}/data".format(selector_path)
versions = {}
with tempfile.TemporaryDirectory() as tmp_dir:
# download all profiles.json files
os.system(
- f"wget -c -r -P {tmp_dir} -A 'profiles.json' --reject-regex 'kmods|packages' --no-parent {url}"
+ "wget -c -r -P {} -A 'profiles.json' --reject-regex 'kmods|packages' --no-parent {}".format(
+ tmp_dir, url
+ )
)
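+        # wget mirrors the server into tmp_dir, so the globs below expect a
+        # layout like the following (hostname illustrative):
+        #   <tmp_dir>/downloads.openwrt.org/snapshots/
+        #   <tmp_dir>/downloads.openwrt.org/releases/<version>/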
# delete empty folders
- os.system(f"find {tmp_dir}/* -type d -empty -delete")
+ os.system("find {}/* -type d -empty -delete".format(tmp_dir))
# create overview.json files
- for path in glob.glob(f"{tmp_dir}/*/snapshots") + glob.glob(
- f"{tmp_dir}/*/releases/*"
+ for path in glob.glob("{}/*/snapshots".format(tmp_dir)) + glob.glob(
+ "{}/*/releases/*".format(tmp_dir)
):
release = os.path.basename(path)
base = path[len(tmp_dir) + 1 :]
- versions[release.upper()] = f"data/{release}/overview.json"
- os.system(f"mkdir -p {selector_path}/data/{release}/")
-
profiles = {}
for ppath in Path(path).rglob("profiles.json"):
- with open(ppath, "r") as file:
+ with open(str(ppath), "r") as file:
profiles[ppath] = file.read()
- output = merge_profiles(profiles, f"https://{base}/targets/{{target}}")
- Path(f"{data_path}/{release}").mkdir(parents=True, exist_ok=True)
+ if len(profiles) == 0:
+ continue
+
+ versions[release.upper()] = "data/{}/overview.json".format(release)
+
+ output = merge_profiles(
+ profiles, "https://{}/targets/{{target}}".format(base)
+ )
+ os.makedirs("{}/{}".format(data_path, release), exist_ok=True)
# write overview.json
- with open(f"{data_path}/{release}/overview.json", "w") as outfile:
+ with open("{}/{}/overview.json".format(data_path, release), "w") as outfile:
if args.formatted:
json.dump(output, outfile, indent=" ", sort_keys=True)
else:
                    json.dump(output, outfile, sort_keys=True)

        update_config(config_path, versions)


def merge(args):
    input_paths = args.input_path
    profiles = {}
def add_path(path):
- with open(path, "r") as file:
+ with open(str(path), "r") as file:
profiles[path] = file.read()
for path in input_paths:
        if os.path.isdir(path):
            for filepath in Path(path).rglob("*.json"):
                add_path(filepath)
else:
if not path.endswith(".json"):
- sys.stderr.write(f"Folder does not exists: {path}\n")
+ sys.stderr.write("Folder does not exists: {}\n".format(path))
exit(1)
add_path(path)
    output = merge_profiles(profiles, args.download_url)
    json.dump(output, sys.stdout, sort_keys=True)
+"""
+Scan local directory for releases with profiles.json.
+Merge into overview.json files.
+Update config.json.
+"""
+
+
+def scan(args):
+ selector_path = args.selector
+ config_path = "{}/config.js".format(selector_path)
+ data_path = "{}/data".format(selector_path)
+ versions = {}
+
+ # create overview.json files
+ for path in glob.glob("{}/snapshots".format(args.directory)) + glob.glob(
+ "{}/releases/*".format(args.directory)
+ ):
+ release = os.path.basename(path)
+ base_dir = path[len(args.directory) + 1 :]
+
+ profiles = {}
+ for ppath in Path(path).rglob("profiles.json"):
+ with open(str(ppath), "r", encoding="utf-8") as file:
+ profiles[ppath] = file.read()
+
+ if len(profiles) == 0:
+ continue
+
+ versions[release.upper()] = "data/{}/overview.json".format(release)
+
+ output = merge_profiles(
+ profiles, "https://{}/{}/targets/{{target}}".format(args.domain, base_dir)
+ )
+ os.makedirs("{}/{}".format(data_path, release), exist_ok=True)
+
+ # write overview.json
+ with open("{}/{}/overview.json".format(data_path, release), "w") as outfile:
+ if args.formatted:
+ json.dump(output, outfile, indent=" ", sort_keys=True)
+ else:
+ json.dump(output, outfile, sort_keys=True)
+
+ update_config(config_path, versions)
+
+
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--formatted", action="store_true", help="Output formatted JSON data."
)
- subparsers = parser.add_subparsers(dest="action", required=True)
+ subparsers = parser.add_subparsers(dest="action")
+ subparsers.required = True
    parser_merge = subparsers.add_parser("merge", help="Merge local profiles.json files.")
    parser_merge.add_argument("input_path", nargs="+")
    parser_merge.add_argument(
        "--download-url",
        default="",
        help="Link to get the image from. May contain {target}, {version} and {commit}",
    )
- parser_scrape = subparsers.add_parser(
- "scrape",
- help="Create a grid structure of horizontal, vertical and vertical connections.",
- )
+ parser_scrape = subparsers.add_parser("scrape", help="Scrape webpage for releases.")
parser_scrape.add_argument(
"domain", help="Domain to scrape. E.g. https://downloads.openwrt.org"
)
"--use-wget", action="store_true", help="Use wget to scrape the site."
)
+ parser_scan = subparsers.add_parser("scan", help="Scan directory for releases.")
+ parser_scan.add_argument(
+ "domain",
+ help="Domain for download_url attribute in overview.json. E.g. https://downloads.openwrt.org",
+ )
+ parser_scan.add_argument("directory", help="Directory to scan for releases.")
+ parser_scan.add_argument("selector", help="Path the config.js file is in.")
+
args = parser.parse_args()
if args.action == "merge":
merge(args)
+ if args.action == "scan":
+ scan(args)
+
if args.action == "scrape":
if args.use_wget:
scrape_wget(args)