Tool to create overview.json files and update the config.js.
import argparse
import glob
import json
import os
import re
import sys
import tempfile
import urllib.request
from pathlib import Path
# Only device JSON files with this metadata_version are understood.
SUPPORTED_METADATA_VERSION = 1


# accepts {<file-path>: <file-content>}
def merge_profiles(profiles, download_url):
    """Merge several profiles.json contents into one overview structure.

    Args:
        profiles: dict mapping file path -> raw JSON content (str or bytes).
        download_url: URL template stored verbatim in the output; may
            contain a literal "{target}" placeholder for the frontend.

    Returns:
        dict with "version_code", "download_url" and "models" keys, or an
        empty dict when no valid profile was found.
    """
    # json output data
    output = {}

    def get_title(title):
        # Prefer an explicit title, otherwise compose one from its parts.
        if "title" in title:
            return title["title"]
        return "{} {} {}".format(
            title.get("vendor", ""), title["model"], title.get("variant", "")
        ).strip()

    def add_profile(profile_id, target, profile, code=None):
        # NOTE: reads `path` from the enclosing loop for error messages.
        images = [
            {"name": image["name"], "type": image["type"]}
            for image in profile["images"]
        ]

        if target is None:
            target = profile["target"]

        for entry in profile["titles"]:
            title = get_title(entry)

            if len(title) == 0:
                sys.stderr.write(f"Empty title. Skip title in {path}\n")
                continue

            output["models"][title] = {
                "id": profile_id,
                "target": target,
                "images": images,
            }

            # only store a per-device code when it differs from the global one
            if code is not None:
                output["models"][title]["code"] = code

    for path, content in profiles.items():
        try:
            # Parse inside the try: previously json.loads() ran before the
            # try block, which made the JSONDecodeError handler unreachable.
            obj = json.loads(content)

            if obj["metadata_version"] != SUPPORTED_METADATA_VERSION:
                sys.stderr.write(
                    f"{path} has unsupported metadata version: {obj['metadata_version']} => skip\n"
                )
                continue

            code = obj.get("version_code", obj.get("version_commit"))

            if "version_code" not in output:
                output = {
                    "version_code": code,
                    "download_url": download_url,
                    "models": {},
                }

            # if we have mixed codes/commits, store in device object
            if output["version_code"] == code:
                code = None

            if "profiles" in obj:
                for profile_id in obj["profiles"]:
                    add_profile(
                        profile_id, obj.get("target"), obj["profiles"][profile_id], code
                    )
            else:
                add_profile(obj["id"], obj["target"], obj, code)
        except json.decoder.JSONDecodeError as e:
            # Malformed input file: report and continue with the next one.
            sys.stderr.write(f"Skip {path}\n {e}\n")
        except KeyError as e:
            # A mandatory key is missing: abort the whole merge.
            sys.stderr.write(f"Abort on {path}\n Missing key {e}\n")
            sys.exit(1)

    return output
def update_config(config_path, versions):
    """Rewrite the `versions: {...}` object inside config.js in place.

    Args:
        config_path: path of the config.js file to patch.
        versions: dict mapping version label -> overview.json path; its
            Python repr is substituted verbatim into the file.
    """
    with open(config_path, "r") as handle:
        text = handle.read()

    # Swap the whole existing versions object for the new mapping.
    patched = re.sub(r"versions:[\s]*{[^}]*}", f"versions: {versions}", text)

    with open(config_path, "w+") as handle:
        handle.write(patched)
"""
Scrape profiles.json using links like https://downloads.openwrt.org/releases/19.07.3/targets/?json
Merge into overview.json files.
Update config.js.
"""


def scrape(args):
    """Scrape a download server via its JSON directory listings.

    Args:
        args: parsed CLI namespace providing `domain`, `selector` and
            `formatted`.
    """
    url = args.domain
    selector_path = args.selector
    config_path = f"{selector_path}/config.js"
    data_path = f"{selector_path}/data"
    versions = {}

    def handle_release(target):
        # Collect every profiles.json below one targets/ directory,
        # keyed by its full URL.
        profiles = {}
        with urllib.request.urlopen(f"{target}/?json") as response:
            listing = json.loads(response.read().decode("utf-8"))
            for profile in filter(lambda x: x.endswith("/profiles.json"), listing):
                with urllib.request.urlopen(f"{target}/{profile}") as resp:
                    profiles[f"{target}/{profile}"] = resp.read()
        return profiles

    if not os.path.isfile(config_path):
        print(f"file not found: {config_path}")
        exit(1)

    # fetch release URLs from the server's HTML index
    with urllib.request.urlopen(url) as infile:
        for path in re.findall(r"href=[\"']?([^'\" >]+)", str(infile.read())):
            if not path.startswith("/") and path.endswith("targets/"):
                release = path.strip("/").split("/")[-2]
                download_url = f"{url}/{path}/{{target}}"

                profiles = handle_release(f"{url}/{path}")
                output = merge_profiles(profiles, download_url)

                Path(f"{data_path}/{release}").mkdir(parents=True, exist_ok=True)
                # write overview.json
                with open(f"{data_path}/{release}/overview.json", "w") as outfile:
                    if args.formatted:
                        json.dump(output, outfile, indent="  ", sort_keys=True)
                    else:
                        json.dump(output, outfile, sort_keys=True)

                versions[release.upper()] = f"data/{release}/overview.json"

    update_config(config_path, versions)
"""
Scrape profiles.json using wget (slower but more generic).
Merge into overview.json files.
Update config.js.
"""


def scrape_wget(args):
    """Mirror profiles.json files with wget and merge them locally.

    Args:
        args: parsed CLI namespace providing `domain`, `selector` and
            `formatted`.
    """
    url = args.domain
    selector_path = args.selector
    config_path = f"{selector_path}/config.js"
    data_path = f"{selector_path}/data"
    versions = {}

    with tempfile.TemporaryDirectory() as tmp_dir:
        # download all profiles.json files
        # NOTE(review): url/selector_path are interpolated into a shell
        # command — only safe for trusted CLI input; consider subprocess.run.
        os.system(
            f"wget -c -r -P {tmp_dir} -A 'profiles.json' --reject-regex 'kmods|packages' --no-parent {url}"
        )

        # delete empty folders
        os.system(f"find {tmp_dir}/* -type d -empty -delete")

        # create overview.json files
        for path in glob.glob(f"{tmp_dir}/*/snapshots") + glob.glob(
            f"{tmp_dir}/*/releases/*"
        ):
            release = os.path.basename(path)
            base = path[len(tmp_dir) + 1 :]

            profiles = {}
            for ppath in Path(path).rglob("profiles.json"):
                with open(ppath, "r") as handle:
                    profiles[ppath] = handle.read()

            if len(profiles) == 0:
                continue

            versions[release.upper()] = f"data/{release}/overview.json"
            os.system(f"mkdir -p {selector_path}/data/{release}/")

            output = merge_profiles(profiles, f"https://{base}/targets/{{target}}")
            Path(f"{data_path}/{release}").mkdir(parents=True, exist_ok=True)

            # write overview.json
            with open(f"{data_path}/{release}/overview.json", "w") as outfile:
                if args.formatted:
                    json.dump(output, outfile, indent="  ", sort_keys=True)
                else:
                    json.dump(output, outfile, sort_keys=True)

    update_config(config_path, versions)
"""
Find and merge json files for a single release.
"""


def merge(args):
    """Merge local JSON device files and print the overview to stdout.

    Args:
        args: parsed CLI namespace providing `input_path` (list of folders
            or .json files), `download_url` and `formatted`.
    """
    input_paths = args.input_path
    # OpenWrt JSON device files, {<file-path>: <file-content>}
    profiles = {}

    def add_path(path):
        # Read one device file into the profiles map.
        with open(path, "r") as file:
            profiles[path] = file.read()

    for path in input_paths:
        if os.path.isdir(path):
            for filepath in Path(path).rglob("*.json"):
                add_path(filepath)
        else:
            if not path.endswith(".json"):
                # was: misleading "Folder does not exists" — this branch
                # fires for a non-folder path that is not a .json file
                sys.stderr.write(f"Invalid path (expected folder or .json file): {path}\n")
                sys.exit(1)
            add_path(path)

    output = merge_profiles(profiles, args.download_url)

    if args.formatted:
        json.dump(output, sys.stdout, indent="  ", sort_keys=True)
    else:
        json.dump(output, sys.stdout, sort_keys=True)
def main():
    """Parse command line arguments and dispatch to merge or scrape."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--formatted", action="store_true", help="Output formatted JSON data."
    )
    subparsers = parser.add_subparsers(dest="action", required=True)

    parser_merge = subparsers.add_parser(
        "merge",
        # was: copy-pasted "grid structure" text unrelated to this tool
        help="Merge local OpenWrt JSON device files into an overview.json.",
    )
    parser_merge.add_argument(
        "input_path",
        nargs="+",
        help="Input folder that is traversed for OpenWrt JSON device files.",
    )
    parser_merge.add_argument(
        "--download-url",
        action="store",
        default="",
        help="Link to get the image from. May contain {target}, {version} and {commit}",
    )

    parser_scrape = subparsers.add_parser(
        "scrape",
        # was: copy-pasted "grid structure" text unrelated to this tool
        help="Scrape profiles.json files from a download server and merge them.",
    )
    parser_scrape.add_argument(
        "domain", help="Domain to scrape. E.g. https://downloads.openwrt.org"
    )
    parser_scrape.add_argument("selector", help="Path the config.js file is in.")
    parser_scrape.add_argument(
        "--use-wget", action="store_true", help="Use wget to scrape the site."
    )

    args = parser.parse_args()

    if args.action == "merge":
        merge(args)

    if args.action == "scrape":
        if args.use_wget:
            scrape_wget(args)
        else:
            scrape(args)


if __name__ == "__main__":
    main()