3 Tool to create overview.json files and update the config.js.
6 from pathlib
import Path
# Only profiles.json files with this metadata_version are understood.
SUPPORTED_METADATA_VERSION = 1
# Timestamp format used for the per-model "build_date" field.
BUILD_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

# Refuse to run on interpreters that are too old. A bare `assert` would be
# stripped when Python runs with -O, so raise/exit explicitly instead.
if sys.version_info < (3, 5):
    sys.exit("Python version too old. Python >=3.5.0 needed.")
def add_profile(output, path, id, target, profile, code=None, build_date=None):
    """Register every title of *profile* under output["models"].

    output: overview dict with at least a "models" mapping; mutated in place.
    path: source profiles.json path, only used in warning messages.
    id: profile identifier (name shadows the builtin; kept for callers).
    target: ignored -- it is overwritten from profile["target"] below.
    profile: decoded profiles.json entry with "images", "target", "titles".
    code: version code/commit to attach to each model, or None.
    build_date: last-modified timestamp to attach to each model, or None.
    """

    def get_title(title):
        # Either an explicit "title" or "<vendor> <model> <variant>".
        if "title" in title:
            return title["title"]
        return "{} {} {}".format(
            title.get("vendor", ""), title["model"], title.get("variant", "")
        ).strip()

    # keep only name/type of each image entry
    images = [{"name": img["name"], "type": img["type"]} for img in profile["images"]]

    # NOTE(review): the target argument is discarded; the profile's own
    # target wins. Preserved as-is to keep existing behaviour.
    target = profile["target"]

    for entry in profile["titles"]:
        title = get_title(entry)

        if len(title) == 0:
            sys.stderr.write("Empty title. Skip title for {} in {}\n".format(id, path))
            continue

        # Some devices are in ar71xx and ath79. But use TP-LINK" and "TP-Link".
        # E.g: `TP-LINK Archer C7 v5` and `TP-Link Archer C7 v5`
        # To be able to detect this, we need to make "same" titles identical.
        if title.startswith("TP-LINK "):
            title = "TP-Link {}".format(title[8:])

        # device is a duplicate, try to differentiate by target
        if title in output["models"]:
            title = "{} ({})".format(title, target)

        model = {"id": id, "target": target, "images": images}
        if build_date is not None:
            model["build_date"] = build_date
        if code is not None:
            model["code"] = code
        output["models"][title] = model
# accepts {<file-path>: <file-content>}
def merge_profiles(profiles, download_url):
    """Combine several profiles.json payloads into one overview dict.

    profiles: iterable of dicts carrying "file_path", "file_content"
        (raw JSON text) and "last_modified".
    download_url: stored verbatim in the resulting overview.
    Returns {} when no usable profile was seen, otherwise a dict with
    "version_code", "download_url" and "models".
    """
    output = {}

    for item in profiles:
        obj = json.loads(item["file_content"])

        if obj["metadata_version"] != SUPPORTED_METADATA_VERSION:
            sys.stderr.write(
                "{} has unsupported metadata version: {} => skip\n".format(
                    item["file_path"], obj["metadata_version"]
                )
            )
            continue

        code = obj.get("version_code", obj.get("version_commit"))
        file_path = item["file_path"]
        build_date = item["last_modified"]

        # the first usable file defines the overview skeleton
        if "version_code" not in output:
            output = {
                "version_code": code,
                "download_url": download_url,
                "models": {},
            }

        # if we have mixed codes/commits, store in device object
        if output["version_code"] == code:
            code = None

        try:
            if "profiles" in obj:
                # combined profiles.json describing several devices
                for id in obj["profiles"]:
                    add_profile(
                        output,
                        file_path,
                        id,
                        obj["target"],
                        obj["profiles"][id],
                        code,
                        build_date,
                    )
            else:
                add_profile(
                    output, file_path, obj["id"], obj["target"], obj, code, build_date
                )
        except json.decoder.JSONDecodeError as e:
            sys.stderr.write("Skip {}\n {}\n".format(file_path, e))
        except KeyError as e:
            sys.stderr.write("Abort on {}\n Missing key {}\n".format(file_path, e))
            exit(1)

    return output
def update_config(www_path, versions):
    """Rewrite the `versions` mapping inside <www_path>/config.js.

    www_path: directory that contains config.js.
    versions: dict of release name -> overview.json path; its repr replaces
        the existing `versions: {...}` literal in config.js.

    Writes a warning to stderr (and changes nothing) if config.js is missing.
    """
    config_path = "{}/config.js".format(www_path)

    if os.path.isfile(config_path):
        with open(str(config_path), "r", encoding="utf-8") as file:
            content = file.read()

        # Use a callable replacement so backslashes/group refs in the
        # versions repr are inserted literally, not re-interpreted by re.sub.
        content = re.sub(
            "versions:[\\s]*{[^}]*}",
            lambda m: "versions: {}".format(versions),
            content,
        )

        # Fix: the file is read as UTF-8 above, so write it back as UTF-8
        # too instead of the platform's locale default encoding.
        with open(str(config_path), "w+", encoding="utf-8") as file:
            file.write(content)
    else:
        sys.stderr.write("Warning: File not found: {}\n".format(config_path))
def scrape(args):
    """
    Scrape profiles.json using links like
    https://downloads.openwrt.org/releases/19.07.3/targets/?json
    Merge into overview.json files and update config.js.
    """
    url = args.domain
    data_path = "{}/data".format(args.www_path)
    versions = {}

    def handle_release(target):
        # Fetch every profiles.json listed by the target's ?json index.
        profiles = []
        with urllib.request.urlopen("{}/?json".format(target)) as file:
            array = json.loads(file.read().decode("utf-8"))
            for profile in filter(lambda x: x.endswith("/profiles.json"), array):
                with urllib.request.urlopen("{}/{}".format(target, profile)) as file:
                    # convert the HTTP Last-Modified header to our own format
                    last_modified = datetime.datetime(
                        *email.utils.parsedate(file.headers.get("last-modified"))[:6]
                    ).strftime(BUILD_DATE_FORMAT)
                    profiles.append(
                        {
                            "file_path": "{}/{}".format(target, profile),
                            "file_content": file.read().decode("utf-8"),
                            "last_modified": last_modified,
                        }
                    )
        return profiles

    # fetch the release index page and follow every */targets/ link on it
    with urllib.request.urlopen(url) as infile:
        # NOTE(review): str() on the bytes yields a "b'...'" repr; the href
        # regex still finds links inside it, so behaviour is kept as-is.
        for path in re.findall(r"href=[\"']?([^'\" >]+)", str(infile.read())):
            if not path.startswith("/") and path.endswith("targets/"):
                release = path.strip("/").split("/")[-2]
                download_url = "{}/{}{{target}}".format(url, path)

                profiles = handle_release("{}/{}".format(url, path))
                output = merge_profiles(profiles, download_url)

                os.makedirs("{}/{}".format(data_path, release), exist_ok=True)
                # write overview.json
                with open(
                    "{}/{}/overview.json".format(data_path, release), "w"
                ) as outfile:
                    if args.formatted:
                        json.dump(output, outfile, indent="  ", sort_keys=True)
                    else:
                        json.dump(output, outfile, sort_keys=True)

                versions[release] = "data/{}/overview.json".format(release)

    update_config(args.www_path, versions)
def scrape_wget(args):
    """
    Scrape profiles.json using wget (slower but more generic).
    Merge into overview.json files and update config.js.
    """
    url = args.domain
    data_path = "{}/data".format(args.www_path)
    versions = {}

    with tempfile.TemporaryDirectory() as tmp_dir:
        # download all profiles.json files
        # NOTE(review): the domain comes straight from the command line and
        # is interpolated into a shell command; acceptable for a maintainer
        # tool, unsafe for untrusted input.
        os.system(
            "wget -c -r -P {} -A 'profiles.json' --reject-regex 'kmods|packages' --no-parent {}".format(
                tmp_dir, url
            )
        )

        # delete empty folders
        os.system("find {}/* -type d -empty -delete".format(tmp_dir))

        # create overview.json files
        for path in glob.glob("{}/*/snapshots".format(tmp_dir)) + glob.glob(
            "{}/*/releases/*".format(tmp_dir)
        ):
            release = os.path.basename(path)
            base = path[len(tmp_dir) + 1 :]

            profiles = []
            for ppath in Path(path).rglob("profiles.json"):
                with open(str(ppath), "r", encoding="utf-8") as file:
                    # we assume local timezone is UTC/GMT
                    last_modified = datetime.datetime.fromtimestamp(
                        os.path.getmtime(ppath)
                    ).strftime(BUILD_DATE_FORMAT)
                    profiles.append(
                        {
                            "file_path": str(ppath),
                            "file_content": file.read(),
                            "last_modified": last_modified,
                        }
                    )

            if len(profiles) == 0:
                continue

            versions[release] = "data/{}/overview.json".format(release)

            output = merge_profiles(
                profiles, "https://{}/targets/{{target}}".format(base)
            )
            os.makedirs("{}/{}".format(data_path, release), exist_ok=True)

            # write overview.json
            with open("{}/{}/overview.json".format(data_path, release), "w") as outfile:
                if args.formatted:
                    json.dump(output, outfile, indent="  ", sort_keys=True)
                else:
                    json.dump(output, outfile, sort_keys=True)

    update_config(args.www_path, versions)
def merge(args):
    """
    Find and merge json files for a single release.
    Writes the combined overview.json to stdout.
    """
    input_paths = args.input_path
    # OpenWrt JSON device files
    profiles = []

    def add_path(path):
        # record one device file together with its mtime (interpreted as UTC)
        with open(str(path), "r", encoding="utf-8") as file:
            last_modified = time.strftime(
                BUILD_DATE_FORMAT, time.gmtime(os.path.getmtime(str(path)))
            )
            profiles.append(
                {
                    "file_path": str(path),
                    "file_content": file.read(),
                    "last_modified": last_modified,
                }
            )

    for path in input_paths:
        if os.path.isdir(path):
            # directory: collect every *.json below it
            for filepath in Path(path).rglob("*.json"):
                add_path(filepath)
        else:
            if not path.endswith(".json"):
                sys.stderr.write("Folder does not exists: {}\n".format(path))
                exit(1)
            add_path(path)

    output = merge_profiles(profiles, args.download_url)

    if args.formatted:
        json.dump(output, sys.stdout, indent="  ", sort_keys=True)
    else:
        json.dump(output, sys.stdout, sort_keys=True)
def scan(args):
    """
    Scan local directory for releases with profiles.json.
    Merge into overview.json files and update config.js.
    """
    # the overview.json files are placed here
    data_path = "{}/data".format(args.www_path)
    versions = {}

    # args.images_path => args.releases_path
    releases = {}
    for path in Path(args.images_path).rglob("profiles.json"):
        with open(str(path), "r", encoding="utf-8") as file:
            content = file.read()
            obj = json.loads(content)
            release = obj["version_number"]
            last_modified = time.strftime(
                BUILD_DATE_FORMAT, time.gmtime(os.path.getmtime(str(path)))
            )
            releases.setdefault(release, []).append(
                {
                    "file_path": str(path),
                    "file_content": content,
                    "last_modified": last_modified,
                }
            )

    def replace_base(releases, target_release, download_url):
        """
        Replace {base} variable in download URL with the intersection
        of all profile.json paths. E.g.:
          ../tmp/releases/18.06.8/targets => base is releases/18.06.8/targets
          ../tmp/snapshots/targets => base in snapshots/targets
        """
        if "{base}" in download_url:
            # release => base path (of profiles.json locations)
            paths = {}
            for release, profiles in releases.items():
                profile_paths = [profile["file_path"] for profile in profiles]
                paths[release] = os.path.commonpath(profile_paths)
            # base path of all releases
            release_path_base = os.path.commonpath(paths.values())
            # get path intersection
            base = str(paths[target_release])[len(release_path_base) + 1 :]
            return download_url.replace("{base}", base)
        return download_url

    for release, profiles in releases.items():
        download_url = replace_base(releases, release, args.download_url)
        output = merge_profiles(profiles, download_url)

        versions[release] = "data/{}/overview.json".format(release)
        os.makedirs("{}/{}".format(data_path, release), exist_ok=True)

        # write overview.json
        with open("{}/{}/overview.json".format(data_path, release), "w") as outfile:
            if args.formatted:
                json.dump(output, outfile, indent="  ", sort_keys=True)
            else:
                json.dump(output, outfile, sort_keys=True)

    update_config(args.www_path, versions)
def main():
    """Parse the command line and dispatch to merge/scan/scrape."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--formatted", action="store_true", help="Output formatted JSON data."
    )

    subparsers = parser.add_subparsers(dest="action")
    subparsers.required = True

    parser_merge = subparsers.add_parser(
        "merge", help="Search for profiles.json files and output an overview.json."
    )
    parser_merge.add_argument(
        "input_path",
        nargs="+",
        help="Input folder that is traversed for OpenWrt JSON device files.",
    )
    parser_merge.add_argument(
        "--download-url",
        action="store",
        default="",
        help="Link to get the image from. May contain {target} (replaced by e.g. ath79/generic), {version} (replace by the version key from config.js) and {commit} (git commit in hex notation).",
    )

    parser_scrape = subparsers.add_parser("scrape", help="Scrape webpage for releases.")
    parser_scrape.add_argument(
        "domain", help="Domain to scrape. E.g. https://downloads.openwrt.org"
    )
    parser_scrape.add_argument("www_path", help="Path the config.js file is in.")
    parser_scrape.add_argument(
        "--use-wget", action="store_true", help="Use wget to scrape the site."
    )

    parser_scan = subparsers.add_parser("scan", help="Scan directory for releases.")
    parser_scan.add_argument(
        "download_url", help="Download for images. E.g. https://downloads.openwrt.org"
    )
    parser_scan.add_argument("images_path", help="Directory to scan for releases.")
    parser_scan.add_argument("www_path", help="Path the config.js file is in.")

    args = parser.parse_args()

    if args.action == "merge":
        merge(args)

    if args.action == "scan":
        scan(args)

    if args.action == "scrape":
        if args.use_wget:
            scrape_wget(args)
        else:
            scrape(args)


if __name__ == "__main__":
    main()