3 Tool to create overview.json files and update the config.js.
6 from pathlib
import Path
# Only profiles.json files with this metadata version are understood.
SUPPORTED_METADATA_VERSION = 1
# Timestamp format used for the "build_date" fields written to overview.json.
BUILD_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

# Guard against too-old interpreters. A bare `assert` would be silently
# stripped under `python -O`, so check explicitly and exit instead.
if sys.version_info < (3, 5):
    sys.exit("Python version too old. Python >=3.5.0 needed.")
def add_profile(output, path, id, target, profile, code=None, build_date=None):
    """Add a single device profile to the overview structure.

    Mutates ``output["models"]`` in place: one entry per title of *profile*.

    Args:
        output: overview dict with at least a "models" key (mutated).
        path: source file path, used only for error messages.
        id: device profile identifier.
        target: target name (e.g. "ath79/generic") or None to take it
            from the profile itself.
        profile: parsed profile dict with "images" and "titles" keys.
        code: version code/commit to store per device, or None.
        build_date: formatted build date string to store, or None.
    """

    def get_title(title):
        # Prefer an explicit "title"; otherwise compose it from the
        # vendor/model/variant fields (missing vendor/variant become "").
        if "title" in title:
            return title["title"]
        else:
            return "{} {} {}".format(
                title.get("vendor", ""), title["model"], title.get("variant", "")
            ).strip()

    # Keep only the image attributes the frontend needs.
    images = []
    for image in profile["images"]:
        images.append({"name": image["name"], "type": image["type"]})

    if target is None:
        target = profile["target"]

    for entry in profile["titles"]:
        title = get_title(entry)

        if len(title) == 0:
            sys.stderr.write("Empty title. Skip title for {} in {}\n".format(id, path))
            continue

        # Some devices are in ar71xx and ath79 but use "TP-LINK" and "TP-Link".
        # E.g: `TP-LINK Archer C7 v5` and `TP-Link Archer C7 v5`.
        # To be able to detect this, we need to make "same" titles identical.
        # (Was a no-op string literal inside the loop; now a real comment.)
        if title.startswith("TP-LINK "):
            title = "TP-Link {}".format(title[8:])

        # device is a duplicate, try to differentiate by target
        if title in output["models"]:
            title = "{} ({})".format(title, target)

        output["models"][title] = {"id": id, "target": target, "images": images}

        if build_date is not None:
            output["models"][title]["build_date"] = build_date

        if code is not None:
            output["models"][title]["code"] = code
# accepts a list of {"file_path": ..., "file_content": ..., "last_modified": ...}
def merge_profiles(profiles, download_url):
    """Merge multiple profiles.json payloads into one overview structure.

    Args:
        profiles: list of dicts with "file_path", "file_content" and
            "last_modified" keys.
        download_url: download URL template stored in the output.

    Returns:
        Overview dict with "version_code", "download_url" and "models" keys,
        or {} if no usable profile was seen.

    Exits the process on a missing mandatory key (KeyError); skips files
    with invalid JSON or an unsupported metadata version.
    """
    # json output data
    output = {}

    for profile in profiles:
        # NOTE: assigned before the try block so the JSONDecodeError handler
        # can reference it; previously it was set after json.loads(), which
        # raised NameError when the very first file contained broken JSON.
        file_path = profile["file_path"]
        try:
            obj = json.loads(profile["file_content"])

            if obj["metadata_version"] != SUPPORTED_METADATA_VERSION:
                sys.stderr.write(
                    "{} has unsupported metadata version: {} => skip\n".format(
                        file_path, obj["metadata_version"]
                    )
                )
                continue

            code = obj.get("version_code", obj.get("version_commit"))
            build_date = profile["last_modified"]

            if "version_code" not in output:
                output = {
                    "version_code": code,
                    "download_url": download_url,
                    "models": {},
                }

            # if we have mixed codes/commits, store in device object
            if output["version_code"] == code:
                code = None

            if "profiles" in obj:
                # new style: one file describes many device profiles
                for id in obj["profiles"]:
                    add_profile(
                        output,
                        file_path,
                        id,
                        obj.get("target"),
                        obj["profiles"][id],
                        code,
                        build_date,
                    )
            else:
                # old style: one file per device
                add_profile(
                    output, file_path, obj["id"], obj["target"], obj, code, build_date
                )
        except json.decoder.JSONDecodeError as e:
            sys.stderr.write("Skip {}\n   {}\n".format(file_path, e))
        except KeyError as e:
            sys.stderr.write("Abort on {}\n   Missing key {}\n".format(file_path, e))
            # sys.exit instead of the site-provided exit() helper
            sys.exit(1)

    return output
def update_config(config_path, versions):
    """Rewrite the `versions: {...}` object inside config.js.

    Args:
        config_path: path to the config.js file (read and rewritten).
        versions: dict mapping release name -> overview.json path; its
            Python repr is substituted verbatim (single quotes are valid JS).
    """
    content = ""
    with open(str(config_path), "r", encoding="utf-8") as file:
        content = file.read()

    # Replace the existing (possibly multi-line) versions object;
    # [^}]* deliberately spans newlines.
    content = re.sub("versions:[\\s]*{[^}]*}", "versions: {}".format(versions), content)

    # Write back with explicit UTF-8: the file was read as UTF-8, but was
    # previously written with the locale default encoding ("w+"), which
    # breaks on non-UTF-8 systems.
    with open(str(config_path), "w", encoding="utf-8") as file:
        file.write(content)
def scrape(args):
    """Scrape profiles.json files from a download server.

    Uses directory listings like
    https://downloads.openwrt.org/releases/19.07.3/targets/?json
    to find per-target profiles.json files, merges them into per-release
    overview.json files under <www_path>/data/ and updates config.js.
    """
    url = args.domain
    www_path = args.www_path
    config_path = "{}/config.js".format(www_path)
    data_path = "{}/data".format(www_path)
    versions = {}

    def handle_release(target):
        # Fetch every profiles.json below one release's targets/ URL.
        profiles = []
        with urllib.request.urlopen("{}/?json".format(target)) as file:
            array = json.loads(file.read().decode("utf-8"))
            for profile in filter(lambda x: x.endswith("/profiles.json"), array):
                with urllib.request.urlopen("{}/{}".format(target, profile)) as file:
                    # Derive the build date from the HTTP Last-Modified header.
                    last_modified = datetime.datetime(
                        *email.utils.parsedate(file.headers.get("last-modified"))[:6]
                    ).strftime(BUILD_DATE_FORMAT)
                    profiles.append(
                        {
                            "file_path": "{}/{}".format(target, profile),
                            "file_content": file.read().decode("utf-8"),
                            "last_modified": last_modified,
                        }
                    )
        return profiles

    if not os.path.isfile(config_path):
        print("file not found: {}".format(config_path))
        # sys.exit instead of the site-provided exit() helper
        sys.exit(1)

    # fetch release URLs from the top-level directory listing
    with urllib.request.urlopen(url) as infile:
        for path in re.findall(r"href=[\"']?([^'\" >]+)", str(infile.read())):
            if not path.startswith("/") and path.endswith("targets/"):
                release = path.strip("/").split("/")[-2]
                download_url = "{}/{}/{{target}}".format(url, path)

                profiles = handle_release("{}/{}".format(url, path))
                output = merge_profiles(profiles, download_url)

                os.makedirs("{}/{}".format(data_path, release), exist_ok=True)
                # write overview.json
                with open(
                    "{}/{}/overview.json".format(data_path, release), "w"
                ) as outfile:
                    if args.formatted:
                        json.dump(output, outfile, indent="  ", sort_keys=True)
                    else:
                        json.dump(output, outfile, sort_keys=True)

                versions[release] = "data/{}/overview.json".format(release)

    update_config(config_path, versions)
def scrape_wget(args):
    """Scrape profiles.json using wget (slower but more generic).

    Mirrors the profiles.json files into a temporary directory, merges
    them into per-release overview.json files and updates config.js.
    """
    url = args.domain
    www_path = args.www_path
    config_path = "{}/config.js".format(www_path)
    data_path = "{}/data".format(www_path)
    versions = {}

    with tempfile.TemporaryDirectory() as tmp_dir:
        # download all profiles.json files
        # NOTE(review): os.system builds a shell command from CLI input;
        # fine for a trusted operator-supplied URL, unsafe otherwise.
        os.system(
            "wget -c -r -P {} -A 'profiles.json' --reject-regex 'kmods|packages' --no-parent {}".format(
                tmp_dir, url
            )
        )

        # delete empty folders
        os.system("find {}/* -type d -empty -delete".format(tmp_dir))

        # create overview.json files
        release_dirs = glob.glob("{}/*/snapshots".format(tmp_dir)) + glob.glob(
            "{}/*/releases/*".format(tmp_dir)
        )
        for release_dir in release_dirs:
            release = os.path.basename(release_dir)
            base = release_dir[len(tmp_dir) + 1 :]

            collected = []
            for json_file in Path(release_dir).rglob("profiles.json"):
                with open(str(json_file), "r", encoding="utf-8") as handle:
                    # we assume local timezone is UTC/GMT
                    modified = datetime.datetime.fromtimestamp(
                        os.path.getmtime(json_file)
                    ).strftime(BUILD_DATE_FORMAT)
                    collected.append(
                        {
                            "file_path": str(json_file),
                            "file_content": handle.read(),
                            "last_modified": modified,
                        }
                    )

            if len(collected) == 0:
                continue

            versions[release] = "data/{}/overview.json".format(release)

            output = merge_profiles(
                collected, "https://{}/targets/{{target}}".format(base)
            )
            os.makedirs("{}/{}".format(data_path, release), exist_ok=True)

            # write overview.json
            with open("{}/{}/overview.json".format(data_path, release), "w") as outfile:
                if args.formatted:
                    json.dump(output, outfile, indent="  ", sort_keys=True)
                else:
                    json.dump(output, outfile, sort_keys=True)

    update_config(config_path, versions)
def merge(args):
    """Find and merge JSON files for a single release.

    Collects OpenWrt JSON device files from the given input paths and
    dumps the merged overview to stdout.
    """
    input_paths = args.input_path
    # OpenWrt JSON device files
    profiles = []

    def add_path(path):
        # Read one JSON file; its mtime (as UTC) becomes the build date.
        with open(str(path), "r", encoding="utf-8") as file:
            last_modified = time.strftime(
                BUILD_DATE_FORMAT, time.gmtime(os.path.getmtime(str(path)))
            )
            profiles.append(
                {
                    "file_path": str(path),
                    "file_content": file.read(),
                    "last_modified": last_modified,
                }
            )

    for path in input_paths:
        if os.path.isdir(path):
            for filepath in Path(path).rglob("*.json"):
                add_path(filepath)
        else:
            if not path.endswith(".json"):
                # fixed message: this branch means the argument is neither a
                # directory nor a .json file (old text claimed a missing folder
                # and was ungrammatical: "Folder does not exists")
                sys.stderr.write("Not a directory or .json file: {}\n".format(path))
                sys.exit(1)
            add_path(path)

    output = merge_profiles(profiles, args.download_url)

    if args.formatted:
        json.dump(output, sys.stdout, indent="  ", sort_keys=True)
    else:
        json.dump(output, sys.stdout, sort_keys=True)
def scan(args):
    """Scan a local directory for releases with profiles.json.

    Merge them into overview.json files and update config.js.
    """
    # firmware selector config
    config_path = "{}/config.js".format(args.www_path)
    # the overview.json files are placed here
    data_path = "{}/data".format(args.www_path)
    versions = {}

    # args.images_path => args.releases_path
    # group every profiles.json by the release it belongs to
    releases = {}
    for profiles_file in Path(args.images_path).rglob("profiles.json"):
        with open(str(profiles_file), "r", encoding="utf-8") as handle:
            raw = handle.read()
            meta = json.loads(raw)
            modified = time.strftime(
                BUILD_DATE_FORMAT,
                time.gmtime(os.path.getmtime(str(profiles_file))),
            )
            releases.setdefault(meta["version_number"], []).append(
                {
                    "file_path": str(profiles_file),
                    "file_content": raw,
                    "last_modified": modified,
                }
            )

    for release, profiles in releases.items():
        output = merge_profiles(profiles, args.download_url)

        versions[release] = "data/{}/overview.json".format(release)
        os.makedirs("{}/{}".format(data_path, release), exist_ok=True)

        # write overview.json
        with open("{}/{}/overview.json".format(data_path, release), "w") as outfile:
            if args.formatted:
                json.dump(output, outfile, indent="  ", sort_keys=True)
            else:
                json.dump(output, outfile, sort_keys=True)

    update_config(config_path, versions)
def main():
    """Parse the command line and dispatch to the chosen sub-command."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--formatted", action="store_true", help="Output formatted JSON data."
    )

    subparsers = parser.add_subparsers(dest="action")
    subparsers.required = True

    merge_cmd = subparsers.add_parser(
        "merge", help="Search for profiles.json files and output an overview.json."
    )
    merge_cmd.add_argument(
        "input_path",
        nargs="+",
        help="Input folder that is traversed for OpenWrt JSON device files.",
    )
    merge_cmd.add_argument(
        "--download-url",
        action="store",
        default="",
        help=(
            "Link to get the image from. May contain {target} (replaced by e.g. "
            "ath79/generic), {version} (replace by the version key from config.js) "
            "and {commit} (git commit in hex notation)."
        ),
    )

    scrape_cmd = subparsers.add_parser("scrape", help="Scrape webpage for releases.")
    scrape_cmd.add_argument(
        "domain", help="Domain to scrape. E.g. https://downloads.openwrt.org"
    )
    scrape_cmd.add_argument("www_path", help="Path the config.js file is in.")
    scrape_cmd.add_argument(
        "--use-wget", action="store_true", help="Use wget to scrape the site."
    )

    scan_cmd = subparsers.add_parser("scan", help="Scan directory for releases.")
    scan_cmd.add_argument(
        "download_url", help="Download for images. E.g. https://downloads.openwrt.org"
    )
    scan_cmd.add_argument("images_path", help="Directory to scan for releases.")
    scan_cmd.add_argument("www_path", help="Path the config.js file is in.")

    args = parser.parse_args()

    # the sub-commands are mutually exclusive, so a single chain suffices
    if args.action == "merge":
        merge(args)
    elif args.action == "scan":
        scan(args)
    elif args.action == "scrape":
        if args.use_wget:
            scrape_wget(args)
        else:
            scrape(args)


if __name__ == "__main__":
    main()