import os, json, glob, stat
def _fmt_time(t):
    """Format a duration *t* given in nanoseconds with a human-readable unit."""
    if t < 1000:
        return f"{t:.1f} ns"
    if t < 1_000_000:
        return f"{t / 1000:.1f} us"
    if t < 1_000_000_000:
        return f"{t / 1_000_000:.2f} ms"
    return f"{t / 1_000_000_000:.2f} s"


def run():
    """Collect Criterion benchmark estimates and write a Markdown table.

    Scans ``target/criterion/worst-case`` for benchmark directories matching
    the prime-factor benchmarks, keeps only the most recent estimate per
    bitsize (by ``estimates.json`` mtime), and writes a Markdown table to
    ``table_out.txt``.  Prints a notice and returns early when the Criterion
    output directory does not exist.
    """
    base_dir = "target/criterion/worst-case"
    if not os.path.exists(base_dir):
        print("No target dir", base_dir)
        return
    # Only the benchmark groups we care about.
    dirs = [d for d in os.listdir(base_dir)
            if "prime-factors for" in d or "lowest prime" in d]
    res = []
    for d in dirs:
        est_file = os.path.join(base_dir, d, "new", "estimates.json")
        if not os.path.exists(est_file):
            continue
        try:
            with open(est_file, "r") as f:
                data = json.load(f)
            mtime = os.stat(est_file).st_mtime
            # "lowest prime" benchmarks are the 2-bit case; otherwise parse
            # the bitsize out of directory names like "... 64-bit ...".
            if "lowest prime" in d:
                bitsize = "2"
            else:
                bitsize = ""
                parts = d.split("-bit")
                if len(parts) > 1:
                    bitsize = parts[0].split()[-1]
            point = data["mean"]["point_estimate"]
            lower = data["mean"]["confidence_interval"]["lower_bound"]
            upper = data["mean"]["confidence_interval"]["upper_bound"]
            # Unparseable bitsizes sort last (sentinel 999).
            res.append((int(bitsize) if bitsize.isdigit() else 999,
                        point, lower, upper, mtime))
        except (OSError, KeyError, ValueError) as e:
            # Skip malformed or unreadable estimate files, but say so rather
            # than silently swallowing every exception (was: bare `pass`,
            # which also hid genuine programming errors).
            print("Skipping", est_file, "-", e)
    # Keep only the most recent result per bitsize (compare stored mtime).
    latest_res = {}
    for bits, point, lower, upper, mtime in res:
        if bits not in latest_res or mtime > latest_res[bits][3]:
            latest_res[bits] = (point, lower, upper, mtime)
    res_merged = sorted((k, v[0], v[1], v[2]) for k, v in latest_res.items())
    with open("table_out.txt", "w") as f:
        # BUG FIX: the original wrote "\\n" (a literal backslash + n) instead
        # of a newline, so the whole table came out as one garbled line.
        f.write("| Bitsize | Average Time | Min .. Max |\n")
        f.write("|---------|--------------|-------------------|\n")
        for bits, point, lower, upper in res_merged:
            p_str = _fmt_time(point)
            # BUG FIX: the lower bound used to have its unit space stripped
            # ("1.4us"), inconsistent with every other time in the table.
            l_str = _fmt_time(lower)
            u_str = _fmt_time(upper)
            f.write(f"| {bits:<7} | {p_str:<12} | {l_str} .. {u_str:<5} |\n")
if __name__ == "__main__":
    # Guard the entry point so importing this module does not trigger a
    # full benchmark-directory scan as a side effect.
    run()