author      J08nY  2025-07-31 14:09:19 +0200
committer   J08nY  2025-07-31 14:10:15 +0200
commit      7754e59a7b1affec5db915bb5af8ab45d28f2953 (patch)
tree        42585532fef083ccb1eb2f7516562e9d680a1466
parent      8b4ec7a64a4cd62e3d70a70fdcaeebbf91e1cb73 (diff)
download    ECTester-7754e59a7b1affec5db915bb5af8ab45d28f2953.tar.gz
            ECTester-7754e59a7b1affec5db915bb5af8ab45d28f2953.tar.zst
            ECTester-7754e59a7b1affec5db915bb5af8ab45d28f2953.zip
Reorganize error computation.
-rw-r--r--  analysis/scalarmults/simulate.ipynb  44
1 file changed, 21 insertions(+), 23 deletions(-)
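
The change below replaces the earlier two-pass flow (accumulate all results in a multiples_mults dict, then submit tasks and write one probs_* file) with a streaming pass: each (mult, vals) record is read from the chunk pickle, tasks are submitted per ErrorModel combination, results are pickled to a per-chunk probs_* output file, and gc.collect() is called to keep memory bounded. A minimal sketch of that streaming read/process/write pattern, assuming nothing beyond the standard library (the function name stream_reprocess and the process callback are illustrative, not part of the notebook):

    import gc
    import pickle

    def stream_reprocess(in_fname, out_fname, process):
        # Read one pickled record at a time and write its processed form out,
        # instead of accumulating everything in a dict first.
        with open(in_fname, "rb") as f, open(out_fname, "wb") as h:
            while True:
                try:
                    record = pickle.load(f)      # e.g. a (mult, vals) pair
                except EOFError:
                    break                        # end of the chunk file
                pickle.dump(process(record), h)  # write to h, not the read handle
                gc.collect()                     # free the record before the next load
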
diff --git a/analysis/scalarmults/simulate.ipynb b/analysis/scalarmults/simulate.ipynb
index b5b57bb..47c907d 100644
--- a/analysis/scalarmults/simulate.ipynb
+++ b/analysis/scalarmults/simulate.ipynb
@@ -16,6 +16,7 @@
"outputs": [],
"source": [
"import itertools\n",
+ "import gc\n",
"import glob\n",
"import pickle\n",
"import random\n",
@@ -213,7 +214,8 @@
" print(\"Error!\", error)\n",
" continue\n",
" res = future.result()\n",
- " pickle.dump((mult, res), h)"
+ " pickle.dump((mult, res), h)\n",
+ " gc.collect()"
]
},
{
@@ -232,42 +234,38 @@
"outputs": [],
"source": [
"with TaskExecutor(max_workers=num_workers, initializer=silence) as pool:\n",
- " for fname in glob.glob(f\"multiples_{bits}_{'init' if use_init else 'noinit'}_{'mult' if use_multiply else 'nomult'}_chunk*.pickle\"):\n",
- " match = re.match(\"multiples_[0-9]+_(?P<init>(?:no)?init)_(?P<mult>(?:no)?mult)_chunk(?P<id>[0-9a-f]+).pickle\", fname)\n",
- " use_init = match.group(\"init\") == \"init\"\n",
- " use_multiply = match.group(\"mult\") == \"mult\"\n",
+ " for in_fname in glob.glob(f\"multiples_{bits}_{'init' if use_init else 'noinit'}_{'mult' if use_multiply else 'nomult'}_chunk*.pickle\"):\n",
+ " match = re.match(\"multiples_(?P<bits>[0-9]+)_(?P<init>(?:no)?init)_(?P<mult>(?:no)?mult)_chunk(?P<id>[0-9a-f]+).pickle\", in_fname)\n",
+ " bits = match.group(\"bits\")\n",
+ " use_init = match.group(\"init\")\n",
+ " use_multiply = match.group(\"mult\")\n",
" chunk_id = match.group(\"id\")\n",
- " multiples_mults = {} \n",
- " with open(fname, \"rb\") as f:\n",
+ " out_fname = f\"probs_{bits}_{use_init}_{use_multiply}_chunk{chunk_id}.pickle\"\n",
+ " with open(in_fname, \"rb\") as f, open(out_fname, \"wb\") as h:\n",
" bar = tqdm(total=len(all_mults_with_ctr), desc=f\"Loading chunk {chunk_id}.\")\n",
" while True:\n",
" try:\n",
" mult, vals = pickle.load(f)\n",
" bar.update(1)\n",
- " if mult not in multiples_mults:\n",
- " multiples_mults[mult] = vals\n",
- " else:\n",
- " multiples_mults[mult].merge(vals)\n",
+ " for checks in powerset(checks_add):\n",
+ " for precomp_to_affine in (True, False):\n",
+ " for check_condition in (\"all\", \"necessary\"):\n",
+ " error_model = ErrorModel(checks, check_condition=check_condition, precomp_to_affine=precomp_to_affine)\n",
+ " full = mult.with_error_model(error_model)\n",
+ " pool.submit_task(full,\n",
+ " evaluate_multiples,\n",
+ " full, vals, divisor_map[\"all\"])\n",
+ " gc.collect()\n",
" except EOFError:\n",
" break\n",
- " for mult, res in multiples_mults.items():\n",
- " for checks in powerset(checks_add):\n",
- " for precomp_to_affine in (True, False):\n",
- " for check_condition in (\"all\", \"necessary\"):\n",
- " error_model = ErrorModel(checks, check_condition=check_condition, precomp_to_affine=precomp_to_affine)\n",
- " full = mult.with_error_model(error_model)\n",
- " pool.submit_task(full,\n",
- " evaluate_multiples,\n",
- " full, res, divisor_map[\"all\"])\n",
- " fname = f\"probs_{use_init}_{use_mult}_chunk{chunk_id}.pickle\"\n",
- " with open(fname, \"wb\") as f:\n",
" for full, future in tqdm(pool.as_completed(), desc=\"Computing errors.\", total=len(pool.tasks)):\n",
" print(f\"Got {full}.\")\n",
" if error := future.exception():\n",
" print(\"Error!\", error)\n",
" continue\n",
" res = future.result()\n",
- " pickle.dump((full, res), f)"
+ " pickle.dump((full, res), f)\n",
+ " gc.collect()"
]
},
{