author     J08nY  2025-03-15 00:53:36 +0100
committer  J08nY  2025-04-16 12:25:06 +0200
commit     2fc92e2b502a39c4c44cbe3a07f90c6a51448ba2 (patch)
tree       c6140f4fb0fd940f74f8063a7b5348ba0c581896
parent     d1e7eae1889a054d2aca398c0b8537de94263556 (diff)
download   ECTester-2fc92e2b502a39c4c44cbe3a07f90c6a51448ba2.tar.gz
           ECTester-2fc92e2b502a39c4c44cbe3a07f90c6a51448ba2.tar.zst
           ECTester-2fc92e2b502a39c4c44cbe3a07f90c6a51448ba2.zip
Add simulate script.
-rw-r--r--  epare/simulate.py  |  123
1 file changed, 123 insertions, 0 deletions
diff --git a/epare/simulate.py b/epare/simulate.py
new file mode 100644
index 0000000..df14748
--- /dev/null
+++ b/epare/simulate.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# # Simulating EPA-RE using points of low order
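+#
+# This script samples which multiples of the input point various scalar
+# multipliers compute, with and without scalar-randomization countermeasures,
+# and pickles the resulting distributions per multiplier for later analysis.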
+
+
+import pickle
+import itertools
+import glob
+import random
+import sys
+import time
+
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+
+from collections import Counter
+
+from pathlib import Path
+from random import randint, randbytes, shuffle
+from typing import Type, Any, Tuple
+
+from bs4 import BeautifulSoup
+from tqdm.auto import tqdm, trange
+
+from pyecsca.ec.params import DomainParameters, get_params
+from pyecsca.ec.mult import *
+from pyecsca.sca.re.rpa import multiples_computed
+from pyecsca.misc.utils import TaskExecutor
+
+from common import *
+
+def get_general_multiples(bits: int, samples: int = 1000) -> MultResults:
+    """Sample one uniformly random full-size scalar per sample."""
+    results = []
+    for _ in range(samples):
+        big_scalar = randint(1, 2**bits)
+        results.append({big_scalar})
+    return MultResults(results, samples)
+
+def get_general_n_multiples(bits: int, n: int, samples: int = 1000) -> MultResults:
+    """Sample n random scalars of random bit-lengths up to `bits` per sample."""
+    results = []
+    for _ in range(samples):
+        smult = set()
+        for _ in range(n):
+            b = randint(1, bits)
+            smult.add(randint(2**b, 2**(b + 1)))
+        results.append(smult)
+    return MultResults(results, samples)
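+
+# Hypothetical usage of the helpers above (illustrative values only, not part
+# of the pipeline below):
+#
+#     uninformed = get_general_multiples(256, samples=100)
+#     uninformed_n = get_general_n_multiples(256, 5, samples=100)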
+
+def get_small_scalar_multiples(mult: MultIdent,
+                               params: DomainParameters,
+                               bits: int,
+                               samples: int = 1000,
+                               use_init: bool = True,
+                               use_multiply: bool = True,
+                               seed: str | bytes | None = None,
+                               kind: str = "precomp+necessary") -> Tuple[MultResults, float]:
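+    """Collect the multiples computed by `mult` for the sampled scalars, plus the elapsed time."""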
+
+ duration = -time.perf_counter()
+ results = []
+ if seed is not None:
+ random.seed(seed)
+
+ # If no countermeasure is used, we have fully random scalars.
+ # Otherwise, fix one per chunk.
+ if mult.countermeasure is None:
+ scalars = [random.randint(1, 2**bits) for _ in range(samples)]
+ else:
+ one = random.randint(1, 2**bits)
+ scalars = [one for _ in range(samples)]
+
+ for scalar in scalars:
+        # Store the multiples as a list (rather than a set) to save memory.
+        results.append(list(multiples_computed(scalar, params, mult.klass, mult.partial,
+                                               use_init, use_multiply, kind=kind)))
+ duration += time.perf_counter()
+ return MultResults(results, samples), duration
+
+
+if __name__ == "__main__":
+ category = "secg"
+ curve = "secp256r1"
+ params = get_params(category, curve, "projective")
+ num_workers = int(sys.argv[1]) if len(sys.argv) > 1 else 32
+ bits = params.order.bit_length()
+ samples = int(sys.argv[2]) if len(sys.argv) > 2 else 100
+    selected_mults = list(all_mults)  # Copy, so shuffling does not mutate the shared list.
+    shuffle(selected_mults)
+
+ print(f"Running on {num_workers} cores, doing {samples} samples.")
+
+ multiples_mults = {}
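+    # Random chunk ID, presumably so that separate runs write distinct output files.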
+ chunk_id = randbytes(6).hex()
+ with TaskExecutor(max_workers=num_workers) as pool:
+ for mult in selected_mults:
+ for countermeasure in (None, "gsr", "additive", "multiplicative", "euclidean"):
+ mwc = mult.with_countermeasure(countermeasure)
+ pool.submit_task(mwc,
+ get_small_scalar_multiples,
+ mwc, params, bits, samples, seed=chunk_id)
+ for mult, future in tqdm(pool.as_completed(), desc="Computing small scalar distributions.", total=len(pool.tasks)):
+ if error := future.exception():
+ print("Error", mult, error)
+ raise error
+ res, duration = future.result()
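+            # Re-tag the result's class/module so the objects unpickle cleanly
+            # (see the enable_spawn note below).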
+ res.__class__ = MultResults
+ res.__module__ = "common"
+ print(f"Got {mult} in {duration} s.")
+ if mult not in multiples_mults:
+ multiples_mults[mult] = res
+ else:
+ # Accumulate
+ multiples_mults[mult].merge(res)
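+            # Checkpoint after every result so partial progress survives interruption.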
+ with open(f"multiples_{category}_{curve}_{bits}_ctr_chunk{chunk_id}.pickle","wb") as h:
+ pickle.dump(multiples_mults, h)
+ # Handle the enable_spawn trick that messes up class modules.
+ for k, v in multiples_mults.items():
+ v.__class__ = MultResults
+ v.__module__ = "common"
+ with open(f"multiples_{category}_{curve}_{bits}_ctr_chunk{chunk_id}.pickle","wb") as h:
+ pickle.dump(multiples_mults, h)
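+
+    # Hypothetical follow-up (not part of the script itself): load a chunk back
+    # for analysis, substituting the chunk ID embedded in the output filename:
+    #
+    #     with open("multiples_secg_secp256r1_256_ctr_chunk<chunk_id>.pickle", "rb") as h:
+    #         multiples_mults = pickle.load(h)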