| author | J08nY | 2025-03-15 19:10:15 +0100 |
|---|---|---|
| committer | J08nY | 2025-04-16 12:25:06 +0200 |
| commit | 48521975f4ba331ccd3bacb30638c52d32892b59 | (patch) |
| tree | 7a2235ac3f00542b50df976d75f9b576a74e7d8e | |
| parent | 5b4d3dd0835ede999078a852ccf9cde88caa311a | (diff) |
| download | ECTester-48521975f4ba331ccd3bacb30638c52d32892b59.tar.gz ECTester-48521975f4ba331ccd3bacb30638c52d32892b59.tar.zst ECTester-48521975f4ba331ccd3bacb30638c52d32892b59.zip | |
| -rw-r--r-- | epare/common.py | 4 |
| -rw-r--r-- | epare/simulate.py | 24 |
2 files changed, 13 insertions, 15 deletions
```diff
diff --git a/epare/common.py b/epare/common.py
index 15c7f7c..d416378 100644
--- a/epare/common.py
+++ b/epare/common.py
@@ -68,7 +68,7 @@ class MultIdent:
         elif self.countermeasure == "euclidean":
             return lambda *args, **kwargs: EuclideanSplitting(func(*args, **kwargs))
 
-    def with_countermeasure(self, countermeasure: str):
+    def with_countermeasure(self, countermeasure: str | None):
         if countermeasure not in (None, "gsr", "additive", "multiplicative", "euclidean"):
             raise ValueError(f"Unknown countermeasure: {countermeasure}")
         return MultIdent(self.klass, *self.args, **self.kwargs, countermeasure=countermeasure)
@@ -156,8 +156,8 @@ class ProbMap:
             raise ValueError("Enriching can only work on equal amount of samples (same run, different divisors)")
         self.probs.update(other.probs)
 
-# All dbl-and-add multipliers from https://github.com/J08nY/pyecsca/blob/master/pyecsca/ec/mult
+# All dbl-and-add multipliers from https://github.com/J08nY/pyecsca/blob/master/pyecsca/ec/mult
 
 window_mults = [
     MultIdent(SlidingWindowMultiplier, width=3),
     MultIdent(SlidingWindowMultiplier, width=4),
diff --git a/epare/simulate.py b/epare/simulate.py
index df14748..f28a57d 100644
--- a/epare/simulate.py
+++ b/epare/simulate.py
@@ -31,6 +31,7 @@ from pyecsca.misc.utils import TaskExecutor
 
 from common import *
 
+
 def get_general_multiples(bits: int, samples: int = 1000) -> MultResults:
     from random import randint
     results = []
@@ -39,6 +40,7 @@ def get_general_multiples(bits: int, samples: int = 1000) -> MultResults:
         results.append({big_scalar})
     return MultResults(results, samples)
 
+
@@ -50,6 +52,7 @@ def get_general_n_multiples(bits: int, n: int, samples: int = 1000) -> MultResults:
         results.append(smult)
     return MultResults(results, samples)
 
+
 def get_small_scalar_multiples(mult: MultIdent,
                                params: DomainParameters,
                                bits: int,
@@ -76,7 +79,7 @@ def get_small_scalar_multiples(mult: MultIdent,
         # Use a list for less memory usage.
         results.append(list(multiples_computed(scalar, params, mult.klass, mult.partial, use_init, use_multiply, kind=kind)))
     duration += time.perf_counter()
-    return MultResults(results, samples), duration
+    return MultResults(results, samples, duration=duration, kind=kind)
 
 
 if __name__ == "__main__":
@@ -86,11 +89,12 @@ if __name__ == "__main__":
     num_workers = int(sys.argv[1]) if len(sys.argv) > 1 else 32
     bits = params.order.bit_length()
     samples = int(sys.argv[2]) if len(sys.argv) > 2 else 100
+    kind = sys.argv[3] if len(sys.argv) > 3 else "precomp+necessary"
     selected_mults = all_mults
     shuffle(selected_mults)
     print(f"Running on {num_workers} cores, doing {samples} samples.")
-    
+
     multiples_mults = {}
     chunk_id = randbytes(6).hex()
     with TaskExecutor(max_workers=num_workers) as pool:
@@ -100,24 +104,18 @@ if __name__ == "__main__":
             pool.submit_task(mwc,
                              get_small_scalar_multiples,
                              mwc, params, bits, samples, seed=chunk_id)
-        for mult, future in tqdm(pool.as_completed(), desc="Computing small scalar distributions.", total=len(pool.tasks)):
+        for mult, future in tqdm(pool.as_completed(), desc="Computing small scalar distributions.", total=len(pool.tasks), smoothing=0):
             if error := future.exception():
                 print("Error", mult, error)
                 raise error
-            res, duration = future.result()
-            res.__class__ = MultResults
-            res.__module__ = "common"
-            print(f"Got {mult} in {duration} s.")
+            res = future.result()
+            print(f"Got {mult} in {res.duration}.")
             if mult not in multiples_mults:
                 multiples_mults[mult] = res
             else:
                 # Accumulate
                 multiples_mults[mult].merge(res)
-            with open(f"multiples_{category}_{curve}_{bits}_ctr_chunk{chunk_id}.pickle","wb") as h:
+            with open(f"multiples_{bits}_{kind}_chunk{chunk_id}.pickle","wb") as h:
                 pickle.dump(multiples_mults, h)
-    # Handle the enable_spawn trick that messes up class modules.
-    for k, v in multiples_mults.items():
-        v.__class__ = MultResults
-        v.__module__ = "common"
-    with open(f"multiples_{category}_{curve}_{bits}_ctr_chunk{chunk_id}.pickle","wb") as h:
+    with open(f"multiples_{bits}_{kind}_chunk{chunk_id}.pickle","wb") as h:
         pickle.dump(multiples_mults, h)
```
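On the common.py side, the functional content is the annotation change: `with_countermeasure` already accepted `None` at runtime (it is the first value in the allow-list), so `str | None` just makes the signature honest for type checkers. A small usage sketch follows; the import paths are assumptions rather than part of the commit.

```python
# Illustrative sketch only (not part of this commit).
# Assumes epare/common.py is importable as `common` and pyecsca is installed.
from pyecsca.ec.mult import SlidingWindowMultiplier

from common import MultIdent

mult = MultIdent(SlidingWindowMultiplier, width=3)

plain = mult.with_countermeasure(None)          # "no countermeasure"; was already valid at runtime
masked = mult.with_countermeasure("euclidean")  # scalar multiplier wrapped in EuclideanSplitting
# mult.with_countermeasure("bogus") would raise ValueError: Unknown countermeasure: bogus
```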

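On the simulate.py side, `get_small_scalar_multiples` now returns a single `MultResults` carrying its own `duration` and `kind` instead of a `(results, duration)` tuple, the driver takes the scalar-multiple `kind` as an optional third CLI argument, and the per-chunk pickles are named `multiples_{bits}_{kind}_chunk{chunk_id}.pickle`. A hedged sketch of gathering such chunk files afterwards; the bit length, kind, and directory are assumed example values, and only `merge` and `duration` are taken from the diff above.

```python
# Sketch, not part of the commit: merge per-chunk pickles produced by simulate.py.
# Unpickling needs epare/common.py importable, since the stored objects are
# instances of common.MultIdent / common.MultResults.
import pickle
from pathlib import Path

merged = {}
# Assumed example values: 256-bit order, default kind "precomp+necessary".
for path in Path(".").glob("multiples_256_precomp+necessary_chunk*.pickle"):
    with path.open("rb") as h:
        chunk = pickle.load(h)       # dict mapping MultIdent -> MultResults
    for mult, res in chunk.items():
        if mult in merged:
            merged[mult].merge(res)  # accumulate samples across chunks
        else:
            merged[mult] = res

for mult, res in merged.items():
    print(mult, res.duration)        # duration now travels with the MultResults
```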