Skip to content
Snippets Groups Projects
Commit 49ea765f authored by Tomas Krizek's avatar Tomas Krizek
Browse files

Merge branch 'scale' into 'master'

scale: tool to scale clients in pcap

See merge request !15
parents f321d5b2 e9fd5af6
No related branches found
No related tags found
1 merge request!15scale: tool to scale clients in pcap
Pipeline #68853 passed
......@@ -5,7 +5,7 @@ stages:
- test
.debian: &debian
image: $CI_REGISTRY/knot/knot-resolver/ci/debian-buster:knot-2.8
image: $CI_REGISTRY/knot/knot-resolver/ci/debian-buster:knot-3.0
tags:
- docker
- linux
......
scale.py 0 → 100755
#!/usr/bin/python3
import argparse
import logging
import random
import sys
import traceback
from typing import Dict
import dpkt.pcap
LINKTYPES_RAW = [12, 14, 101] # possible options for DLT_RAW
def scale_pcap(
        filename_in: str,
        filename_out: str,
        factor: float
) -> None:
    """Copy a random subset of clients from one pellet PCAP to another.

    A client is identified by its source IPv6 address.  For each client,
    all of its packets are either kept or dropped as a unit: the first
    packet seen from an address draws a single random decision (kept with
    probability ``factor``), and every later packet from that address
    reuses it.

    Args:
        filename_in: input PCAP; must use a raw-IP linktype, i.e. be
            output from pellet.py (exits with status 1 otherwise).
        filename_out: path of the output PCAP (written as DLT_RAW).
        factor: probability in (0, 1] that any given client is retained.

    Raises:
        RuntimeError: if factor is not positive.
        NotImplementedError: if factor > 1 (scaling up not implemented).
    """
    if factor <= 0:
        raise RuntimeError("invalid factor: must be larger than 0")
    if factor > 1:
        raise NotImplementedError("scaling up isn't implemented yet")

    # per-client keep/drop decision, keyed by source IPv6 address
    clients = {}  # type: Dict[bytes, bool]

    with open(filename_in, 'rb') as fin:
        pcap_in = dpkt.pcap.Reader(fin)
        if pcap_in.datalink() not in LINKTYPES_RAW:
            logging.critical("input PCAP must be output from pellet.py")
            sys.exit(1)
        with open(filename_out, 'wb') as fout:
            # Construct the writer *before* entering the try block:
            # if the constructor raised inside it, the finally clause
            # would fail with NameError on pcap_out and mask the
            # original exception.
            pcap_out = dpkt.pcap.Writer(
                fout, snaplen=66000, linktype=dpkt.pcap.DLT_RAW)
            try:
                for ts, pkt in pcap_in:
                    ip = dpkt.ip6.IP6(pkt)
                    try:
                        write = clients[ip.src]
                    except KeyError:
                        # first packet from this client: decide once
                        write = random.random() < factor
                        clients[ip.src] = write
                    if write:
                        pcap_out.writepkt(pkt, ts=ts)
            finally:
                pcap_out.close()
def main():
    """Command-line entry point: parse arguments, seed the PRNG, scale
    the input PCAP, and exit with 0 on success or 1 on any failure."""
    argp = argparse.ArgumentParser(
        description='scale (up or down) the number of clients in pellet PCAP')
    argp.add_argument('pcap_in', type=str, help='input PCAP file to process')
    argp.add_argument('pcap_out', type=str, help='output PCAP to write')
    argp.add_argument(
        '-f', '--factor', type=float, default=0.5,
        help='the factor to which number of clients is scaled to')
    argp.add_argument(
        '-s', '--seed', type=int, default=0,
        help='seed for PRNG')
    opts = argp.parse_args()

    logging.basicConfig(
        format='%(asctime)s %(levelname)8s %(message)s', level=logging.DEBUG)
    # fixed default seed keeps runs reproducible unless overridden
    random.seed(opts.seed)

    exit_code = 0
    try:
        scale_pcap(opts.pcap_in, opts.pcap_out, opts.factor)
    except FileNotFoundError as exc:
        # expected user error: report without a traceback
        logging.critical('%s', exc)
        exit_code = 1
    except Exception as exc:  # top-level boundary: log and exit non-zero
        logging.critical('uncaught exception: %s', exc)
        logging.debug(traceback.format_exc())
        exit_code = 1
    sys.exit(exit_code)
......@@ -14,7 +14,7 @@ DEFAULT_FILENAME = 'shotgun-all.json'
class VersionError(RuntimeError):
def __init__(self):
super(VersionError, self).__init__(
super().__init__(
"Older formats of JSON data aren't supported. "
"Use older tooling or re-run the tests with newer shotgun.")
......@@ -25,13 +25,13 @@ class MismatchData(RuntimeError):
class MissingData(RuntimeError):
def __init__(self, field):
super(MissingData, self).__init__(
super().__init__(
'Field "{field}" is missing in one or more files.'.format(field=field))
class MergeFailed(RuntimeError):
def __init__(self, field):
super(MergeFailed, self).__init__(
super().__init__(
'Failed to merge field "{field}".'.format(field=field))
......@@ -125,12 +125,12 @@ def merge_fields(fields, thread_data):
for field, merge_func in fields.items():
try:
field_data = [data[field] for data in thread_data]
except KeyError:
raise MissingData(field)
except KeyError as exc:
raise MissingData(field) from exc
try:
out[field] = merge_func(field_data)
except Exception:
raise MergeFailed(field)
except Exception as exc:
raise MergeFailed(field) from exc
return out
......@@ -139,8 +139,8 @@ def merge_data(thread_data):
try:
if thread_data[0]['version'] != JSON_VERSION:
raise VersionError
except KeyError:
raise VersionError
except KeyError as exc:
raise VersionError from exc
return merge_fields(DATA_STRUCTURE_ROOT, thread_data)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment