Compare commits
247 Commits
e5c243f075...master

Commits in this comparison (SHA1 only):
0f9ff99d90, 8129a2bd95, 1c498208da, 2e4ff28fc2, 8c2f78cd02, cf18199ba3, c43826dc9c, b92ee09af9, fdf04fb21e, a75b0b74a0,
9f8db93f7c, 4e85d7a3e5, 0785c1e4e6, a9f9c42ab1, 44f20be108, dc578c8a1b, e4fc32a1a2, ec443c9bd4, 009d59c499, 85c4bfeb75,
502de2d864, a713b9e262, 98fe00c02f, a97563fe61, 951bac5f1e, 97f0946ad0, baf2207a4f, 345e6546ce, a32df7b8aa, aca74ca09c,
a25288a737, 67ca4d66b0, a701d378e2, eb281c976c, 060ffaad74, dde4350a00, ef9740177e, af2a53abb3, 11749d39a3, 4b291f04ba,
772f2d704a, 32184560f4, 0dfc17f950, 91ba0827e0, f3e155fd87, 46e8bf95ba, 7cc030a4cc, 70a1e5b82e, 75be22b719, de04d94779,
14eca54f98, 4c33e4872e, 8168c46925, 05cb425096, f594955371, 041f4d0c2c, 7f1e2699c9, d6062ee78b, 8a91736f39, 29b5e02469,
f7abcf1fdf, 7764e1a49d, 9eabd701e4, e46cc7e8bd, c4d2a66d83, a2b57d32f1, bb94a75417, 3496f8385f, 30fa09168e, 6f1f5afa07,
f276dbd242, b7e09741e1, 180f4dcc8a, 80f292767b, e5bea755ba, 5dcb2450b3, be15a51017, f650d98eb5, b578f70876, e5483760a5,
725b2d9081, 719413bebb, bfe0a2ef0e, e0d972c937, 9693685434, d65ad5b10e, 6a72050cf1, 4a2365c671, 1473a0c25a, 5eb8a5ea8b,
dd086d77e0, 8528f89484, 6f3530e2a6, 26c10c5127, 259db62584, b35104f3ba, a0668a89d4, 3837fe81f8, 95b39c8aaa, 0f234adabb,
f77140eb6b, ac801dc5ac, a845747a9c, b3073886c8, dbc4b4dd72, c463195a25, 61e99e6e83, 7d2d047903, 97567140ad, 7a35d5014d,
3362ba2c60, 58935bd3c6, 7bac2f7fd7, a77425cfa2, 178862a0e3, 861c764d75, 51da4e6899, e2625998c6, d74c87a4d2, 266cfb5e8c,
83d40c3f04, a5d837f865, b2fa7f38be, 6eefc8c081, 50afb7e4e9, b95d5202d5, 1b0c0b9c63, 38b7bf68ec, 8004c74acf, 6d0b4d747d,
29590640ef, a6953f2796, 08784f671e, eae11e0eef, 5348128bfc, 8f85fa95b6, 6eca677b52, d93529ea0f, c44c359b2f, 81a2381d59,
5a9e3750a9, 37412e4f96, d028cfc0bb, d5912242b7, b99e2f7f12, 4d09e0d2f1, 97aed222ba, d3880d4ffc, 1b97e12b14, e1b0cfa32a,
16b9929dcb, fce276836a, 45c8ba0a56, edad08d92d, bcab8745a0, be88aec9b3, 2f877015d3, 481b6299f0, 295b21fac7, ce13ee1ec4,
6bd4ecfc04, 4dab08b710, 64360e9a28, 675764bdc6, b55d38c5b9, f0df8aeb2c, 84546710ed, b7676674ee, 7d134fc34a, 881495b3d0,
6eddf0a125, 04f1d78741, 02d76c1c96, 72ba200c9e, c66c6e9ad2, c434464bb1, 86ed3c3109, 77583205ac, d5d23f0c34, c8f5e1f08f,
4a5856edca, 28d9eb8cf9, e924df98a5, c4ea007aa2, 0b93810204, 4ddb6497ae, fd382fa252, 7187e62c9c, 3d29c1c572, 6a0aa5ba05,
452e103d10, c947e967ce, f0d224b452, d3e3d3d31a, 494ae460df, 8a9806bb6a, ec0195de38, a8e5ce0dbd, 27590e4ed6, 57a1d5f249,
981a668fe6, 77ed647603, f037f94040, 10ce080108, 6ee6416c37, 0f67ef76a8, 86285146ab, a3e760a682, 69dfc9a1d8, cb59a663b8,
237dfe73d9, 3f140fa016, 50d03e85b7, ab7ef014f8, 2754882147, 41ccc31ac7, 53d89bad05, a72552d373, c81e6635b4, 17421f713e,
7430b4c651, 91bae23ab1, 0b63242bbd, c9b7d8fc0d, f06dad1281, 90023312dc, 45b8c74b02, 877f0d9d3e, 832a41fa46, 6e4b0a3466,
eb7c832b98, 3f248c6d66, 9ac527c8d7, 070c0600d0, 20bbc02385, 2239911bc0, ccfd3c3d06, 91e98bc524, ba023f3573, 69e6284550,
c568e03ca8, ae2827319e, 20a70979c1, 0e2a9789b2, 22d9744763, d5ed4efca8, 39aae62867
@@ -4,6 +4,31 @@ WORKING_DIR="/home/prause/measurement-scripts/"
 DATE=$(date +%F_%H-%M-%S)
 NEW_DIR="/home/prause/mobile_measurements/$DATE/"
+
+ethtool -k wwan0
+STATE=$?
+
+if test $STATE -ne 0
+then
+    for n in 1 2 3 ; do
+        for f in 400 500 600 700 800 900 1000 1100 1200 1300 1400 1500 1600; do
+            beep -f $f -l 20
+        done
+    done
+    exit 1
+fi
+
+/root/connect-modem.py -l telekom
+
+if test $STATE -ne 0
+then
+    for n in 1 2 3 ; do
+        for f in 400 500 600 700 800 900 1000 1100 1200 1300 1400 1500 1600; do
+            beep -f $f -l 20
+        done
+    done
+    exit 1
+fi
+
 export PIPENV_VENV_IN_PROJECT=1
 mkdir $NEW_DIR
 cd $WORKING_DIR
@@ -11,10 +36,11 @@ cd $WORKING_DIR
 beep -f 130 -l 100 -n -f 262 -l 100 -n -f 330 -l 100 -n -f 392 -l 100 -n -f 523 -l 100 -n -f 660 -l 100 -n -f 784 -l 300 -n -f 660 -l 300 -n -f 146 -l 100 -n -f 262 -l 100 -n -f 311 -l 100 -n -f 415 -l 100 -n -f 523 -l 100 -n -f 622 -l 100 -n -f 831 -l 300 -n -f 622 -l 300 -n -f 155 -l 100 -n -f 294 -l 100 -n -f 349 -l 100 -n -f 466 -l 100 -n -f 588 -l 100 -n -f 699 -l 100 -n -f 933 -l 300 -n -f 933 -l 100 -n -f 933 -l 100 -n -f 933 -l 100 -n -f 1047 -l 400
 pipenv run pipenv run ./measurement_main.py -c 130.75.73.69 \
     --bandwidth \
-    --set time=30 \
+    --set time=60 \
     --gps /dev/serial/by-id/usb-u-blox_AG_-_www.u-blox.com_u-blox_5_-_GPS_Receiver-if00 \
     --serial /dev/ttyUSB2 \
-    -n 10 \
+    -n 600 \
     --folder $NEW_DIR \
     --prefix automated_$DATE \
-    -i wwan0
+    -i wwan0 \
+    | tee $NEW_DIR/automated_measurement_$DATE.log
calc_bandwidth_goodput_csv.py (new file, 198 lines added)
@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser

import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

import seaborn as sns

sns.set()
#sns.set(font_scale=1.5)

tex_fonts = {
    "pgf.texsystem": "lualatex",
    # "legend.fontsize": "x-large",
    # "figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    # "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}


# plt.rcParams.update(tex_fonts)


def convert_cellid(value):
    if isinstance(value, str):
        try:
            r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
            return r
        except Exception as e:
            return -1
    else:
        return int(-1)


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )

    args = parser.parse_args()

    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)

    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")

    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))

    concat_frame = None

    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list))
            )
        )

        # try:
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )

        transmission_df["datetime"] = pd.to_datetime(
            transmission_df["datetime"]
        ) - pd.Timedelta(hours=1)
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()

        # srtt to [s]
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)

        # key for columns and level for index
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )

        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )

        # set meta values and remove all not needed columns
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]

        # transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])

        # read serial csv
        serial_df = pd.read_csv(
            args.serial_file, converters={"Cell_ID": convert_cellid}
        )
        serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
            hours=1
        )
        serial_df = serial_df.set_index("datetime")
        serial_df.index = pd.to_datetime(serial_df.index)
        serial_df.sort_index()

        # print(serial_df["Cell_ID"])

        # serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
        #     lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))

        transmission_df = pd.merge_asof(
            transmission_df,
            serial_df,
            tolerance=pd.Timedelta("1s"),
            right_index=True,
            left_index=True,
        )

        #transmission_df.index = transmission_df["arrival_time"]

        # replace 0 in RSRQ with Nan
        transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
            0, np.NaN
        )
        transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)

        # filter active state
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]

            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)

        # filter if sc is usesd for uplink
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)

        # sum all effective bandwidth for 5G and 4G
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
            "SCC1_NR5G_bw"
        ].fillna(0)

        transmission_df["lte_effective_bw_sum"] = (
            transmission_df["LTE_SCC1_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC2_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC3_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC4_effective_bw"].fillna(0)
            + transmission_df["LTE_bw"].fillna(0))
        transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]

        transmission_df["effective_bw_sum"] = transmission_df["nr_effective_bw_sum"] + transmission_df[
            "lte_effective_bw_sum"]

        transmission_df = transmission_df.filter(["goodput", "effective_bw_sum", "srtt"])
        transmission_df = transmission_df.reset_index(drop=True)

        if concat_frame is None:
            concat_frame = transmission_df
        else:
            concat_frame = pd.concat([concat_frame, transmission_df])

    concat_frame.to_csv("{}_concat_bw_gp.csv".format(args.save))
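For orientation, a hypothetical invocation of the new script (the file paths are made up; the flags come from the ArgumentParser above). The folder given with -p is concatenated directly with the CSV file names, so it should end with a slash, and the script appends `_concat_bw_gp.csv` to the --save value.

```bash
./calc_bandwidth_goodput_csv.py \
    -s drive1/serial_monitor_output.csv \
    -p drive1/pcap_csv/ \
    --save drive1/plots/run1 \
    -i 10
```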
calc_gps_map_csv.py (new executable file, 192 lines added)
@@ -0,0 +1,192 @@
#!/usr/bin/env python3
import multiprocessing
import os
from argparse import ArgumentParser
from datetime import datetime
from math import ceil
from time import sleep

import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt


def csv_to_dataframe(csv_list, dummy):

    global n
    global frame_list

    transmission_df = None

    for csv in csv_list:
        tmp_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        #tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"]) - pd.Timedelta(hours=1)
        tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"])
        tmp_df = tmp_df.set_index("datetime")
        tmp_df.index = pd.to_datetime(tmp_df.index)
        if transmission_df is None:
            transmission_df = tmp_df
        else:
            transmission_df = pd.concat([transmission_df, tmp_df])

        n.value += 1

    frame_list.append(transmission_df)


from itertools import islice


def chunk(it, size):
    it = iter(it)
    return iter(lambda: tuple(islice(it, size)), ())


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-f", "--gps_file", required=True, help="GPS csv file.")
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")
    parser.add_argument("--time_offset", default=0, type=int, help="Minutes added to GPS datetime.")
    parser.add_argument("--neg_offset", default=False, action="store_true", help="Subtract GPS time offset.")
    parser.add_argument("--auto_offset", default=False, action="store_true", help="Calculate GPS time offset.")
    parser.add_argument(
        "-c",
        "--cores",
        default=1,
        type=int,
        help="Number of cores for multiprocessing.",
    )
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )

    args = parser.parse_args()
    manager = multiprocessing.Manager()
    n = manager.Value("i", 0)
    frame_list = manager.list()
    jobs = []

    # load all pcap csv into one dataframe
    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)

    parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
    print("Start processing with {} jobs.".format(args.cores))
    for p in parts:
        process = multiprocessing.Process(target=csv_to_dataframe, args=(p, "dummy"))
        jobs.append(process)

    for j in jobs:
        j.start()

    print("Started all jobs.")
    # Ensure all of the processes have finished
    finished_job_counter = 0
    working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
    w = 0
    while len(jobs) != finished_job_counter:
        sleep(1)
        print(
            "\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
                working[w],
                working[w],
                working[w],
                len(jobs),
                finished_job_counter,
                n.value,
                len(pcap_csv_list),
                round((n.value / len(pcap_csv_list)) * 100, 2),
            ),
            end="",
        )
        finished_job_counter = 0
        for j in jobs:
            if not j.is_alive():
                finished_job_counter += 1
        if (w + 1) % len(working) == 0:
            w = 0
        else:
            w += 1
    print("\r\nSorting table...")

    transmission_df = pd.concat(frame_list)
    frame_list = None
    transmission_df = transmission_df.sort_index()

    print("Calculate goodput...")
    transmission_df["goodput"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
    transmission_df["goodput"] = transmission_df["goodput"].apply(
        lambda x: ((x * 8) / args.interval) / 10**6
    )

    # load dataframe an put it into geopandas
    df = pd.read_csv(args.gps_file)
    df["kmh"] = df["speed (knots)"].apply(lambda x: x * 1.852)
    if not args.auto_offset and args.time_offset > 0:
        if args.neg_offset:
            df["datetime"] = pd.to_datetime(df["datetime"]) - pd.Timedelta(minutes=args.time_offset)
        else:
            df["datetime"] = pd.to_datetime(df["datetime"]) + pd.Timedelta(minutes=args.time_offset)
    elif args.auto_offset:
        gps_first = datetime.strptime(df["datetime"].iloc[0], "%Y-%m-%d %H:%M:%S.%f")
        pcap_first = pd.to_datetime(transmission_df.first_valid_index())
        calc_offset = gps_first - pcap_first
        if gps_first > pcap_first:
            time_offset = gps_first - pcap_first
            df["datetime"] = pd.to_datetime(df["datetime"]) - time_offset
        else:
            time_offset = pcap_first - gps_first
            df["datetime"] = pd.to_datetime(df["datetime"]) + time_offset
    else:
        df["datetime"] = pd.to_datetime(df["datetime"])

    df = df.set_index("datetime")
    df.index = pd.to_datetime(df.index)

    gdf = gpd.GeoDataFrame(
        df,
        geometry=gpd.points_from_xy(df["longitude"], df["latitude"]),
        crs="EPSG:4326",
    )
    gdf = pd.merge_asof(
        gdf,
        transmission_df,
        tolerance=pd.Timedelta("10s"),
        right_index=True,
        left_index=True,
    )

    # read serial csv
    serial_df = pd.read_csv(args.serial_file)
    #serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
    serial_df["datetime"] = pd.to_datetime(serial_df["datetime"])
    serial_df = serial_df.set_index("datetime")
    serial_df.index = pd.to_datetime(serial_df.index)

    gdf = pd.merge_asof(
        gdf,
        serial_df,
        tolerance=pd.Timedelta("1s"),
        right_index=True,
        left_index=True,
    )

    # format to needed format and add basemap as background
    df_wm = gdf.to_crs(epsg=3857)
    #df_wm.to_csv("debug-data.csv")
    # ax2 = df_wm.plot(figsize=(10, 10), alpha=0.5, edgecolor='k')

    df_wm.to_csv("{}gps_plot.csv".format(args.save))
    print("Saved calculations to: {}gps_plot.csv".format(args.save))
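A possible invocation, again with invented paths; the options mirror the ArgumentParser definitions above, and --auto_offset lets the script align the GPS and pcap timestamps itself instead of a fixed --time_offset.

```bash
./calc_gps_map_csv.py \
    -f drive1/automated_gps.csv \
    -s drive1/serial_monitor_output.csv \
    -p drive1/pcap_csv/ \
    --save drive1/plots/ \
    --auto_offset \
    -c 4
```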
cdf_compare.py (new executable file, 228 lines added)
@@ -0,0 +1,228 @@
#!/usr/bin/env python3
import multiprocessing
import os
import pickle
from argparse import ArgumentParser
from math import ceil
from time import sleep

import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits import axisartist
from mpl_toolkits.axes_grid1 import host_subplot


def csv_to_dataframe(csv_list, folder, dummy):

    global n
    global frame_list

    tmp_df = None

    for csv in csv_list:
        tmp_df = pd.read_csv(
            "{}{}".format(folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"]) - pd.Timedelta(hours=1)
        tmp_df = tmp_df.set_index("datetime")
        tmp_df.index = pd.to_datetime(tmp_df.index)
        if tmp_df is None:
            tmp_df = tmp_df
        else:
            tmp_df = pd.concat([tmp_df, tmp_df])

        n.value += 1

    tmp_df = tmp_df.filter(
        ["srtt", "datetime", "payload_size", "congestion_control", "direction"])
    frame_list.append(tmp_df)
    del tmp_df


from itertools import islice


def chunk(it, size):
    it = iter(it)
    return iter(lambda: tuple(islice(it, size)), ())


def plot_cdf(dataframe, column_name, axis=None):
    stats_df = dataframe \
        .groupby(column_name) \
        [column_name] \
        .agg("count") \
        .pipe(pd.DataFrame) \
        .rename(columns={column_name: "frequency"})

    # PDF
    stats_df["PDF"] = stats_df["frequency"] / sum(stats_df["frequency"])

    # CDF
    stats_df["CDF"] = stats_df["PDF"].cumsum()
    stats_df = stats_df.reset_index()

    if axis:
        stats_df.plot(x=column_name, y=["CDF"], grid=True, ax=axis)
    else:
        stats_df.plot(x=column_name, y=["CDF"], grid=True)
    del stats_df


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serials", required=True, help="Serial csv files. Comma separated.")
    parser.add_argument("-f", "--folders", required=True, help="PCAP csv folders. Comma separated.")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")
    parser.add_argument(
        "-c",
        "--cores",
        default=1,
        type=int,
        help="Number of cores for multiprocessing.",
    )
    parser.add_argument(
        "-i",
        "--interval",
        default=2,
        type=int,
        help="Time interval for rolling window.",
    )

    args = parser.parse_args()

    transmission_df_list = list()
    for f in args.folders.split(","):
        manager = multiprocessing.Manager()
        n = manager.Value("i", 0)
        frame_list = manager.list()
        jobs = []

        # load all pcap csv into one dataframe
        pcap_csv_list = list()
        for filename in os.listdir(f):
            if filename.endswith(".csv") and "tcp" in filename:
                pcap_csv_list.append(filename)

        parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
        print("Start processing with {} jobs.".format(args.cores))
        for p in parts:
            process = multiprocessing.Process(target=csv_to_dataframe, args=(p, f, "dummy"))
            jobs.append(process)

        for j in jobs:
            j.start()

        print("Started all jobs.")
        # Ensure all the processes have finished
        finished_job_counter = 0
        working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
        w = 0
        while len(jobs) != finished_job_counter:
            sleep(1)
            print(
                "\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
                    working[w],
                    working[w],
                    working[w],
                    len(jobs),
                    finished_job_counter,
                    n.value,
                    len(pcap_csv_list),
                    round((n.value / len(pcap_csv_list)) * 100, 2),
                ),
                end="",
            )
            finished_job_counter = 0
            for j in jobs:
                if not j.is_alive():
                    finished_job_counter += 1
            if (w + 1) % len(working) == 0:
                w = 0
            else:
                w += 1
        print("\r\nSorting table...")

        transmission_df = pd.concat(frame_list)
        frame_list = None
        transmission_df = transmission_df.sort_index()

        #
        # Don't forget to add new columns to the filter argument in the function above!
        #

        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)

        # key for columns and level for index
        transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10**6
        )

        transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )

        # set meta values
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]

        #transmission_df = transmission_df.filter(["srtt", "datetime", "srtt", "payload_size"])
        transmission_df = transmission_df.drop(columns=["congestion_control", "direction"])

        # read serial csv
        #serial_df = pd.read_csv(args.serial_file)
        #serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
        #serial_df = serial_df.set_index("datetime")
        #serial_df.index = pd.to_datetime(serial_df.index)
        #serial_df.sort_index()

        #transmission_df = pd.merge_asof(
        #    transmission_df,
        #    serial_df,
        #    tolerance=pd.Timedelta("1s"),
        #    right_index=True,
        #    left_index=True,
        #)

        transmission_df_list.append(dict(
            df=transmission_df,
            cc_algo=cc_algo,
            transmission_direction=transmission_direction
        ))
        del transmission_df

    # Plot sRTT CDF
    legend = list()
    plot_cdf(transmission_df_list[0]["df"], "srtt")
    legend.append(transmission_df_list[0]["cc_algo"])
    for i in range(1, len(transmission_df_list)):
        plot_cdf(transmission_df_list[i]["df"], "srtt", axis=plt.gca())
        legend.append(transmission_df_list[i]["cc_algo"])
    #plt.xscale("log")
    plt.xlim(0, 0.15)
    plt.xlabel("sRTT [s]")
    plt.ylabel("CDF")
    plt.legend(legend)
    plt.title("{}".format(transmission_df_list[0]["transmission_direction"]))
    plt.savefig("{}{}_cdf_compare_plot.pdf".format(args.save, "srtt"))

    plt.clf()

    # Plot goodput CDF
    legend = list()
    plot_cdf(transmission_df_list[0]["df"], "goodput_rolling")
    legend.append(transmission_df_list[0]["cc_algo"])
    for i in range(1, len(transmission_df_list)):
        plot_cdf(transmission_df_list[i]["df"], "goodput_rolling", axis=plt.gca())
        legend.append(transmission_df_list[i]["cc_algo"])
    plt.xlabel("goodput [mbps]")
    plt.ylabel("CDF")
    plt.legend(legend)
    plt.title("{}".format(transmission_df_list[0]["transmission_direction"]))
    plt.savefig("{}{}_cdf_compare_plot.pdf".format(args.save, "goodput"))
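A sketch of how the comparison might be run (the paths are assumptions); -f and -s take comma-separated lists, one entry per measurement run to compare.

```bash
./cdf_compare.py \
    -f run_bbr/pcap_csv/,run_cubic/pcap_csv/ \
    -s run_bbr/serial.csv,run_cubic/serial.csv \
    --save plots/ \
    -c 4
```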
format_gps_to_csv.py (57 lines added, Normal file → Executable file)
@@ -0,0 +1,57 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
from time import sleep

import pandas as pd

import csv
from datetime import datetime
import math

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="NMEA file.")

    args = parser.parse_args()
    epoch = datetime.utcfromtimestamp(0)

    outputfile = open(args.file.replace("nmea", "csv"), "w")
    writer = csv.writer(outputfile, delimiter=",", lineterminator="\n")
    writer.writerow(
        ["datetime", "time_epoch", "latitude", "longitude", "speed (knots)", "systime_epoch"]
    )
    csv_string = ""

    for line in open(args.file, "r").readlines():
        if line.startswith("$GPRMC"):
            row = line.split(",")

            time = row[1]
            lat = row[3]
            lat_direction = row[4]
            lon = row[5]
            lon_direction = row[6]
            speed = row[7]
            date = row[9]
            systime_epoch = row[13] if len(row) < 14 else 0

            date_and_time = datetime.strptime(date + " " + time, "%d%m%y %H%M%S.%f")
            time_since_epoch = date_and_time.timestamp()
            date_and_time = date_and_time.strftime("%y-%m-%d %H:%M:%S.%f")  # [:-3]

            lat = round(math.floor(float(lat) / 100) + (float(lat) % 100) / 60, 6)
            if lat_direction == "S":
                lat = lat * -1

            lon = round(math.floor(float(lon) / 100) + (float(lon) % 100) / 60, 6)
            if lon_direction == "W":
                lon = lon * -1
            writer.writerow([date_and_time, time_since_epoch, lat, lon, speed, systime_epoch])
    outputfile.close()
    sleep(1)

    gps_df = pd.read_csv(args.file.replace("nmea", "csv"))
    #gps_df["datetime"] = pd.to_datetime(
    #    gps_df["systime_epoch"].apply(lambda x: datetime.fromtimestamp(x))
    #)
    gps_df.to_csv(args.file.replace("nmea", "csv"))
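Example usage under an assumed file name; the script derives the output path by replacing "nmea" with "csv" in the input name.

```bash
./format_gps_to_csv.py -f drive1/automated_2021-03-30_gps.nmea
```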
format_serial_txt_to_csv.py (new executable file, 191 lines added)
@@ -0,0 +1,191 @@
#!/usr/bin/env python3
import csv
import datetime
from argparse import ArgumentParser
import pandas as pd


def convert_bandwidth(value):
    try:
        value = int(value)
    except:
        value = -1
    if value == 0:
        return 1.4
    elif value == 1:
        return 3
    elif value == 2:
        return 5
    elif value == 3:
        return 10
    elif value == 4:
        return 15
    elif value == 5:
        return 20
    else:
        return 0


if __name__ == "__main__":

    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Input txt file.")
    args = parser.parse_args()

    file = open(args.file, "r")
    content = file.read()
    file.close()

    all_csv_lines = list()
    csv_header = ["time"]

    h1 = False
    h2 = False
    h3 = False

    for line in content.split("\n"):
        if line == "" or line == "\n":
            break
        raw_columns = line.split(";")
        csv_line = list()
        csv_line.append(raw_columns[0])
        for i in range(1, len(raw_columns)):
            col = raw_columns[i]

            if 'AT+QNWCFG="nr5g_csi"' in col:
                if not h1:
                    csv_header += ["mcs_PDSCH", "ri_PDSCH", "downlink_cqi", "pmi"]
                    h1 = True
                tmp = raw_columns[i + 1].replace('+QNWCFG: "nr5g_csi",', "")
                csv_line += tmp.split(",")
            elif "AT+QENDC" in col:
                if not h2:
                    csv_header += [
                        "endc_avl",
                        "plmn_info_list_r15_avl",
                        "endc_rstr",
                        "5G_basic",
                    ]
                    h2 = True
                tmp = raw_columns[i + 1].replace("+QENDC: ", "")
                csv_line += tmp.split(",")
            elif 'AT+QENG="servingcell"' in col:
                if not h3:
                    csv_header += [
                        "connection_state",
                        "is_tdd",
                        "mcc",
                        "mnc",
                        "cellID",
                        "PCID",
                        "earfcn",
                        "freq_band_ind",
                        "UL_bandwidth",
                        "DL_bandwidth",
                        "TAC",
                        "RSRP",
                        "RSRQ",
                        "RSSI",
                        "SINR",
                        "CQI_1-30",
                        "tx_power",
                        "srxlev",
                        "MCC",
                        "MNC",
                        "PCID",
                        "RSRP",
                        "SINR",
                        "RSRQ",
                        "ARFCN",
                        "band",
                    ]
                    h3 = True
                if "NOCONN" in raw_columns[i + 1]:
                    csv_line.append("NOCONN")
                    csv_line += raw_columns[i + 2].replace('+QENG: "LTE",', "").split(",")
                    csv_line += (
                        raw_columns[i + 3].replace('+QENG:"NR5G-NSA",', "").split(",")
                    )
                elif "SEARCH" in raw_columns[i + 1]:
                    csv_line.append("SEARCH")
                    csv_line += [""] * 25
                elif "OK" == raw_columns[i + 1]:
                    csv_line.append("OK")
                    csv_line += [""] * 25
                else:
                    csv_line.append("undefined")
                    csv_line += [""] * 25

        all_csv_lines.append(csv_line)

    outputfile = open(args.file.replace("txt", "csv"), "w")
    writer = csv.writer(outputfile, delimiter=",", lineterminator="\n", escapechar='\\')
    writer.writerow(csv_header)
    #print(all_csv_lines)
    for l in all_csv_lines:
        #print(l)
        writer.writerow(l)

    outputfile.close()

    outputfile = open(args.file.replace("txt", "csv"), "r")
    serial_df = pd.read_csv(
        outputfile,
        converters={"UL_bandwidth": convert_bandwidth, "DL_bandwidth": convert_bandwidth},
    )
    serial_df = serial_df.drop(columns=["MCC", "MNC"])
    serial_df["datetime"] = pd.to_datetime(
        serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(x))
    )
    serial_df.to_csv(args.file.replace("txt", "csv"))
    outputfile.close()


    exit()

    delete_string = [
        'AT+QNWCFG="nr5g_csi";',
        '+QNWCFG: "nr5g_csi"',
        'AT+QENG="servingcell";+QENG: "servingcell",',
        "+QENG:",
        "AT+QENDC;+QENDC:",
    ]

    for d in delete_string:
        content = content.replace(d, ",")
    content = (
        content.replace(";", "")
        .replace(" ", "")
        .replace(",,,", ",")
        .replace('"', "")
        .replace("LTE,", "")
        .replace("NR5G-NSA,", "")
    )

    header = "time,mcs,ri,cqi,pmi,conn_state,is_tdd,MCC,MNC,cellID,PCID,earfcn,freq_band_ind,UL_bandwidth,DL_bandwidth,TAC,RSRP,RSRQ,RSSI,SINR,CQI,tx_power,srxlev,MCC,MNC,PCID,RSRP,SINR,RSRQ,ARFCN,band,endc_avl,plmn_info_list_r15_avl,endc_rstr,5G_basic\n"
    csv_path = args.file.replace("txt", "csv")
    print("Write to: {}".format(csv_path))
    csv_string = header
    for csv_line in content.split("\n"):
        if len(header.split(",")) == len(csv_line.split(",")):
            csv_string += csv_line + "\n"
        else:
            # print("{} found {}".format(len(header.split(",")), len(csv_line.split(","))))
            print("Could not interpret string: {}".format(csv_line))
            print(
                "Expect {} columns got {}".format(
                    len(header.split(",")), len(csv_line.split(","))
                )
            )

    csv_string_io = StringIO(csv_string)
    serial_df = pd.read_csv(csv_string_io)
    serial_df = serial_df
    serial_df["datetime"] = pd.to_datetime(
        serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(x))
    )
    serial_df.to_csv(csv_path)
    print(serial_df)
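Example usage with an assumed file name; the output CSV is written next to the input by replacing "txt" with "csv".

```bash
./format_serial_txt_to_csv.py -f drive1/automated_serial_monitor_output.txt
```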
format_serial_txt_to_csv_EM9190.py (new executable file, 57 lines added)
@@ -0,0 +1,57 @@
#!/usr/bin/env python3
import datetime
import re
from argparse import ArgumentParser
import pandas as pd

KEY_VALUE_REGEX = r"(.+):(.+)"

if __name__ == "__main__":

    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Input txt file.")
    args = parser.parse_args()

    file = open(args.file, "r")
    content = file.read()
    file.close()

    serial_df = None
    p = re.compile(KEY_VALUE_REGEX)

    for part in content.split(";;;"):
        if part == "":
            break
        part = part.replace("\t", "\n").strip()
        time = None
        line_dict = dict(time=None)
        for line in part.split("\n"):
            if not line.startswith("!") or line == "" or line == "\n":
                if line_dict["time"] is None:
                    time = line
                    line_dict["time"] = [time]
            m = p.match(line)
            if m:
                key = m.group(1).strip().replace(" ", "_")
                value = m.group(2).replace("MHz", "").replace("---", "").strip()

                line_dict[key] = [value]

        if len(line_dict) > 1:
            #print("line:")
            #print(line_dict)
            #print("serial_df:")
            #print(serial_df)
            if serial_df is None:
                serial_df = pd.DataFrame.from_dict(line_dict, orient="columns",)
            else:
                serial_df = pd.concat([serial_df, pd.DataFrame.from_dict(line_dict, orient="columns")])

    serial_df = serial_df.copy()
    serial_df["datetime"] = pd.to_datetime(
        serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(int(x)))
    )
    serial_df.to_csv(args.file.replace("txt", "csv"))

    #serial_df = serial_df.filter(["datetime", "LTE_bw", "LTE_SCC2_bw", "LTE_SCC3_bw", "LTE_SCC4_bw", "SCC1_NR5G_bw", "NR5G_dl_bw", "NR5G_ul_bw", "LTE_SCC1_bw", "NR5G_bw"])
    #print(serial_df.to_string())
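The EM9190 variant is invoked the same way (file name assumed):

```bash
./format_serial_txt_to_csv_EM9190.py -f drive1/automated_serial_monitor_output.txt
```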
format_throughput_pcap_to_csv.py (new executable file, 356 lines added)
@@ -0,0 +1,356 @@
#!/usr/bin/env python3
import datetime
from io import StringIO
from math import ceil

import pandas as pd

import multiprocessing
import os
import re
import subprocess

from argparse import ArgumentParser
from time import sleep, time

# tshark -r ./tcp-cap-test/test__bandwidth_reverse_tcp_bbr_1.pcap -Y "tcp.stream eq 1" -T fields -e frame.time_relative -e ip.len -e ip.hdr_len -e tcp.hdr_len -e tcp.analysis.ack_rtt -e tcp.analysis.bytes_in_flight -e tcp.analysis.retransmission -e tcp.analysis.duplicate_ack -e ip.dst -e ip.src -e tcp.options.mss_val -E header=y -E separator=, -E quote=d
from util import chunk_list


def format_tcp_trace_to_csv(pcap_number, packets_to_keep, is_reverse=False):
    txt_name = "{}{}{}.txt".format(args.folder, args.tcp_trace, pcap_number)
    try:
        txt_file = open(txt_name, "r")
    except IOError as e:
        print("\rCan not open file {}\n {} {}".format(txt_name, e.errno, e.strerror))
        return
    content = txt_file.read()
    txt_file.close()

    csv_string = ""
    csv_string += "time_tcp_probe,snd_cwnd,snd_wnd,srtt\n"
    uptime = None
    counter = 0
    lines = content.split("\n")
    start_time = None
    for line in lines:
        counter += 1
        if uptime is None:
            uptime = float(line.split(" ")[0])
        else:
            if is_reverse:
                line_filter = "src=[::ffff:{}]:{}".format(args.server, args.port)
            else:
                line_filter = "dest={}:{}".format(args.server, args.port)

            # ignore tcp packets from iperf syn (packets to keep = len of tcp.stream.eq 1)
            if line_filter in line and counter >= (len(lines) - packets_to_keep):
                match = re.match(
                    r".* (\d+\.\d+): tcp_probe:.*snd_cwnd=(\d+).*snd_wnd=(\d+).*srtt=(\d+)",
                    line,
                )
                if match:
                    if start_time is None:
                        start_time = float(match.group(1)) - uptime
                    time = float(match.group(1)) - (uptime + start_time)
                    snd_cwnd = match.group(2)
                    snd_wnd = match.group(3)
                    srtt = match.group(4)
                    csv_string += "{},{},{},{}\n".format(time, snd_cwnd, snd_wnd, srtt)

    csv_string_io = StringIO(csv_string)

    trace_df = pd.read_csv(csv_string_io)
    if len(trace_df) <= 1:
        print("\rFaulty tcp trace file for pcap no: {}".format(pcap_number))
        return None
    return trace_df


def format_pcaps_to_csv(pcaps, dummy):
    global n
    for pcap in pcaps:
        if pcap.endswith(".pcap") and pcap.startswith(args.prefix):
            match = re.match(regex, pcap)
            if match:
                # metadata from pcap filename
                direction = "upload"
                if "_reverse_" in pcap:
                    direction = "download"
                congestion_control = match.group(2)
                pcap_number = match.group(3)

                # analyse traffic from pcap (receiver side)
                tshark_command = [
                    "tshark",
                    "-r",
                    "{}{}".format(args.folder, pcap),
                    # remove this for mobile measurements
                    # "-Y",
                    # "tcp.stream eq 1",
                    "-T",
                    "fields",
                    "-e",
                    "frame.time_relative",
                    "-e",
                    "ip.len",
                    "-e",
                    "ip.hdr_len",
                    "-e",
                    "tcp.hdr_len",
                    "-e",
                    "tcp.analysis.ack_rtt",
                    "-e",
                    "tcp.analysis.bytes_in_flight",
                    "-e",
                    "tcp.analysis.retransmission",
                    "-e",
                    "tcp.analysis.duplicate_ack",
                    "-e",
                    "ip.src",
                    "-e",
                    "ip.dst",
                    "-e",
                    "tcp.options.mss_val",
                    "-e",
                    "tcp.window_size",
                    "-e",
                    "frame.time_epoch",
                    "-e",
                    "tcp.stream",  # have to be the last value in line!
                    "-E",
                    "header=y",
                    "-E",
                    "separator=,",
                    "-E",
                    "quote=d",
                ]

                tshark_out = None
                try:
                    tshark_out = subprocess.check_output(tshark_command).decode("utf-8")
                except subprocess.CalledProcessError as tsharkexec:
                    if tsharkexec.returncode == 2:
                        print("\rtshark could not open pcap: {}".format(pcap))
                    else:
                        print("\rtshark exited with code: {}".format(tsharkexec.returncode))
                        print(tsharkexec.output)
                    continue

                # Convert String into StringIO
                csv_string_io = StringIO(tshark_out)

                conv_bool = lambda x: (True if x != "" else False)

                pcap_df = pd.read_csv(
                    csv_string_io,
                    converters={
                        "tcp.analysis.retransmission": conv_bool,
                        "tcp.analysis.duplicate_ack": conv_bool,
                    },
                )

                last_tcp_stream_in_pcap = pcap_df["tcp.stream"].max()
                pcap_df = pcap_df.loc[pcap_df["tcp.stream"] == last_tcp_stream_in_pcap]

                pcap_df["payload_size"] = pcap_df["ip.len"] - (
                    pcap_df["ip.hdr_len"] + pcap_df["tcp.hdr_len"]
                )
                pcap_df["direction"] = direction
                pcap_df["congestion_control"] = congestion_control
                pcap_df["pcap_number"] = pcap_number
                pcap_df["datetime"] = pd.to_datetime(
                    pcap_df["frame.time_epoch"].apply(
                        lambda x: datetime.datetime.fromtimestamp(x)
                    )
                )

                pcap_df = pcap_df.drop(
                    columns=["tcp.stream", "ip.len", "ip.hdr_len", "tcp.hdr_len"]
                )

                pcap_df.rename(
                    columns={
                        "frame.time_relative": "arrival_time",
                        "ip.src": "src_ip",
                        "ip.dst": "dst_ip",
                        "tcp.options.mss_val": "mss",
                        "tcp.analysis.ack_rtt": "ack_rtt",
                        "tcp.analysis.bytes_in_flight": "bytes_in_flight",
                        "tcp.window_size": "receive_window_size",
                        "tcp.analysis.retransmission": "is_retranmission",
                        "tcp.analysis.duplicate_ack": "is_dup_ack",
                        "frame.time_epoch": "time_epoch",
                    },
                    inplace=True,
                )

                pcap_df = pcap_df.sort_values("arrival_time")
                try:
                    # join tcp_trace data with pcap data
                    merge_srtt = True
                    if merge_srtt:
                        tcp_trace_df = format_tcp_trace_to_csv(
                            pcap_number,
                            len(pcap_df),
                            is_reverse=True if "_reverse_" in pcap else False,
                        )

                        if tcp_trace_df is None:
                            print(
                                "\rNo tcp trace file for pcap no {} found".format(
                                    pcap_number
                                )
                            )
                            continue  ## break before but stoped the thread
                        merged_df = pd.merge_asof(
                            pcap_df.loc[pcap_df["src_ip"] != args.server],
                            tcp_trace_df,
                            left_on="arrival_time",
                            right_on="time_tcp_probe",
                            tolerance=0.01,
                        )
                        merged_df = pd.concat(
                            [merged_df, pcap_df.loc[pcap_df["src_ip"] == args.server]]
                        )
                        merged_df = merged_df.sort_values("arrival_time")
                        merged_df.to_csv(
                            "{}{}".format(args.folder, pcap).replace(".pcap", ".csv")
                        )
                    else:
                        pcap_df.to_csv(
                            "{}{}".format(args.folder, pcap).replace(".pcap", ".csv")
                        )
                except:
                    print("\rCould not merge data for pcap no: {}".format(pcap))
                    pcap_df.to_csv(
                        "{}{}".format(args.folder, pcap).replace(".pcap", ".csv")
                    )

                n.value += 1

            else:
                print("File does not match regex: {}".format(pcap))
        else:
            print("File is not from type PCAP: {}".format(pcap))


from itertools import islice


def chunk(it, size):
    it = iter(it)
    return iter(lambda: tuple(islice(it, size)), ())


if __name__ == "__main__":

    parser = ArgumentParser()
    parser.add_argument("-f", "--folder", required=True, help="Folder with pcaps.")
    parser.add_argument(
        "-p",
        "--prefix",
        required=True,
        help="Filename prefix e.g. 2021-03-17_bandwidth_tcp_bbr_",
    )
    parser.add_argument(
        "-t",
        "--tcp_trace",
        required=True,
        help="Format of tcp trace txt files e.g: 2021_03_30_bandwidth_reverse_tcp_tcp_trace_ for "
        "2021_03_30_bandwidth_reverse_tcp_tcp_trace_1.txt",
    )
    parser.add_argument(
        "-c",
        "--cores",
        default=1,
        type=int,
        help="Number of cores for multiprocessing.",
    )
    parser.add_argument(
        "--port",
        default=5201,
        type=int,
        help="iPerf3 port used for measurements",
    )
    parser.add_argument(
        "--server",
        default="130.75.73.69",
        type=str,
        help="iPerf3 server ip used for measurements",
    )

    args = parser.parse_args()

    manager = multiprocessing.Manager()

    # regex for protokoll, algo and bitrate
    regex = r".*_bandwidth_[reverse_]*(.+)_(.+)_(\d+)\.pcap"

    csv_header = "n,start_time,end_time,payload_size,protocol,algorithm,direction,packages_received,syns_in_pcap\n"
    n = manager.Value("i", 0)
    filenames = os.listdir(args.folder)
    number_of_files = len(filenames)

    pcap_list = []
    jobs = []

    st = time()

    for filename in filenames:
        if filename.endswith(".pcap") and filename.startswith(args.prefix):
            if re.match(regex, filename):
                pcap_list.append(filename)
    pcap_list.sort()

    print("Found {} pcap files in {} files.".format(len(pcap_list), len(filenames)))
    if len(pcap_list) == 0:
        print("Abort no pcaps found with prefix: {}".format(args.prefix))
        print("{}{}".format(args.folder, args.prefix))
        exit(1)

    parts = chunk(pcap_list, ceil(len(pcap_list) / args.cores))
    print("Start processing with {} jobs.".format(args.cores))

    for p in parts:
        process = multiprocessing.Process(target=format_pcaps_to_csv, args=(p, "dummy"))
        jobs.append(process)

    for j in jobs:
        j.start()

    print("Started all jobs.")
    # Ensure all of the processes have finished
    finished_job_counter = 0
    working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
    w = 0
    while len(jobs) != finished_job_counter:
        sleep(1)
        print(
            "\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcaps. ({}%) ".format(
                working[w],
                working[w],
                working[w],
                len(jobs),
                finished_job_counter,
                n.value,
                len(pcap_list),
                round((n.value / len(pcap_list)) * 100, 2),
            ),
            end="",
        )
        finished_job_counter = 0
        for j in jobs:
            if not j.is_alive():
                finished_job_counter += 1
        if (w + 1) % len(working) == 0:
            w = 0
        else:
            w += 1
    print("")

    et = time()
    # get the execution time
    elapsed_time = et - st
    print("Execution time:", elapsed_time, "seconds")
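A hypothetical invocation (folder and prefixes invented, flags taken from the ArgumentParser above); --server and --port default to the iPerf3 endpoint used in the measurements.

```bash
./format_throughput_pcap_to_csv.py \
    -f drive1/ \
    -p automated_2021-03-30_bandwidth_tcp_bbr_ \
    -t automated_2021-03-30_bandwidth_tcp_tcp_trace_ \
    -c 8
```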
@@ -23,9 +23,16 @@ GET_IPV4_SHELL_COMMAND = "ip a | grep {} | grep inet | cut -d' ' -f6 | cut -d'/'
|
|||||||
NR_CQI_COMMAND = b'AT+QNWCFG="nr5g_csi"\r\n'
|
NR_CQI_COMMAND = b'AT+QNWCFG="nr5g_csi"\r\n'
|
||||||
NR_SERVINGCELL_COMMAND = b'AT+QENG="servingcell"\r\n'
|
NR_SERVINGCELL_COMMAND = b'AT+QENG="servingcell"\r\n'
|
||||||
NR_EN_DC_STATUS_COMMAND = b"AT+QENDC\r\n"
|
NR_EN_DC_STATUS_COMMAND = b"AT+QENDC\r\n"
|
||||||
NR_SERIAL_RESPOND_TIME = 0.3 # s
|
NE_CA_COMMAND = b'AT+QCAINFO\r\n'
|
||||||
|
NR_SERIAL_RESPOND_TIME = 0.5 # s
|
||||||
CMD_TIME_EPOCH = "date +%s"
|
CMD_TIME_EPOCH = "date +%s"
|
||||||
TIMEOUT_OFFSET = 10.0
|
TIMEOUT_OFFSET = 10.0
|
||||||
|
WAIT_AFTER_IPERF = 5.0
|
||||||
|
|
||||||
|
modem_serial_obj = None
|
||||||
|
gps_serial_obj = None
|
||||||
|
|
||||||
|
MODEM_MODEL = None
|
||||||
|
|
||||||
|
|
||||||
class ProcessHandler:
|
class ProcessHandler:
|
||||||
@@ -192,6 +199,10 @@ def is_tcp_probe_enabled():
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def disable_tso(interface):
|
||||||
|
os.system("ethtool -K {} tx off sg off tso off gro off".format(interface))
|
||||||
|
|
||||||
|
|
||||||
def enable_tcp_probe():
|
def enable_tcp_probe():
|
||||||
os.system("echo '1' > /sys/kernel/debug/tracing/events/tcp/tcp_probe/enable")
|
os.system("echo '1' > /sys/kernel/debug/tracing/events/tcp/tcp_probe/enable")
|
||||||
|
|
||||||
@@ -211,7 +222,8 @@ def raise_receive_window():


 def monitor_serial(ser, output_file):
-    run_cmds = [NR_CQI_COMMAND, NR_SERVINGCELL_COMMAND, NR_EN_DC_STATUS_COMMAND]
+    #run_cmds = [NR_CQI_COMMAND, NR_SERVINGCELL_COMMAND, NR_EN_DC_STATUS_COMMAND, NE_CA_COMMAND]
+    run_cmds = [b"at!gstatus?\r\n", b"AT!NRINFO?\r\n"]
     try:
         while ser.is_open:
             response = subprocess.check_output(CMD_TIME_EPOCH, shell=True).decode(
@@ -219,15 +231,15 @@ def monitor_serial(ser, output_file):
             )
             for cmd in run_cmds:
                 ser.write(cmd)
-                sleep(0.3)
+                sleep(NR_SERIAL_RESPOND_TIME)
                 response += ser.read(ser.inWaiting()).decode("utf-8")
-            response = (
-                response.replace("\n", ";")
-                .replace("\r", "")
-                .replace(";;OK", ";")
-                .replace(";;", ";")
-            )
-            write_to_file(output_file, response + "\n")
+            #response = (
+            #    response.replace("\n", ";")
+            #    .replace("\r", "")
+            #    .replace(";;OK", ";")
+            #    .replace(";;", ";")
+            #)
+            write_to_file(output_file, response + ";;;\n")
     except:
         if not ser.is_open:
             print_message("Serial port is closed. Exit monitoring thread.")
@@ -238,6 +250,53 @@ def monitor_serial(ser, output_file):
             return
+
+
+def start_serial_monitoring(ser, baudrate, folder, prefix):
+    global modem_serial_obj
+
+    print_message("Opening serial port for {}".format(ser))
+    modem_serial_obj = serial.Serial(
+        port=ser,
+        baudrate=baudrate,
+    )
+    modem_serial_obj.isOpen()
+    ser_filepath = "{}{}_serial_monitor_output.txt".format(
+        folder, prefix
+    )
+    ser_thread = Thread(
+        target=monitor_serial,
+        args=(
+            modem_serial_obj,
+            ser_filepath,
+        ),
+    )
+    ser_thread.start()
+
+
+def is_serial_monitoring_running():
+    return modem_serial_obj.is_open
+
+
+def start_gps_monitoring(gps, baudrate, folder, prefix):
+    global gps_serial_obj
+
+    print_message("Opening GPS serial port for {}".format(gps))
+    gps_serial_obj = serial.Serial(
+        gps,
+        baudrate=baudrate,
+    )
+    gps_ser_filepath = "{}{}_gps.nmea".format(
+        folder, prefix
+    )
+    gps_ser_thread = Thread(
+        target=monitor_gps,
+        args=(
+            gps_serial_obj,
+            gps_ser_filepath,
+        ),
+    )
+    gps_ser_thread.start()
+
+
 def monitor_gps(ser, output_file):
     ser.flushInput()
     ser.flushOutput()
@@ -245,10 +304,13 @@ def monitor_gps(ser, output_file):
     ser.readline()
     try:
         while ser.is_open:
-            nmea_sentence = ser.readline() #GPRMC
+            nmea_sentence = ser.readline() # GPRMC
             nmea_str = nmea_sentence.decode("utf-8")
             if nmea_str.startswith("$GPRMC"):
-                write_to_file(output_file, nmea_str)
+                time_epoch = subprocess.check_output(CMD_TIME_EPOCH, shell=True).decode(
+                    "utf-8"
+                )
+                write_to_file(output_file, "{},{}".format(nmea_str.replace("\n", ""), time_epoch))
     except:
         if not ser.is_open:
             print_message("GPS serial port is closed. Exit monitoring thread.")
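monitor_gps() above logs raw $GPRMC sentences plus an epoch timestamp; decoding them into decimal degrees happens later in the pipeline. A rough, self-contained sketch of that conversion (field layout per NMEA 0183; the sample sentence is made up):

def gprmc_to_lat_lon(sentence):
    fields = sentence.split(",")
    # fields[3]/[5] are ddmm.mmmm values, fields[4]/[6] the hemisphere letters
    lat = int(fields[3][:2]) + float(fields[3][2:]) / 60.0
    lon = int(fields[5][:3]) + float(fields[5][3:]) / 60.0
    if fields[4] == "S":
        lat = -lat
    if fields[6] == "W":
        lon = -lon
    return lat, lon

print(gprmc_to_lat_lon("$GPRMC,123519,A,4807.038,N,01131.000,E,022.4,084.4,230394,003.1,W*6A"))
# -> roughly (48.1173, 11.5167)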
@@ -261,15 +323,48 @@ def monitor_gps(ser, output_file):


 def connect_moden(provider="telekom"):
     print_message("Connect modem with provider {} ...".format(provider))
-    os.system("/root/connect-modem.py -l {}".format(provider))
+    if MODEM_MODEL == "EM9191":
+        os.system("/root/connection_mbim.py -l {}".format(provider))
+    else:
+        os.system("/root/connect-modem.py -l {}".format(provider))
     print_message("...done")


-def reconnect_modem(provider="telekom"):
+def reconnect_modem(provider="telekom", hard=False):
+    #TODO
+    os.system("/root/connection_mbim.py -s")
+    sleep(2)
+    os.system("/root/connection_mbim.py -l {}".format(provider))
+
+    return
+    global modem_serial_obj
     print_message("Reonnect modem with provider {} ...".format(provider))
-    os.system("/root/connect-modem.py -s")
-    sleep(5)
-    os.system("/root/connect-modem.py -l {}".format(provider))
+    if hard:
+        print_message("Performing HARD reconnect...")
+        try:
+            if modem_serial_obj.is_open:
+                modem_serial_obj.write(b'at+cfun=4\r\n')
+                sleep(NR_SERIAL_RESPOND_TIME)
+                sleep(2)
+                modem_serial_obj.write(b'at+cfun=1\r\n')
+                sleep(NR_SERIAL_RESPOND_TIME)
+        except Exception as e:
+            if not modem_serial_obj.is_open:
+                print_message("Serial port is closed. {}".format(e))
+                os.system("/root/connect-modem.py -s")
+            else:
+                print_message(
+                    "Something went wrong while writing to serial. {}".format(e)
+                )
+                os.system("/root/connect-modem.py -s")
+        sleep(2)
+        os.system("/root/connect-modem.py -s")
+        sleep(5)
+        os.system("/root/connect-modem.py -l {}".format(provider))
+    else:
+        os.system("/root/connect-modem.py -s")
+        sleep(5)
+        os.system("/root/connect-modem.py -l {}".format(provider))
     print_message("...done")
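The hard reconnect above toggles the radio with at+cfun=4 (RF off) and at+cfun=1 (full functionality) separated by fixed sleeps. A variant that waits for the modem's "OK"/"ERROR" instead of sleeping could look like the sketch below; the port object, its read timeout, and the overall timeout value are assumptions, not part of the diff.

from time import sleep, time as now

def at_command_ok(ser, cmd, timeout=5.0):
    # ser is assumed to be an open serial.Serial with a read timeout set.
    ser.write(cmd)
    deadline = now() + timeout
    buf = b""
    while now() < deadline:
        buf += ser.read(ser.in_waiting or 1)
        if b"OK" in buf or b"ERROR" in buf:
            break
        sleep(0.1)
    return b"OK" in buf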
@@ -339,6 +434,7 @@ class Server:
         sleep(2)
         ws_filter = ""
         congestion_control_index = 0
+
         if server_is_sender:
             # server sends
             if not is_tcp_probe_enabled():
@@ -441,7 +537,7 @@ class Server:
                 "--one-off",
             ]
             subprocess.call(iperf_command)
-            sleep(2)
+            sleep(WAIT_AFTER_IPERF)
             processHandler.kill_all()
             congestion_control_index = (congestion_control_index + 1) % len(
                 tcp_algo
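Both server and client cycle congestion_control_index over a tcp_algo list between runs. A minimal sketch of the usual way such a list is applied per run (assumption: iperf3's -C/--congestion client flag on Linux; the algorithm list and server address are placeholders):

import subprocess

tcp_algo = ["cubic", "bbr"]  # assumed list; the real one is defined elsewhere in the script
congestion_control_index = 0
for run in range(4):
    algo = tcp_algo[congestion_control_index]
    subprocess.call(["iperf3", "-c", "192.0.2.1", "-C", algo, "-t", "10"])
    congestion_control_index = (congestion_control_index + 1) % len(tcp_algo)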
@@ -532,46 +628,11 @@ class Client:
         sleep(1)
         print_message("Start measurement")
 
-        ser = None
-
         if self.config["serial"] is not None:
-            print_message("Opening serial port for {}".format(self.config["serial"]))
-            ser = serial.Serial(
-                port=self.config["serial"],
-                baudrate=self.config["baudrate"],
-            )
-            ser.isOpen()
-            ser_filepath = "{}{}_serial_monitor_output.txt".format(
-                self.config["folder"], self.config["prefix"]
-            )
-            ser_thread = Thread(
-                target=monitor_serial,
-                args=(
-                    ser,
-                    ser_filepath,
-                ),
-            )
-            ser_thread.start()
-
-        gps_ser = None
-
+            start_serial_monitoring(self.config["serial"], self.config["baudrate"], self.config["folder"], self.config["prefix"])
         if self.config["gps"] is not None:
-            print_message("Opening GPS serial port for {}".format(self.config["gps"]))
-            gps_ser = serial.Serial(
-                self.config["gps"],
-                baudrate=self.config["gps_baudrate"],
-            )
-            gps_ser_filepath = "{}{}_gps.nmea".format(
-                self.config["folder"], self.config["prefix"]
-            )
-            gps_ser_thread = Thread(
-                target=monitor_gps,
-                args=(
-                    gps_ser,
-                    gps_ser_filepath,
-                ),
-            )
-            gps_ser_thread.start()
-
+            start_gps_monitoring(self.config["gps"], self.config["gps_baudrate"], self.config["folder"], self.config["prefix"])
         if self.config["bandwidth"]:
             self.bandwidth()
@@ -586,18 +647,17 @@ class Client:
         elif self.config["ping"]:
             self.ping()
 
-        if ser is not None:
+        if modem_serial_obj is not None:
             print_message("Closing serial port...")
-            ser.close()
+            modem_serial_obj.close()
             sleep(2)
             print_message("done...")
-        if gps_ser is not None:
+        if gps_serial_obj is not None:
             print_message("Closing GPS serial port...")
-            gps_ser.close()
+            gps_serial_obj.close()
             sleep(2)
             print_message("done...")
 
     def ping(self):
         c = "ping {} -I {} -i {} -c {}".format(
             self.config["server"],
@@ -762,8 +822,18 @@ class Client:
         ws_filter = "{} and port {}".format("tcp", self.config["port"])
         print_message("Use ws filter: {}".format(ws_filter))
         for n in range(1, self.config["number_of_measurements"] + 1):
+            reconnect_count = 0
             if not is_modem_connected():
+                background_write_to_file(
+                    filepath="{}{}_reconnect.log".format(
+                        self.config["folder"], self.config["prefix"]
+                    ),
+                    content='{}\n'.format(datetime.timestamp(datetime.now())),
+                )
                 reconnect_modem()
+                sleep(2)
+                if not is_serial_monitoring_running():
+                    start_serial_monitoring()
             print_message(
                 "{} of {}".format(n, self.config["number_of_measurements"])
             )
@@ -807,12 +877,30 @@ class Client:
             iperf_return = 0
             while not is_measurement_done or iperf_return != 0:
                 if iperf_return != 0:
-                    reconnect_modem()
+                    background_write_to_file(
+                        filepath="{}{}_reconnect.log".format(
+                            self.config["folder"], self.config["prefix"]
+                        ),
+                        content='{}\n'.format(datetime.timestamp(datetime.now())),
+                    )
+                    reconnect_modem(hard=reconnect_count > 5)
+                    reconnect_count += 1
+                    sleep(2)
+                    if not is_serial_monitoring_running():
+                        start_serial_monitoring()
                 try:
                     try:
-                        iperf_return = subprocess.call(iperf_command, timeout=float(time) + TIMEOUT_OFFSET)
+                        iperf_return = subprocess.call(
+                            iperf_command, timeout=float(time) + TIMEOUT_OFFSET
+                        )
                     except:
                         print_message("iPerf timed out...")
+                        background_write_to_file(
+                            filepath="{}{}_reconnect.log".format(
+                                self.config["folder"], self.config["prefix"]
+                            ),
+                            content='{}\n'.format(datetime.timestamp(datetime.now())),
+                        )
                         reconnect_modem()
                 except KeyboardInterrupt:
                     exit()
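background_write_to_file() used above is defined elsewhere in the script; a hypothetical minimal equivalent that appends a timestamp line without blocking the measurement loop might look like this (the threading approach is an assumption):

from threading import Thread

def background_write_to_file(filepath, content):
    def _write():
        with open(filepath, "a") as f:
            f.write(content)
    Thread(target=_write, daemon=True).start()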
@@ -831,8 +919,18 @@ class Client:
             enable_tcp_probe()
             print_message("tcp probe is now enabled")
         for n in range(1, self.config["number_of_measurements"] + 1):
+            reconnect_count = 0
             if not is_modem_connected():
+                background_write_to_file(
+                    filepath="{}{}_reconnect.log".format(
+                        self.config["folder"], self.config["prefix"]
+                    ),
+                    content='{}\n'.format(datetime.timestamp(datetime.now())),
+                )
                 reconnect_modem()
+                sleep(2)
+                if not is_serial_monitoring_running():
+                    start_serial_monitoring()
             print_message(
                 "{} of {}".format(n, self.config["number_of_measurements"])
             )
@@ -875,12 +973,30 @@ class Client:
             iperf_return = 0
             while not is_measurement_done or iperf_return != 0:
                 if iperf_return != 0:
-                    reconnect_modem()
+                    background_write_to_file(
+                        filepath="{}{}_reconnect.log".format(
+                            self.config["folder"], self.config["prefix"]
+                        ),
+                        content='{}\n'.format(datetime.timestamp(datetime.now())),
+                    )
+                    reconnect_modem(hard=reconnect_count > 5)
+                    reconnect_count += 1
+                    sleep(2)
+                    if not is_serial_monitoring_running():
+                        start_serial_monitoring()
                 try:
                     try:
-                        iperf_return = subprocess.call(iperf_command, timeout=float(time) + TIMEOUT_OFFSET)
+                        iperf_return = subprocess.call(
+                            iperf_command, timeout=float(time) + TIMEOUT_OFFSET
+                        )
                     except:
                         print_message("iPerf timed out...")
+                        background_write_to_file(
+                            filepath="{}{}_reconnect.log".format(
+                                self.config["folder"], self.config["prefix"]
+                            ),
+                            content='{}\n'.format(datetime.timestamp(datetime.now())),
+                        )
                         reconnect_modem()
                 except KeyboardInterrupt:
                     exit()
@@ -889,7 +1005,7 @@ class Client:
                 congestion_control_index = (congestion_control_index + 1) % len(
                     tcp_algo
                 )
-                sleep(4)
+                sleep(WAIT_AFTER_IPERF + 2)
 
     def cbr(self):
         bitrate = "1M"
@@ -1097,6 +1213,7 @@ if __name__ == "__main__":
         default=None,
         help="Start in client mode and set the server IPv4 address.",
     )
+    parser.add_argument("--modem", default="EM9191", help="Modem model name.")
     parser.add_argument(
         "--prefix", default=now.strftime("%Y-%m-%d"), help="Prefix on filename."
     )
@@ -1198,6 +1315,9 @@ if __name__ == "__main__":
 
     args = parser.parse_args()
 
+    disable_tso(args.interface)
+    MODEM_MODEL = args.modem
+
     if args.server:
         asyncio.run(start_server(args))
     elif args.client is not None:
85  plot_gps_csv.py  (new executable file)
@@ -0,0 +1,85 @@
#!/usr/bin/env python3

from argparse import ArgumentParser

import numpy as np
import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt

import seaborn as sns

sns.set()
#sns.set(font_scale=1.5)


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Messfahrt csv")
    parser.add_argument("-a", "--column", required=True, help="Column to plot")
    parser.add_argument("-l", "--label", help="Label above the plot.")
    parser.add_argument("--no_legend", action="store_false", default=True, help="Do not show legend.")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")

    parser.add_argument(
        "--show_providerinfo",
        default=False,
        help="Show providerinfo for map tiles an zoom levels.",
    )

    args = parser.parse_args()

    df = pd.read_csv(args.file)

    gdf = gpd.GeoDataFrame(
        df,
        geometry=gpd.points_from_xy(df["longitude"], df["latitude"]),
        crs="EPSG:4326",
    )

    gdf["srtt"] = gdf["srtt"].apply(lambda x: x / 10 ** 6)
    gdf["is_retranmission"] = gdf["is_retranmission"].replace(True, np.NaN).dropna().astype(float)

    print("Start plotting...")

    df_wm = gdf.to_crs(epsg=3857)
    ax2 = df_wm.plot()
    ax2 = df_wm.plot(column=args.column, cmap="hot", legend=args.no_legend, ax=ax2, legend_kwds={"label": args.label},)
    # ax2 = df_wm.plot.scatter(x="longitude", y="latitude", c="kmh", cmap="hot")
    # zoom 17 is pretty
    cx.add_basemap(ax2, source=cx.providers.OpenStreetMap.Mapnik, zoom=17)

    # gdf.plot()
    ax2.set_axis_off()
    if not args.no_legend:
        ax2.set_title(args.label if args.label else args.column)
    else:
        fig = ax2.figure
        cb_ax = fig.axes[0]
        cb_ax.set_label(args.label)
        cb_ax.tick_params(labelsize=30)

    if args.show_providerinfo:
        #####################################
        # Identifying how many tiles
        latlon_outline = gdf.to_crs("epsg:4326").total_bounds
        def_zoom = cx.tile._calculate_zoom(*latlon_outline)
        print(f"Default Zoom level {def_zoom}")

        cx.howmany(*latlon_outline, def_zoom, ll=True)
        cx.howmany(*latlon_outline, def_zoom + 1, ll=True)
        cx.howmany(*latlon_outline, def_zoom + 2, ll=True)

        # Checking out some of the other providers and tiles
        print(cx.providers.CartoDB.Voyager)
        print(cx.providers.Stamen.TonerLite)
        print(cx.providers.Stamen.keys())
        #####################################

    # df.plot(x="longitude", y="latitude", kind="scatter", colormap="YlOrRd")

    if args.save:
        plt.savefig("{}gps_plot.eps".format(args.save), bbox_inches="tight")
    else:
        plt.show()
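The core pattern of plot_gps_csv.py is: build a GeoDataFrame from lon/lat columns, reproject to Web Mercator, then draw an OpenStreetMap basemap under the points. A minimal self-contained sketch of that pattern (the three sample points are made up):

import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt

df = pd.DataFrame({"longitude": [7.46, 7.47, 7.48], "latitude": [51.51, 51.515, 51.52], "srtt": [0.03, 0.05, 0.04]})
gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df["longitude"], df["latitude"]), crs="EPSG:4326")
ax = gdf.to_crs(epsg=3857).plot(column="srtt", cmap="hot", legend=True)
cx.add_basemap(ax, source=cx.providers.OpenStreetMap.Mapnik)
plt.show()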
71  plot_gps_new.py  (new executable file)
@@ -0,0 +1,71 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
import geopandas as gpd
|
||||||
|
import contextily as cx
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument("-f", "--file", required=True, help="Messfahrt csv")
|
||||||
|
parser.add_argument("-a", "--column", required=True, help="Column to plot")
|
||||||
|
parser.add_argument("-l", "--label", help="Label above the plot.")
|
||||||
|
parser.add_argument("--no_legend", action="store_false", default=True, help="Do not show legend.")
|
||||||
|
parser.add_argument("--save", default=None, help="Location to save pdf file.")
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--show_providerinfo",
|
||||||
|
default=False,
|
||||||
|
help="Show providerinfo for map tiles an zoom levels.",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
df = pd.read_csv(args.file)
|
||||||
|
|
||||||
|
gdf = gpd.GeoDataFrame(
|
||||||
|
df,
|
||||||
|
geometry=gpd.points_from_xy(df["longitude"], df["latitude"]),
|
||||||
|
crs="EPSG:4326",
|
||||||
|
)
|
||||||
|
|
||||||
|
gdf["srtt"] = gdf["srtt"].apply(lambda x: x / 10 ** 6)
|
||||||
|
|
||||||
|
print("Start plotting...")
|
||||||
|
|
||||||
|
df_wm = gdf.to_crs(epsg=3857)
|
||||||
|
ax2 = df_wm.plot()
|
||||||
|
ax2 = df_wm.plot(args.column, cmap="hot", legend=args.no_legend, ax=ax2)
|
||||||
|
# ax2 = df_wm.plot.scatter(x="longitude", y="latitude", c="kmh", cmap="hot")
|
||||||
|
# zoom 17 is pretty
|
||||||
|
cx.add_basemap(ax2, source=cx.providers.OpenStreetMap.Mapnik, zoom=17)
|
||||||
|
|
||||||
|
# gdf.plot()
|
||||||
|
ax2.set_axis_off()
|
||||||
|
ax2.set_title(args.label if args.label else args.column)
|
||||||
|
|
||||||
|
if args.show_providerinfo:
|
||||||
|
#####################################
|
||||||
|
# Identifying how many tiles
|
||||||
|
latlon_outline = gdf.to_crs("epsg:4326").total_bounds
|
||||||
|
def_zoom = cx.tile._calculate_zoom(*latlon_outline)
|
||||||
|
print(f"Default Zoom level {def_zoom}")
|
||||||
|
|
||||||
|
cx.howmany(*latlon_outline, def_zoom, ll=True)
|
||||||
|
cx.howmany(*latlon_outline, def_zoom + 1, ll=True)
|
||||||
|
cx.howmany(*latlon_outline, def_zoom + 2, ll=True)
|
||||||
|
|
||||||
|
# Checking out some of the other providers and tiles
|
||||||
|
print(cx.providers.CartoDB.Voyager)
|
||||||
|
print(cx.providers.Stamen.TonerLite)
|
||||||
|
print(cx.providers.Stamen.keys())
|
||||||
|
#####################################
|
||||||
|
|
||||||
|
# df.plot(x="longitude", y="latitude", kind="scatter", colormap="YlOrRd")
|
||||||
|
|
||||||
|
if args.save:
|
||||||
|
plt.savefig("{}gps_plot.pdf".format(args.save))
|
||||||
|
else:
|
||||||
|
plt.show()
|
||||||
0  plot_rn_testat_results.py  (new file, empty)
166  plot_single_transmission.py  (new executable file)
@@ -0,0 +1,166 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import math
|
||||||
|
import multiprocessing
|
||||||
|
import os
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import pandas as pd
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
|
||||||
|
# Using seaborn's style
|
||||||
|
#plt.style.use('seaborn')
|
||||||
|
|
||||||
|
tex_fonts = {
|
||||||
|
"pgf.texsystem": "lualatex",
|
||||||
|
# "legend.fontsize": "x-large",
|
||||||
|
# "figure.figsize": (15, 5),
|
||||||
|
"axes.labelsize": 15, # "small",
|
||||||
|
# "axes.titlesize": "x-large",
|
||||||
|
"xtick.labelsize": 15, # "small",
|
||||||
|
"ytick.labelsize": 15, # "small",
|
||||||
|
"legend.fontsize": 15,
|
||||||
|
"axes.formatter.use_mathtext": True,
|
||||||
|
"mathtext.fontset": "dejavusans",
|
||||||
|
}
|
||||||
|
|
||||||
|
#plt.rcParams.update(tex_fonts)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
|
||||||
|
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
|
||||||
|
parser.add_argument("--save", required=True, help="Location to save pdf file.")
|
||||||
|
parser.add_argument(
|
||||||
|
"-i",
|
||||||
|
"--interval",
|
||||||
|
default=10,
|
||||||
|
type=int,
|
||||||
|
help="Time interval for rolling window.",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
pcap_csv_list = list()
|
||||||
|
for filename in os.listdir(args.pcap_csv_folder):
|
||||||
|
if filename.endswith(".csv") and "tcp" in filename:
|
||||||
|
pcap_csv_list.append(filename)
|
||||||
|
|
||||||
|
counter = 1
|
||||||
|
if len(pcap_csv_list) == 0:
|
||||||
|
print("No CSV files found.")
|
||||||
|
|
||||||
|
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
|
||||||
|
|
||||||
|
for csv in pcap_csv_list:
|
||||||
|
|
||||||
|
print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor(counter/len(pcap_csv_list))))
|
||||||
|
|
||||||
|
#try:
|
||||||
|
transmission_df = pd.read_csv(
|
||||||
|
"{}{}".format(args.pcap_csv_folder, csv),
|
||||||
|
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
|
||||||
|
transmission_df = transmission_df.set_index("datetime")
|
||||||
|
transmission_df.index = pd.to_datetime(transmission_df.index)
|
||||||
|
transmission_df = transmission_df.sort_index()
|
||||||
|
|
||||||
|
# srtt to [s]
|
||||||
|
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6)
|
||||||
|
|
||||||
|
# key for columns and level for index
|
||||||
|
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
|
||||||
|
transmission_df["goodput"] = transmission_df["goodput"].apply(
|
||||||
|
lambda x: ((x * 8) / args.interval) / 10**6
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
|
||||||
|
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
|
||||||
|
lambda x: ((x * 8) / args.interval) / 10 ** 6
|
||||||
|
)
|
||||||
|
|
||||||
|
# set meta values and remove all not needed columns
|
||||||
|
cc_algo = transmission_df["congestion_control"].iloc[0]
|
||||||
|
cc_algo = cc_algo.upper()
|
||||||
|
transmission_direction = transmission_df["direction"].iloc[0]
|
||||||
|
|
||||||
|
#transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
|
||||||
|
|
||||||
|
# read serial csv
|
||||||
|
serial_df = pd.read_csv(args.serial_file)
|
||||||
|
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
|
||||||
|
serial_df = serial_df.set_index("datetime")
|
||||||
|
serial_df.index = pd.to_datetime(serial_df.index)
|
||||||
|
serial_df.sort_index()
|
||||||
|
|
||||||
|
transmission_df = pd.merge_asof(
|
||||||
|
transmission_df,
|
||||||
|
serial_df,
|
||||||
|
tolerance=pd.Timedelta("1s"),
|
||||||
|
right_index=True,
|
||||||
|
left_index=True,
|
||||||
|
)
|
||||||
|
transmission_df = transmission_df.rename(columns={"PCID": "lte_pcid", "PCID.1": "nr_pcid"})
|
||||||
|
|
||||||
|
transmission_df.index = transmission_df["arrival_time"]
|
||||||
|
|
||||||
|
# transmission timeline
|
||||||
|
scaley = 1.5
|
||||||
|
scalex = 1.0
|
||||||
|
plt.title("{} with {}".format(transmission_direction, cc_algo))
|
||||||
|
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
|
||||||
|
fig.subplots_adjust(right=0.75)
|
||||||
|
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
|
||||||
|
ax0 = ax[0]
|
||||||
|
ax1 = ax0.twinx()
|
||||||
|
ax2 = ax0.twinx()
|
||||||
|
#ax2.spines.right.set_position(("axes", 1.22))
|
||||||
|
|
||||||
|
ax00 = ax[1]
|
||||||
|
ax01 = ax00.twinx()
|
||||||
|
|
||||||
|
# Plot vertical lines
|
||||||
|
lte_handovers = transmission_df["lte_pcid"].diff().dropna()
|
||||||
|
for index, value in lte_handovers.items():
|
||||||
|
if value > 0:
|
||||||
|
ax00.axvline(index, ymin=0, ymax=1, color="skyblue", label="4G Handover")
|
||||||
|
|
||||||
|
nr_handovers = transmission_df["nr_pcid"].diff().dropna()
|
||||||
|
for index, value in nr_handovers.items():
|
||||||
|
if value > 0:
|
||||||
|
ax00.axvline(index, ymin=0, ymax=1, color="greenyellow", label="5G Handover")
|
||||||
|
|
||||||
|
ax0.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd")
|
||||||
|
ax1.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT")
|
||||||
|
ax2.plot(transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput")
|
||||||
|
ax00.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI")
|
||||||
|
ax01.plot(transmission_df["DL_bandwidth"].dropna(), color="peru", linestyle="dotted", label="bandwidth")
|
||||||
|
|
||||||
|
ax2.spines.right.set_position(("axes", 1.1))
|
||||||
|
|
||||||
|
ax0.set_ylim(0, 5000)
|
||||||
|
ax1.set_ylim(0, 0.3)
|
||||||
|
ax2.set_ylim(0, 500)
|
||||||
|
ax00.set_ylim(0, 16)
|
||||||
|
ax01.set_ylim(0, 21)
|
||||||
|
|
||||||
|
ax00.set_xlabel("arrival time [s]")
|
||||||
|
ax2.set_ylabel("Goodput [mbps]")
|
||||||
|
ax00.set_ylabel("CQI")
|
||||||
|
ax1.set_ylabel("sRTT [s]")
|
||||||
|
ax0.set_ylabel("cwnd")
|
||||||
|
ax01.set_ylabel("Bandwidth [MHz]")
|
||||||
|
|
||||||
|
fig.legend(loc="lower right")
|
||||||
|
|
||||||
|
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
|
||||||
|
#except Exception as e:
|
||||||
|
# print("Error processing file: {}".format(csv))
|
||||||
|
# print(str(e))
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
plt.close(fig)
|
||||||
|
plt.clf()
|
||||||
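The goodput_rolling column computed in plot_single_transmission.py above is a payload byte count summed over a time-based rolling window and converted to Mbit/s. A small self-contained sketch of that computation (timestamps and payload sizes are made up):

import pandas as pd

interval = 10  # seconds, mirrors the --interval argument
df = pd.DataFrame(
    {"payload_size": [1400, 1400, 1400, 1400]},
    index=pd.to_datetime(["2023-01-01 12:00:00", "2023-01-01 12:00:03", "2023-01-01 12:00:06", "2023-01-01 12:00:12"]),
)
rolling_bytes = df["payload_size"].rolling("{}s".format(interval)).sum()
goodput_mbps = rolling_bytes * 8 / interval / 10 ** 6
print(goodput_mbps)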
343  plot_single_transmission_EM9190.py  (new executable file)
@@ -0,0 +1,343 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import math
|
||||||
|
import multiprocessing
|
||||||
|
import os
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
|
||||||
|
import seaborn as sns
|
||||||
|
|
||||||
|
sns.set()
|
||||||
|
#sns.set(font_scale=1.5)
|
||||||
|
|
||||||
|
tex_fonts = {
|
||||||
|
"pgf.texsystem": "lualatex",
|
||||||
|
# "legend.fontsize": "x-large",
|
||||||
|
# "figure.figsize": (15, 5),
|
||||||
|
"axes.labelsize": 15, # "small",
|
||||||
|
# "axes.titlesize": "x-large",
|
||||||
|
"xtick.labelsize": 15, # "small",
|
||||||
|
"ytick.labelsize": 15, # "small",
|
||||||
|
"legend.fontsize": 15,
|
||||||
|
"axes.formatter.use_mathtext": True,
|
||||||
|
"mathtext.fontset": "dejavusans",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# plt.rcParams.update(tex_fonts)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_cellid(value):
|
||||||
|
if isinstance(value, str):
|
||||||
|
try:
|
||||||
|
r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
|
||||||
|
return r
|
||||||
|
except Exception as e:
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return int(-1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
|
||||||
|
parser.add_argument(
|
||||||
|
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
|
||||||
|
)
|
||||||
|
parser.add_argument("--save", required=True, help="Location to save pdf file.")
|
||||||
|
parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
|
||||||
|
parser.add_argument(
|
||||||
|
"-i",
|
||||||
|
"--interval",
|
||||||
|
default=10,
|
||||||
|
type=int,
|
||||||
|
help="Time interval for rolling window.",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
pcap_csv_list = list()
|
||||||
|
for filename in os.listdir(args.pcap_csv_folder):
|
||||||
|
if filename.endswith(".csv") and "tcp" in filename:
|
||||||
|
pcap_csv_list.append(filename)
|
||||||
|
|
||||||
|
counter = 1
|
||||||
|
if len(pcap_csv_list) == 0:
|
||||||
|
print("No CSV files found.")
|
||||||
|
|
||||||
|
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
|
||||||
|
|
||||||
|
for csv in pcap_csv_list:
|
||||||
|
print(
|
||||||
|
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
|
||||||
|
counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# try:
|
||||||
|
transmission_df = pd.read_csv(
|
||||||
|
"{}{}".format(args.pcap_csv_folder, csv),
|
||||||
|
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df["datetime"] = pd.to_datetime(
|
||||||
|
transmission_df["datetime"]
|
||||||
|
) - pd.Timedelta(hours=1)
|
||||||
|
transmission_df = transmission_df.set_index("datetime")
|
||||||
|
transmission_df.index = pd.to_datetime(transmission_df.index)
|
||||||
|
transmission_df = transmission_df.sort_index()
|
||||||
|
|
||||||
|
# srtt to [s]
|
||||||
|
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
|
||||||
|
|
||||||
|
# key for columns and level for index
|
||||||
|
transmission_df["goodput"] = (
|
||||||
|
transmission_df["payload_size"]
|
||||||
|
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
|
||||||
|
.transform("sum")
|
||||||
|
)
|
||||||
|
transmission_df["goodput"] = transmission_df["goodput"].apply(
|
||||||
|
lambda x: ((x * 8) / args.interval) / 10 ** 6
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df["goodput_rolling"] = (
|
||||||
|
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
|
||||||
|
)
|
||||||
|
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
|
||||||
|
lambda x: ((x * 8) / args.interval) / 10 ** 6
|
||||||
|
)
|
||||||
|
|
||||||
|
# set meta values and remove all not needed columns
|
||||||
|
cc_algo = transmission_df["congestion_control"].iloc[0]
|
||||||
|
cc_algo = cc_algo.upper()
|
||||||
|
transmission_direction = transmission_df["direction"].iloc[0]
|
||||||
|
|
||||||
|
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
|
||||||
|
|
||||||
|
# read serial csv
|
||||||
|
serial_df = pd.read_csv(
|
||||||
|
args.serial_file, converters={"Cell_ID": convert_cellid}
|
||||||
|
)
|
||||||
|
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
|
||||||
|
hours=1
|
||||||
|
)
|
||||||
|
serial_df = serial_df.set_index("datetime")
|
||||||
|
serial_df.index = pd.to_datetime(serial_df.index)
|
||||||
|
serial_df.sort_index()
|
||||||
|
|
||||||
|
# print(serial_df["Cell_ID"])
|
||||||
|
|
||||||
|
# serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
|
||||||
|
# lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))
|
||||||
|
|
||||||
|
transmission_df = pd.merge_asof(
|
||||||
|
transmission_df,
|
||||||
|
serial_df,
|
||||||
|
tolerance=pd.Timedelta("1s"),
|
||||||
|
right_index=True,
|
||||||
|
left_index=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df.index = transmission_df["arrival_time"]
|
||||||
|
|
||||||
|
# replace 0 in RSRQ with Nan
|
||||||
|
transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
|
||||||
|
0, np.NaN
|
||||||
|
)
|
||||||
|
transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)
|
||||||
|
|
||||||
|
# filter active state
|
||||||
|
for i in range(1, 5):
|
||||||
|
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
|
||||||
|
"LTE_SCC{}_bw".format(i)
|
||||||
|
]
|
||||||
|
|
||||||
|
mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
|
||||||
|
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
|
||||||
|
"LTE_SCC{}_effective_bw".format(i)
|
||||||
|
].where(mask, other=0)
|
||||||
|
|
||||||
|
# filter if sc is usesd for uplink
|
||||||
|
for i in range(1, 5):
|
||||||
|
mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
|
||||||
|
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
|
||||||
|
"LTE_SCC{}_effective_bw".format(i)
|
||||||
|
].where(mask, other=0)
|
||||||
|
|
||||||
|
# sum all effective bandwidth for 5G and 4G
|
||||||
|
transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
|
||||||
|
"SCC1_NR5G_bw"
|
||||||
|
].fillna(0)
|
||||||
|
|
||||||
|
transmission_df["lte_effective_bw_sum"] = (
|
||||||
|
transmission_df["LTE_SCC1_effective_bw"].fillna(0)
|
||||||
|
+ transmission_df["LTE_SCC2_effective_bw"].fillna(0)
|
||||||
|
+ transmission_df["LTE_SCC3_effective_bw"].fillna(0)
|
||||||
|
+ transmission_df["LTE_SCC4_effective_bw"].fillna(0)
|
||||||
|
+ transmission_df["LTE_bw"].fillna(0))
|
||||||
|
transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]
|
||||||
|
|
||||||
|
transmission_df["effective_bw_sum"] = transmission_df["nr_effective_bw_sum"] + transmission_df[
|
||||||
|
"lte_effective_bw_sum"]
|
||||||
|
|
||||||
|
# transmission timeline
|
||||||
|
scaley = 1.5
|
||||||
|
scalex = 1.0
|
||||||
|
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
|
||||||
|
fig.subplots_adjust(right=0.75)
|
||||||
|
if not args.fancy:
|
||||||
|
plt.title("{} with {}".format(transmission_direction, cc_algo))
|
||||||
|
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
|
||||||
|
ax0 = ax[0]
|
||||||
|
ax1 = ax0.twinx()
|
||||||
|
ax2 = ax0.twinx()
|
||||||
|
# ax2.spines.right.set_position(("axes", 1.22))
|
||||||
|
|
||||||
|
ax00 = ax[1]
|
||||||
|
ax01 = ax00.twinx()
|
||||||
|
ax02 = ax00.twinx()
|
||||||
|
|
||||||
|
# Plot vertical lines
|
||||||
|
first = True
|
||||||
|
lte_handovers = transmission_df["Cell_ID"].dropna().diff()
|
||||||
|
lte_hanover_plot = None
|
||||||
|
for index, value in lte_handovers.items():
|
||||||
|
if value > 0:
|
||||||
|
if first:
|
||||||
|
lte_hanover_plot = ax00.axvline(
|
||||||
|
index, ymin=0, ymax=1, color="skyblue", label="4G Handover"
|
||||||
|
)
|
||||||
|
first = False
|
||||||
|
else:
|
||||||
|
ax00.axvline(index, ymin=0, ymax=1, color="skyblue")
|
||||||
|
|
||||||
|
first = True
|
||||||
|
nr_handovers = (
|
||||||
|
transmission_df["NR5G_Cell_ID"].replace(0, np.NaN).dropna().diff()
|
||||||
|
)
|
||||||
|
|
||||||
|
nr_hanover_plot = None
|
||||||
|
for index, value in nr_handovers.items():
|
||||||
|
if value > 0:
|
||||||
|
if first:
|
||||||
|
nr_hanover_plot = ax00.axvline(
|
||||||
|
index, ymin=0, ymax=1, color="greenyellow", label="5G Handover"
|
||||||
|
)
|
||||||
|
first = False
|
||||||
|
else:
|
||||||
|
ax00.axvline(index, ymin=0, ymax=1, color="greenyellow")
|
||||||
|
|
||||||
|
snd_plot = ax0.plot(
|
||||||
|
transmission_df["snd_cwnd"].dropna(),
|
||||||
|
color="lime",
|
||||||
|
linestyle="dashed",
|
||||||
|
label="cwnd",
|
||||||
|
)
|
||||||
|
srtt_plot = ax1.plot(
|
||||||
|
transmission_df["srtt"].dropna(),
|
||||||
|
color="red",
|
||||||
|
linestyle="dashdot",
|
||||||
|
label="sRTT",
|
||||||
|
)
|
||||||
|
goodput_plot = ax2.plot(
|
||||||
|
transmission_df["goodput_rolling"],
|
||||||
|
color="blue",
|
||||||
|
linestyle="solid",
|
||||||
|
label="goodput",
|
||||||
|
)
|
||||||
|
# ax2.plot(transmission_df["goodput"], color="blue", linestyle="solid", label="goodput")
|
||||||
|
|
||||||
|
eff_bw_plot = ax01.plot(
|
||||||
|
transmission_df["effective_bw_sum"].dropna(),
|
||||||
|
color="peru",
|
||||||
|
linestyle="solid",
|
||||||
|
label="bandwidth",
|
||||||
|
)
|
||||||
|
lte_eff_bw_plot = ax01.plot(
|
||||||
|
transmission_df["lte_effective_bw_sum"].dropna(),
|
||||||
|
color="lightsteelblue",
|
||||||
|
linestyle="solid",
|
||||||
|
label="4G bandwidth",
|
||||||
|
alpha=0.5,
|
||||||
|
)
|
||||||
|
nr_eff_bw_plot = ax01.plot(
|
||||||
|
transmission_df["nr_effective_bw_sum"].dropna(),
|
||||||
|
color="cornflowerblue",
|
||||||
|
linestyle="solid",
|
||||||
|
label="5G bandwidth",
|
||||||
|
alpha=0.5,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ax01.stackplot(transmission_df["arrival_time"].to_list(),
|
||||||
|
# [transmission_df["lte_bw_sum"].to_list(), transmission_df["nr_bw_sum"].to_list()],
|
||||||
|
# colors=["lightsteelblue", "cornflowerblue"],
|
||||||
|
# labels=["4G bandwidth", "5G bandwidth"]
|
||||||
|
# )
|
||||||
|
|
||||||
|
lte_rsrq_plot = ax02.plot(
|
||||||
|
transmission_df["RSRQ_(dB)"].dropna(),
|
||||||
|
color="purple",
|
||||||
|
linestyle="dotted",
|
||||||
|
label="LTE RSRQ",
|
||||||
|
)
|
||||||
|
nr_rsrq_plot = ax00.plot(
|
||||||
|
transmission_df["NR5G_RSRQ_(dB)"].dropna(),
|
||||||
|
color="magenta",
|
||||||
|
linestyle="dotted",
|
||||||
|
label="NR RSRQ",
|
||||||
|
)
|
||||||
|
|
||||||
|
ax2.spines.right.set_position(("axes", 1.1))
|
||||||
|
ax02.spines.right.set_position(("axes", 1.1))
|
||||||
|
|
||||||
|
ax0.set_ylim(0, 5000)
|
||||||
|
ax1.set_ylim(0, 0.3)
|
||||||
|
ax2.set_ylim(0, 600)
|
||||||
|
ax00.set_ylim(-25, 0)
|
||||||
|
ax01.set_ylim(0, 250)
|
||||||
|
# second dB axis
|
||||||
|
ax02.set_ylim(-25, 0)
|
||||||
|
ax02.set_axis_off()
|
||||||
|
|
||||||
|
ax00.set_xlabel("arrival time [s]")
|
||||||
|
|
||||||
|
ax2.set_ylabel("Goodput [mbps]")
|
||||||
|
ax00.set_ylabel("LTE/NR RSRQ [dB]")
|
||||||
|
# ax02.set_ylabel("LTE RSRQ [dB]")
|
||||||
|
ax1.set_ylabel("sRTT [s]")
|
||||||
|
ax0.set_ylabel("cwnd [MSS]")
|
||||||
|
ax01.set_ylabel("Bandwidth [MHz]")
|
||||||
|
|
||||||
|
if args.fancy:
|
||||||
|
legend_frame = False
|
||||||
|
ax0.set_xlim([0, transmission_df.index[-1]])
|
||||||
|
ax00.set_xlim([0, transmission_df.index[-1]])
|
||||||
|
# added these three lines
|
||||||
|
lns_ax0 = snd_plot + srtt_plot + goodput_plot
|
||||||
|
labs_ax0 = [l.get_label() for l in lns_ax0]
|
||||||
|
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
|
||||||
|
#ax0.set_zorder(100)
|
||||||
|
|
||||||
|
lns_ax00 = eff_bw_plot + lte_eff_bw_plot + nr_eff_bw_plot + lte_rsrq_plot + nr_rsrq_plot
|
||||||
|
if lte_hanover_plot:
|
||||||
|
lns_ax00.append(lte_hanover_plot)
|
||||||
|
if nr_hanover_plot:
|
||||||
|
lns_ax00.append(nr_hanover_plot)
|
||||||
|
labs_ax00 = [l.get_label() for l in lns_ax00]
|
||||||
|
ax02.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
|
||||||
|
#ax00.set_zorder(100)
|
||||||
|
plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
|
||||||
|
else:
|
||||||
|
fig.legend(loc="lower right")
|
||||||
|
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
|
||||||
|
# except Exception as e:
|
||||||
|
# print("Error processing file: {}".format(csv))
|
||||||
|
# print(str(e))
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
plt.close(fig)
|
||||||
|
plt.clf()
|
||||||
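plot_single_transmission_EM9190.py above aligns per-packet rows with the nearest earlier serial-log row via pandas.merge_asof with a 1 s tolerance. A tiny self-contained sketch of that join (all values made up):

import pandas as pd

pkts = pd.DataFrame({"bytes": [1400, 1400]}, index=pd.to_datetime(["2023-01-01 12:00:00.2", "2023-01-01 12:00:01.7"]))
serial_df = pd.DataFrame({"cqi": [11, 13]}, index=pd.to_datetime(["2023-01-01 12:00:00", "2023-01-01 12:00:01"]))
merged = pd.merge_asof(pkts, serial_df, left_index=True, right_index=True, tolerance=pd.Timedelta("1s"))
print(merged)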
269  plot_single_transmission_paper.py  (new executable file)
@@ -0,0 +1,269 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import math
|
||||||
|
import multiprocessing
|
||||||
|
import os
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
|
||||||
|
import seaborn as sns
|
||||||
|
|
||||||
|
sns.set()
|
||||||
|
#sns.set(font_scale=1.5)
|
||||||
|
|
||||||
|
tex_fonts = {
|
||||||
|
"pgf.texsystem": "lualatex",
|
||||||
|
# "legend.fontsize": "x-large",
|
||||||
|
# "figure.figsize": (15, 5),
|
||||||
|
"axes.labelsize": 15, # "small",
|
||||||
|
# "axes.titlesize": "x-large",
|
||||||
|
"xtick.labelsize": 15, # "small",
|
||||||
|
"ytick.labelsize": 15, # "small",
|
||||||
|
"legend.fontsize": 15,
|
||||||
|
"axes.formatter.use_mathtext": True,
|
||||||
|
"mathtext.fontset": "dejavusans",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# plt.rcParams.update(tex_fonts)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_cellid(value):
|
||||||
|
if isinstance(value, str):
|
||||||
|
try:
|
||||||
|
r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
|
||||||
|
return r
|
||||||
|
except Exception as e:
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return int(-1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
|
||||||
|
parser.add_argument(
|
||||||
|
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
|
||||||
|
)
|
||||||
|
parser.add_argument("--save", required=True, help="Location to save pdf file.")
|
||||||
|
parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
|
||||||
|
parser.add_argument(
|
||||||
|
"-i",
|
||||||
|
"--interval",
|
||||||
|
default=10,
|
||||||
|
type=int,
|
||||||
|
help="Time interval for rolling window.",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
pcap_csv_list = list()
|
||||||
|
for filename in os.listdir(args.pcap_csv_folder):
|
||||||
|
if filename.endswith(".csv") and "tcp" in filename:
|
||||||
|
pcap_csv_list.append(filename)
|
||||||
|
|
||||||
|
counter = 1
|
||||||
|
if len(pcap_csv_list) == 0:
|
||||||
|
print("No CSV files found.")
|
||||||
|
|
||||||
|
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
|
||||||
|
|
||||||
|
for csv in pcap_csv_list:
|
||||||
|
print(
|
||||||
|
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
|
||||||
|
counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# try:
|
||||||
|
transmission_df = pd.read_csv(
|
||||||
|
"{}{}".format(args.pcap_csv_folder, csv),
|
||||||
|
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df = transmission_df.set_index("datetime")
|
||||||
|
transmission_df.index = pd.to_datetime(transmission_df.index)
|
||||||
|
transmission_df = transmission_df.sort_index()
|
||||||
|
|
||||||
|
# srtt to [s]
|
||||||
|
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
|
||||||
|
|
||||||
|
# key for columns and level for index
|
||||||
|
transmission_df["goodput"] = (
|
||||||
|
transmission_df["payload_size"]
|
||||||
|
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
|
||||||
|
.transform("sum")
|
||||||
|
)
|
||||||
|
transmission_df["goodput"] = transmission_df["goodput"].apply(
|
||||||
|
lambda x: ((x * 8) / args.interval) / 10 ** 6
|
||||||
|
)
|
||||||
|
|
||||||
|
transmission_df["goodput_rolling"] = (
|
||||||
|
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
|
||||||
|
)
|
||||||
|
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
|
||||||
|
lambda x: ((x * 8) / args.interval) / 10 ** 6
|
||||||
|
)
|
||||||
|
|
||||||
|
# set meta values and remove all not needed columns
|
||||||
|
cc_algo = transmission_df["congestion_control"].iloc[0]
|
||||||
|
cc_algo = cc_algo.upper()
|
||||||
|
transmission_direction = transmission_df["direction"].iloc[0]
|
||||||
|
|
||||||
|
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
|
||||||
|
|
||||||
|
# read serial csv
|
||||||
|
serial_df = pd.read_csv(
|
||||||
|
args.serial_file, converters={"Cell_ID": convert_cellid}
|
||||||
|
)
|
||||||
|
serial_df = serial_df.set_index("datetime")
|
||||||
|
serial_df.index = pd.to_datetime(serial_df.index)
|
||||||
|
serial_df.sort_index()
|
||||||
|
|
||||||
|
# Select DataFrame rows between two dates
|
||||||
|
mask = (serial_df.index >= transmission_df.index[0]) & (serial_df.index <= transmission_df.index[-1])
|
||||||
|
serial_df = serial_df.loc[mask]
|
||||||
|
|
||||||
|
serial_df["arrival_time"] = (serial_df["time"] - serial_df["time"].iloc[0]) * 60
|
||||||
|
serial_df.index = serial_df["arrival_time"]
|
||||||
|
|
||||||
|
transmission_df.index = transmission_df["arrival_time"]
|
||||||
|
|
||||||
|
# filter active state
|
||||||
|
for i in range(1, 5):
|
||||||
|
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
|
||||||
|
"LTE_SCC{}_bw".format(i)
|
||||||
|
]
|
||||||
|
|
||||||
|
mask = serial_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
|
||||||
|
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
|
||||||
|
"LTE_SCC{}_effective_bw".format(i)
|
||||||
|
].where(mask, other=0)
|
||||||
|
|
||||||
|
# filter if sc is usesd for uplink
|
||||||
|
for i in range(1, 5):
|
||||||
|
mask = serial_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
|
||||||
|
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
|
||||||
|
"LTE_SCC{}_effective_bw".format(i)
|
||||||
|
].where(mask, other=0)
|
||||||
|
|
||||||
|
# sum all effective bandwidth for 5G and 4G
|
||||||
|
serial_df["SCC1_NR5G_effective_bw"] = serial_df["SCC1_NR5G_bw"].fillna(0)
|
||||||
|
serial_df["effective_bw_sum"] = (
|
||||||
|
serial_df["SCC1_NR5G_effective_bw"]
|
||||||
|
+ serial_df["LTE_SCC1_effective_bw"]
|
||||||
|
+ serial_df["LTE_SCC2_effective_bw"]
|
||||||
|
+ serial_df["LTE_SCC3_effective_bw"]
|
||||||
|
+ serial_df["LTE_SCC4_effective_bw"]
|
||||||
|
+ serial_df["LTE_bw"]
|
||||||
|
)
|
||||||
|
bw_cols = [
|
||||||
|
"SCC1_NR5G_effective_bw",
|
||||||
|
"LTE_bw",
|
||||||
|
"LTE_SCC1_effective_bw",
|
||||||
|
"LTE_SCC2_effective_bw",
|
||||||
|
"LTE_SCC3_effective_bw",
|
||||||
|
"LTE_SCC4_effective_bw",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# transmission timeline
|
||||||
|
scaley = 1.5
|
||||||
|
scalex = 1.0
|
||||||
|
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
|
||||||
|
fig.subplots_adjust(right=0.75)
|
||||||
|
if not args.fancy:
|
||||||
|
plt.title("{} with {}".format(transmission_direction, cc_algo))
|
||||||
|
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
|
||||||
|
ax0 = ax[0]
|
||||||
|
ax1 = ax0.twinx()
|
||||||
|
ax2 = ax0.twinx()
|
||||||
|
# ax2.spines.right.set_position(("axes", 1.22))
|
||||||
|
|
||||||
|
ax00 = ax[1]
|
||||||
|
|
||||||
|
snd_plot = ax0.plot(
|
||||||
|
transmission_df["snd_cwnd"].dropna(),
|
||||||
|
color="darkorange",
|
||||||
|
linestyle="dashed",
|
||||||
|
label="cwnd",
|
||||||
|
)
|
||||||
|
srtt_plot = ax1.plot(
|
||||||
|
transmission_df["srtt"].dropna(),
|
||||||
|
color="maroon",
|
||||||
|
linestyle="dotted",
|
||||||
|
label="sRTT",
|
||||||
|
)
|
||||||
|
goodput_plot = ax2.plot(
|
||||||
|
transmission_df["goodput_rolling"],
|
||||||
|
color="blue",
|
||||||
|
linestyle="solid",
|
||||||
|
label="goodput",
|
||||||
|
)
|
||||||
|
|
||||||
|
serial_df["time_rel"] = serial_df["time"] - serial_df["time"].iloc[0]
|
||||||
|
serial_df.index = serial_df["time_rel"]
|
||||||
|
|
||||||
|
ax_stacked = serial_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
|
||||||
|
ax00.set_ylabel("bandwidth [MHz]")
|
||||||
|
ax00.set_ylim(0, 200)
|
||||||
|
#ax.set_xlabel("time [minutes]")
|
||||||
|
#ax00.set_xlim([0, transmission_df.index[-1]])
|
||||||
|
ax00.xaxis.grid(True)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
ax2.spines.right.set_position(("axes", 1.1))
|
||||||
|
|
||||||
|
|
||||||
|
ax0.set_ylim(0, 5000) #2500
|
||||||
|
ax1.set_ylim(0, 2) #0.3
|
||||||
|
ax2.set_ylim(0, 500)
|
||||||
|
#ax00.set_ylim(-25, 0)
|
||||||
|
|
||||||
|
ax00.set_xlabel("time [s]")
|
||||||
|
|
||||||
|
ax2.set_ylabel("goodput [mbps]")
|
||||||
|
#ax00.set_ylabel("LTE/NR RSRQ [dB]")
|
||||||
|
# ax02.set_ylabel("LTE RSRQ [dB]")
|
||||||
|
ax1.set_ylabel("sRTT [s]")
|
||||||
|
ax0.set_ylabel("cwnd [MSS]")
|
||||||
|
|
||||||
|
if args.fancy:
|
||||||
|
legend_frame = False
|
||||||
|
ax0.set_xlim([0, 60])
|
||||||
|
ax00.set_xlim([0, 60])
|
||||||
|
# added these three lines
|
||||||
|
lns_ax0 = snd_plot + srtt_plot + goodput_plot
|
||||||
|
labs_ax0 = [l.get_label() for l in lns_ax0]
|
||||||
|
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
|
||||||
|
#ax0.set_zorder(100)
|
||||||
|
|
||||||
|
#lns_ax00 = [ax_stacked]
|
||||||
|
#labs_ax00 = ["5G bandwidth", "4G bandwidth"]
|
||||||
|
#ax00.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
|
||||||
|
|
||||||
|
L = ax00.legend(ncols=3, fontsize=9, frameon=False)
|
||||||
|
L.get_texts()[0].set_text("5G main")
|
||||||
|
L.get_texts()[1].set_text("4G main")
|
||||||
|
L.get_texts()[2].set_text("4G SCC 1")
|
||||||
|
L.get_texts()[3].set_text("4G SCC 2")
|
||||||
|
L.get_texts()[4].set_text("4G SCC 3")
|
||||||
|
L.get_texts()[5].set_text("4G SCC 4")
|
||||||
|
|
||||||
|
#ax00.set_zorder(100)
|
||||||
|
plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
|
||||||
|
#serial_df.to_csv("{}{}_plot.csv".format(args.save, csv.replace(".csv", "")))
|
||||||
|
else:
|
||||||
|
fig.legend(loc="lower right")
|
||||||
|
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
|
||||||
|
# except Exception as e:
|
||||||
|
# print("Error processing file: {}".format(csv))
|
||||||
|
# print(str(e))
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
plt.close(fig)
|
||||||
|
plt.clf()
|
||||||
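plot_single_transmission_paper.py above stacks the per-carrier bandwidth columns as an area plot. A minimal sketch of that pandas idiom with made-up numbers:

import pandas as pd
import matplotlib.pyplot as plt

df = pd.DataFrame({"NR5G_bw": [0, 60, 60], "LTE_bw": [20, 20, 20]}, index=[0, 1, 2])
ax = df.plot.area(stacked=True, linewidth=0)
ax.set_ylabel("bandwidth [MHz]")
plt.show()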
179  plot_single_transmission_timeline.py  (new executable file)
@@ -0,0 +1,179 @@
|
|||||||
|
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser

import matplotlib
import pandas as pd
import matplotlib.pyplot as plt


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )

    args = parser.parse_args()
    manager = multiprocessing.Manager()
    n = manager.Value("i", 0)
    frame_list = manager.list()
    jobs = []

    # load all pcap csv into one dataframe
    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)

    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")

    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))

    for csv in pcap_csv_list:

        print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor((counter / len(pcap_csv_list)) * 100)))

        try:
            transmission_df = pd.read_csv(
                "{}{}".format(args.pcap_csv_folder, csv),
                dtype=dict(is_retranmission=bool, is_dup_ack=bool),
            )

            transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
            transmission_df = transmission_df.set_index("datetime")
            transmission_df.index = pd.to_datetime(transmission_df.index)
            transmission_df = transmission_df.sort_index()

            # srtt to [s]
            transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6)

            # key for columns and level for index
            transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
            transmission_df["goodput"] = transmission_df["goodput"].apply(
                lambda x: ((x * 8) / args.interval) / 10**6
            )

            transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
            transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
                lambda x: ((x * 8) / args.interval) / 10**6
            )

            # set meta values and remove all not needed columns
            cc_algo = transmission_df["congestion_control"].iloc[0]
            cc_algo = cc_algo.upper()
            transmission_direction = transmission_df["direction"].iloc[0]

            #transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])

            # read serial csv
            serial_df = pd.read_csv(args.serial_file)
            serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
            serial_df = serial_df.set_index("datetime")
            serial_df.index = pd.to_datetime(serial_df.index)
            serial_df = serial_df.sort_index()

            transmission_df = pd.merge_asof(
                transmission_df,
                serial_df,
                tolerance=pd.Timedelta("1s"),
                right_index=True,
                left_index=True,
            )

            # transmission timeline

            scaley = 1.5
            scalex = 1.0
            fig, ax = plt.subplots(figsize=[6.4 * scaley, 4.8 * scalex])
            plt.title("{} with {}".format(transmission_direction, cc_algo))
            fig.subplots_adjust(right=0.75)

            twin1 = ax.twinx()
            twin2 = ax.twinx()
            twin3 = ax.twinx()
            twin4 = ax.twinx()
            # Offset the right spines of the extra twin axes. The ticks and labels
            # have already been placed on the right by twinx above.
            twin2.spines.right.set_position(("axes", 1.1))
            twin3.spines.right.set_position(("axes", 1.2))
            twin4.spines.right.set_position(("axes", 1.3))

            # create list of color indices
            transmission_df["index"] = transmission_df.index
            color_dict = dict()
            color_list = list()
            i = 0
            for cell_id in transmission_df["cellID"]:
                if cell_id not in color_dict:
                    color_dict[cell_id] = i
                    i += 1
                color_list.append(color_dict[cell_id])

            transmission_df["cell_color"] = color_list
            color_dict = None
            color_list = None

            cmap = matplotlib.cm.get_cmap("Set3")
            unique_cells = transmission_df["cell_color"].unique()
            color_list = cmap.colors * (round(len(unique_cells) / len(cmap.colors)) + 1)

            for c in transmission_df["cell_color"].unique():
                bounds = transmission_df[["index", "cell_color"]].groupby("cell_color").agg(["min", "max"]).loc[c]
                ax.axvspan(bounds.min(), bounds.max(), alpha=0.3, color=color_list[c])

            p4, = twin3.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd")
            p3, = twin2.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT")
            p1, = ax.plot(transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput")
            p2, = twin1.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI")
            p5, = twin4.plot(transmission_df["DL_bandwidth"].dropna(), color="peru", linestyle="dotted", label="DL_bandwidth")

            ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max())
            ax.set_ylim(0, 500)
            twin1.set_ylim(0, 15)
            twin2.set_ylim(0, 0.2)  # twin2.set_ylim(0, transmission_df["ack_rtt"].max())
            twin3.set_ylim(0, transmission_df["snd_cwnd"].max() + 10)
            twin4.set_ylim(0, 21)

            ax.set_xlabel("arrival time")
            ax.set_ylabel("Goodput [mbps]")
            twin1.set_ylabel("CQI")
            twin2.set_ylabel("sRTT [s]")
            twin3.set_ylabel("cwnd")
            twin4.set_ylabel("DL_bandwidth")

            ax.yaxis.label.set_color(p1.get_color())
            twin1.yaxis.label.set_color(p2.get_color())
            twin2.yaxis.label.set_color(p3.get_color())
            twin3.yaxis.label.set_color(p4.get_color())
            twin4.yaxis.label.set_color(p5.get_color())

            tkw = dict(size=4, width=1.5)
            ax.tick_params(axis='y', colors=p1.get_color(), **tkw)
            twin1.tick_params(axis='y', colors=p2.get_color(), **tkw)
            twin2.tick_params(axis='y', colors=p3.get_color(), **tkw)
            twin3.tick_params(axis='y', colors=p4.get_color(), **tkw)
            twin4.tick_params(axis='y', colors=p5.get_color(), **tkw)
            ax.tick_params(axis='x', **tkw)

            #ax.legend(handles=[p1, p2, p3])

            if args.save:
                plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
        except Exception as e:
            print("Error processing file: {}".format(csv))
            print(str(e))
        counter += 1

        plt.clf()
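For reference, the goodput columns above are just a windowed byte count converted to a bit rate. Below is a minimal, self-contained sketch of the same pandas pattern, with synthetic timestamps and payload sizes invented for illustration; the fixed-bucket `goodput` column uses `pd.Grouper` with the same interval instead of `rolling`.

```python
import pandas as pd

# Synthetic packet trace: one row per packet, indexed by arrival time.
idx = pd.date_range("2022-01-01 12:00:00", periods=6, freq="2s")
payload = pd.Series([1500, 1500, 3000, 0, 1500, 4500], index=idx, name="payload_size")

interval = 10  # seconds, same role as the --interval argument above

# Bytes received in the trailing window, converted to Mbit/s.
rolling_bytes = payload.rolling("{}s".format(interval)).sum()
goodput_mbps = (rolling_bytes * 8 / interval) / 10**6
print(goodput_mbps)
```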
plot_stacked_bandwidth.py (new executable file, 95 lines)
@@ -0,0 +1,95 @@
#!/usr/bin/env python3

from argparse import ArgumentParser

import pandas as pd
import matplotlib.pyplot as plt


plt_params = {
    "pgf.texsystem": "lualatex",
    #"legend.fontsize": "x-large",
    #"figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}

#plt.rcParams.update(plt_params)

import seaborn as sns
sns.set()
sns.set(font_scale=1.5)

plt.rcParams["figure.figsize"] = (10, 3)

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Serial CSV")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")

    args = parser.parse_args()

    df = pd.read_csv(args.file)
    df["time_rel"] = df["time"] - df["time"].iloc[0]
    df.index = df["time_rel"] / 60

    # filter active state
    for i in range(1, 5):
        df["LTE_SCC{}_effective_bw".format(i)] = df["LTE_SCC{}_bw".format(i)]

        mask = df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
        df["LTE_SCC{}_effective_bw".format(i)] = df[
            "LTE_SCC{}_effective_bw".format(i)
        ].where(mask, other=0)

    # filter if the SCC is used for uplink
    for i in range(1, 5):
        mask = df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
        df["LTE_SCC{}_effective_bw".format(i)] = df[
            "LTE_SCC{}_effective_bw".format(i)
        ].where(mask, other=0)

    # sum all effective bandwidth for 5G and 4G
    df["SCC1_NR5G_effective_bw"] = df["SCC1_NR5G_bw"].fillna(0)
    df["effective_bw_sum"] = (
        df["SCC1_NR5G_effective_bw"]
        + df["LTE_SCC1_effective_bw"]
        + df["LTE_SCC2_effective_bw"]
        + df["LTE_SCC3_effective_bw"]
        + df["LTE_SCC4_effective_bw"]
        + df["LTE_bw"]
    )
    bw_cols = [
        "SCC1_NR5G_effective_bw",
        "LTE_bw",
        "LTE_SCC1_effective_bw",
        "LTE_SCC2_effective_bw",
        "LTE_SCC3_effective_bw",
        "LTE_SCC4_effective_bw",
    ]

    ax = df[bw_cols].plot.area(stacked=True, linewidth=0)
    ax.set_ylabel("bandwidth [MHz]")
    ax.set_xlabel("time [minutes]")
    ax.set_xlim([0, df.index[-1]])
    ax.xaxis.grid(False)

    L = plt.legend(ncols=2, fontsize=12, frameon=False)
    L.get_texts()[0].set_text("5G main")
    L.get_texts()[1].set_text("4G main")
    L.get_texts()[2].set_text("4G SCC 1")
    L.get_texts()[3].set_text("4G SCC 2")
    L.get_texts()[4].set_text("4G SCC 3")
    L.get_texts()[5].set_text("4G SCC 4")

    if args.save:
        plt.savefig("{}-used_bandwidth.eps".format(args.save), bbox_inches="tight")
    else:
        plt.show()
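The key step in the stacked-bandwidth script is zeroing a carrier's bandwidth whenever it is not in the ACTIVE state (and again when it is configured for uplink). A small sketch of that `Series.where` masking with toy data; the column names mirror the script, the values are invented:

```python
import pandas as pd

# Toy modem log: one secondary carrier's bandwidth in MHz plus its state.
df = pd.DataFrame({
    "LTE_SCC1_bw": [20, 20, 20, 20],
    "LTE_SCC1_state": ["ACTIVE", "INACTIVE", "ACTIVE", "DEACTIVATED"],
})

# Keep the bandwidth only while the carrier is ACTIVE, otherwise count 0 MHz,
# mirroring the .where(mask, other=0) filtering in the script above.
mask = df["LTE_SCC1_state"].isin(["ACTIVE"])
df["LTE_SCC1_effective_bw"] = df["LTE_SCC1_bw"].where(mask, other=0)
print(df)
```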
plot_transmission_timeline.py (new executable file, 281 lines)
@@ -0,0 +1,281 @@
#!/usr/bin/env python3
import multiprocessing
import os
import pickle
from argparse import ArgumentParser
from math import ceil
from time import sleep

import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits import axisartist
from mpl_toolkits.axes_grid1 import host_subplot


def csv_to_dataframe(csv_list, dummy):

    global n
    global frame_list

    transmission_df = None

    for csv in csv_list:
        tmp_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"]) - pd.Timedelta(hours=1)
        tmp_df = tmp_df.set_index("datetime")
        tmp_df.index = pd.to_datetime(tmp_df.index)
        if transmission_df is None:
            transmission_df = tmp_df
        else:
            transmission_df = pd.concat([transmission_df, tmp_df])

        n.value += 1

    frame_list.append(transmission_df)


from itertools import islice

def chunk(it, size):
    it = iter(it)
    return iter(lambda: tuple(islice(it, size)), ())


def plot_cdf(dataframe, column_name):
    stats_df = dataframe \
        .groupby(column_name) \
        [column_name] \
        .agg("count") \
        .pipe(pd.DataFrame) \
        .rename(columns={column_name: "frequency"})

    # PDF
    stats_df["PDF"] = stats_df["frequency"] / sum(stats_df["frequency"])

    # CDF
    stats_df["CDF"] = stats_df["PDF"].cumsum()
    stats_df = stats_df.reset_index()

    stats_df.plot(x=column_name, y=["CDF"], grid=True)


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")
    parser.add_argument("--export", default=None, help="Export figure as a pickle file.")
    parser.add_argument(
        "-c",
        "--cores",
        default=1,
        type=int,
        help="Number of cores for multiprocessing.",
    )
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )

    args = parser.parse_args()
    manager = multiprocessing.Manager()
    n = manager.Value("i", 0)
    frame_list = manager.list()
    jobs = []

    # load all pcap csv into one dataframe
    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)

    parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
    print("Start processing with {} jobs.".format(args.cores))
    for p in parts:
        process = multiprocessing.Process(target=csv_to_dataframe, args=(p, "dummy"))
        jobs.append(process)

    for j in jobs:
        j.start()

    print("Started all jobs.")
    # Ensure all the processes have finished
    finished_job_counter = 0
    working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
    w = 0
    while len(jobs) != finished_job_counter:
        sleep(1)
        print(
            "\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
                working[w],
                working[w],
                working[w],
                len(jobs),
                finished_job_counter,
                n.value,
                len(pcap_csv_list),
                round((n.value / len(pcap_csv_list)) * 100, 2),
            ),
            end="",
        )
        finished_job_counter = 0
        for j in jobs:
            if not j.is_alive():
                finished_job_counter += 1
        if (w + 1) % len(working) == 0:
            w = 0
        else:
            w += 1
    print("\r\nSorting table...")

    transmission_df = pd.concat(frame_list)
    frame_list = None
    transmission_df = transmission_df.sort_index()

    print("Calculate goodput...")

    #print(transmission_df)

    # srtt to [s]
    transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)

    # key for columns and level for index
    transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
    transmission_df["goodput"] = transmission_df["goodput"].apply(
        lambda x: ((x * 8) / args.interval) / 10**6
    )

    transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
    transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
        lambda x: ((x * 8) / args.interval) / 10 ** 6
    )

    # set meta values and remove all not needed columns
    cc_algo = transmission_df["congestion_control"].iloc[0]
    cc_algo = cc_algo.upper()
    transmission_direction = transmission_df["direction"].iloc[0]

    transmission_df = transmission_df.filter(["goodput", "datetime", "srtt", "goodput_rolling"])

    # read serial csv
    serial_df = pd.read_csv(args.serial_file)
    serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
    serial_df = serial_df.set_index("datetime")
    serial_df.index = pd.to_datetime(serial_df.index)
    serial_df = serial_df.sort_index()

    transmission_df = pd.merge_asof(
        transmission_df,
        serial_df,
        tolerance=pd.Timedelta("1s"),
        right_index=True,
        left_index=True,
    )

    # transmission timeline

    scaley = 1.5
    scalex = 1.0
    fig, ax = plt.subplots(figsize=[6.4 * scaley, 4.8 * scalex])
    plt.title("{} with {}".format(transmission_direction, cc_algo))
    fig.subplots_adjust(right=0.75)

    twin1 = ax.twinx()
    twin2 = ax.twinx()
    # Offset the right spine of twin2. The ticks and label have already been
    # placed on the right by twinx above.
    twin2.spines.right.set_position(("axes", 1.2))

    # create list of color indices
    transmission_df["index"] = transmission_df.index
    color_dict = dict()
    color_list = list()
    i = 0
    for cell_id in transmission_df["cellID"]:
        if cell_id not in color_dict:
            color_dict[cell_id] = i
            i += 1
        color_list.append(color_dict[cell_id])

    transmission_df["cell_color"] = color_list
    color_dict = None
    color_list = None

    cmap = matplotlib.cm.get_cmap("Set3")
    unique_cells = transmission_df["cell_color"].unique()
    color_list = cmap.colors * (round(len(unique_cells) / len(cmap.colors)) + 1)

    for c in transmission_df["cell_color"].unique():
        bounds = transmission_df[["index", "cell_color"]].groupby("cell_color").agg(["min", "max"]).loc[c]
        ax.axvspan(bounds.min(), bounds.max(), alpha=0.3, color=color_list[c])

    p1, = ax.plot(transmission_df["goodput_rolling"], "-", color="blue", label="goodput")
    p2, = twin1.plot(transmission_df["downlink_cqi"], "--", color="green", label="CQI")
    p3, = twin2.plot(transmission_df["srtt"], "-.", color="red", label="sRTT")

    ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max())
    ax.set_ylim(0, 500)
    twin1.set_ylim(0, 15)
    twin2.set_ylim(0, 1)

    ax.set_xlabel("Time")
    ax.set_ylabel("Goodput")
    twin1.set_ylabel("CQI")
    twin2.set_ylabel("sRTT")

    ax.yaxis.label.set_color(p1.get_color())
    twin1.yaxis.label.set_color(p2.get_color())
    twin2.yaxis.label.set_color(p3.get_color())

    tkw = dict(size=4, width=1.5)
    ax.tick_params(axis='y', colors=p1.get_color(), **tkw)
    twin1.tick_params(axis='y', colors=p2.get_color(), **tkw)
    twin2.tick_params(axis='y', colors=p3.get_color(), **tkw)
    ax.tick_params(axis='x', **tkw)

    #ax.legend(handles=[p1, p2, p3])

    if args.save:
        plt.savefig("{}timeline_plot.pdf".format(args.save))
    if args.export:
        pickle.dump(fig, open("{}timeline_plot.pkl".format(args.export), "wb"))

    # goodput cdf
    plt.clf()

    print("Calculate and plot goodput CDF...")
    plot_cdf(transmission_df, "goodput")
    plt.xlabel("goodput [mbps]")
    plt.ylabel("CDF")
    plt.legend([cc_algo])
    plt.title("{} with {}".format(transmission_direction, cc_algo))

    if args.save:
        plt.savefig("{}{}_cdf_plot.pdf".format(args.save, "goodput"))
    else:
        plt.show()

    # rtt cdf
    plt.clf()

    print("Calculate and plot rtt CDF...")
    plot_cdf(transmission_df, "srtt")
    plt.xlabel("sRTT [s]")
    plt.ylabel("CDF")
    plt.xscale("log")
    plt.legend([cc_algo])
    plt.title("{} with {}".format(transmission_direction, cc_algo))

    if args.save:
        plt.savefig("{}{}_cdf_plot.pdf".format(args.save, "srtt"))
    else:
        plt.show()
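`plot_cdf()` above builds the empirical CDF by counting occurrences of each value, normalising the counts to a PDF, and taking the cumulative sum. A minimal sketch of that same chain with toy goodput samples (values invented for illustration):

```python
import pandas as pd

# Toy column of goodput samples (Mbit/s); repeated values give counts > 1.
samples = pd.DataFrame({"goodput": [10, 10, 20, 30, 30, 30]})

# Same idea as plot_cdf() above: count each value, normalise to a PDF,
# then cumulate the PDF to get the empirical CDF.
stats = (
    samples.groupby("goodput")["goodput"]
    .agg("count")
    .pipe(pd.DataFrame)
    .rename(columns={"goodput": "frequency"})
)
stats["PDF"] = stats["frequency"] / stats["frequency"].sum()
stats["CDF"] = stats["PDF"].cumsum()
print(stats.reset_index())
```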
reset_modem_gps.py (new normal file, 34 lines)
@@ -0,0 +1,34 @@
#!/usr/bin/env python3
from argparse import ArgumentParser

import serial
from time import sleep

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial", required=True, help="Serial Interface")
    args = parser.parse_args()

    command_order = [
        b'AT!CUSTOM="GPSENABLE",1',
        b'AT!CUSTOM="GPSSEL",0',
        b'AT!CUSTOM="GPSLPM",0',
        b'AT!GPSNMEACONFIG=1,1',
        b'AT+WANT=1',
        b'AT!GPSNMEASENTENCE=FF'
    ]

    ser = serial.Serial(
        port=args.serial,
        baudrate=115200,
    )

    if ser.is_open:
        ser.write(b'At!Reset')
        sleep(0.5)
        ser.write(b'AT!ENTERCND="A710"')
        sleep(0.5)
        for cmd in command_order:
            ser.write(cmd)
            sleep(0.5)
            print(cmd)
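Note that the script writes each AT command without a line terminator and never reads the modem's reply. Depending on the modem firmware, a trailing carriage return is usually expected; below is a hedged variant of the write loop, as a hypothetical helper that is not part of the original script, which appends the terminator and echoes whatever response is buffered.

```python
import serial
from time import sleep

def send_command(ser: serial.Serial, cmd: bytes, settle: float = 0.5) -> bytes:
    # Hypothetical helper: append "\r" (many modems expect it; the original
    # script sends the bare command), wait, then print any buffered reply.
    ser.write(cmd + b"\r")
    sleep(settle)
    reply = ser.read(ser.in_waiting)
    print(cmd, reply)
    return reply
```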