Compare commits

...

135 Commits

Author SHA1 Message Date
Lukas Prause
0f9ff99d90 Adds srtts to csv. 2023-08-31 13:29:35 +02:00
Lukas Prause
8129a2bd95 Removing index from frame. 2023-08-28 12:57:24 +02:00
Lukas Prause
1c498208da Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-08-24 14:05:21 +02:00
Lukas Prause
2e4ff28fc2 Adds new script. 2023-08-24 14:05:14 +02:00
8c2f78cd02 Changes cwnd scaling. 2023-07-24 10:05:59 +02:00
cf18199ba3 Changes plot label. 2023-07-18 08:39:13 +02:00
c43826dc9c Changes plot color and y scale. 2023-07-14 10:49:30 +02:00
b92ee09af9 Changes plot color and y scale. 2023-07-14 10:25:56 +02:00
fdf04fb21e Changes plot color and y scale. 2023-07-14 09:57:37 +02:00
a75b0b74a0 Changes plot color and y scale. 2023-07-14 09:49:31 +02:00
Lukas Prause
9f8db93f7c Debug 2023-07-13 13:21:46 +02:00
Lukas Prause
4e85d7a3e5 Debug 2023-07-13 13:12:12 +02:00
Lukas Prause
0785c1e4e6 Debug 2023-07-13 12:37:46 +02:00
a9f9c42ab1 Valuefixes 2023-07-13 10:25:14 +02:00
44f20be108 Valuefixes 2023-07-13 10:20:54 +02:00
dc578c8a1b Valuefixes 2023-07-13 10:15:13 +02:00
e4fc32a1a2 Valuefixes 2023-07-13 10:05:26 +02:00
ec443c9bd4 Valuefixes 2023-07-13 09:55:12 +02:00
009d59c499 Valuefixes 2023-07-13 09:43:21 +02:00
85c4bfeb75 Valuefixes 2023-07-13 09:15:06 +02:00
Lukas Prause
502de2d864 Merge timing. 2023-07-12 16:16:01 +02:00
Lukas Prause
a713b9e262 Merge timing. 2023-07-12 15:18:14 +02:00
Lukas Prause
98fe00c02f Merge timing. 2023-07-12 14:55:49 +02:00
Lukas Prause
a97563fe61 Fixes typo and and too large dataset. 2023-07-12 14:36:21 +02:00
Lukas Prause
951bac5f1e Fixes typo and and too large dataset. 2023-07-12 14:17:33 +02:00
Lukas Prause
97f0946ad0 Fixes typo and and too large dataset. 2023-07-12 13:54:43 +02:00
Lukas Prause
baf2207a4f Fixes typo and and too large dataset. 2023-07-12 13:54:00 +02:00
345e6546ce stacked 2023-07-11 20:37:57 +02:00
a32df7b8aa fix stacked 2023-07-11 20:13:29 +02:00
Lukas Prause
aca74ca09c Paper plots. 2023-07-11 16:28:29 +02:00
Lukas Prause
a25288a737 Removes fontsize scaling 2023-07-11 13:18:54 +02:00
Lukas Prause
67ca4d66b0 Removes fontsize scaling 2023-07-11 13:18:00 +02:00
Lukas Prause
a701d378e2 Changes fontsize scale. 2023-07-11 13:06:25 +02:00
Lukas Prause
eb281c976c merge 2023-07-11 13:04:13 +02:00
Lukas Prause
060ffaad74 merge 2023-07-11 13:03:30 +02:00
dde4350a00 Add unit to cwnd label. 2023-07-11 10:15:55 +02:00
ef9740177e Removes legend border frame. 2023-07-11 10:04:32 +02:00
af2a53abb3 Legend fixes 2023-07-11 09:57:29 +02:00
11749d39a3 Legend fixes 2023-07-11 09:54:20 +02:00
4b291f04ba Legend fixes 2023-07-11 09:50:45 +02:00
772f2d704a Legend fixes 2023-07-11 09:47:22 +02:00
32184560f4 Legend fixes 2023-07-11 09:43:00 +02:00
0dfc17f950 Legend fixes 2023-07-11 09:25:14 +02:00
Lukas Prause
91ba0827e0 Adds fancy legend. 2023-07-10 17:01:51 +02:00
Lukas Prause
f3e155fd87 Adds fancy legend. 2023-07-10 16:56:30 +02:00
Lukas Prause
46e8bf95ba Adds fancy legend. 2023-07-10 16:52:33 +02:00
Lukas Prause
7cc030a4cc Adds fancy legend. 2023-07-10 16:47:25 +02:00
Lukas Prause
70a1e5b82e Adds fancy legend. 2023-07-10 16:01:57 +02:00
Lukas Prause
75be22b719 Adds fancy legend. 2023-07-10 15:53:35 +02:00
Lukas Prause
de04d94779 Adds fancy plot settings for eps export. 2023-07-10 15:27:47 +02:00
Lukas Prause
14eca54f98 Adds fancy plot settings for eps export. 2023-07-10 14:37:29 +02:00
Lukas Prause
4c33e4872e Styling polts 2023-07-06 15:25:27 +02:00
Lukas Prause
8168c46925 Bugfix 2023-07-06 15:07:25 +02:00
Lukas Prause
05cb425096 Bugfix 2023-07-06 14:53:26 +02:00
Lukas Prause
f594955371 Bugfix 2023-07-06 13:40:38 +02:00
Lukas Prause
041f4d0c2c Adds plot styling. 2023-07-06 12:47:02 +02:00
7f1e2699c9 Adds a filter for uplink bandwidth. 2023-07-06 10:06:25 +02:00
Lukas Prause
d6062ee78b Adds script to plot the usage of bandwidth. 2023-07-05 15:38:12 +02:00
Lukas Prause
8a91736f39 Adds script to plot the usage of bandwidth. 2023-07-05 15:20:30 +02:00
Lukas Prause
29b5e02469 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-07-05 15:15:58 +02:00
Lukas Prause
f7abcf1fdf Adds script to plot the usage of bandwidth. 2023-07-05 15:15:45 +02:00
7764e1a49d Changes to bandwidth calculation. 2023-07-05 10:20:39 +02:00
Lukas Prause
9eabd701e4 Bugfix 2023-06-30 13:29:09 +02:00
Lukas Prause
e46cc7e8bd Bugfix 2023-06-30 13:23:26 +02:00
Lukas Prause
c4d2a66d83 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-06-30 13:12:58 +02:00
Lukas Prause
a2b57d32f1 Bugfix 2023-06-30 13:12:50 +02:00
bb94a75417 Adds support automatic timestamp offset calculation. 2023-06-29 09:46:44 +02:00
Lukas Prause
3496f8385f Adds support automatic timestamp offset calculation. 2023-06-28 15:36:03 +02:00
Lukas Prause
30fa09168e Adds support automatic timestamp offset calculation. 2023-06-27 15:42:22 +02:00
Lukas Prause
6f1f5afa07 Adds support for negative time offset in gps timestamp. 2023-06-27 14:14:44 +02:00
Lukas Prause
f276dbd242 Adds support for negative time offset in gps timestamp. 2023-06-27 13:53:40 +02:00
Lukas Prause
b7e09741e1 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-06-26 12:26:46 +02:00
Lukas Prause
180f4dcc8a Refactor GPS scripts. 2023-06-21 14:35:07 +02:00
Lukas Prause
80f292767b Bufix: Exiting threads on error. 2023-06-21 14:21:06 +02:00
e5bea755ba Changes calculation of 5G handover. 2023-04-28 09:00:35 +02:00
5dcb2450b3 Changes calculation of 5G handover. 2023-04-28 08:39:32 +02:00
Lukas Prause
be15a51017 Changing plots again 2023-04-27 15:17:51 +02:00
Lukas Prause
f650d98eb5 Changes goodpit calculation. 2023-04-25 14:22:40 +02:00
Lukas Prause
b578f70876 Changes goodput caluclation 2023-04-25 14:19:27 +02:00
Lukas Prause
e5483760a5 Changes scaleing. 2023-04-25 13:11:07 +02:00
Lukas Prause
725b2d9081 Adds stackplots. 2023-04-24 17:05:20 +02:00
Lukas Prause
719413bebb Adds stackplots. 2023-04-24 17:00:56 +02:00
Lukas Prause
bfe0a2ef0e Adds stackplots. 2023-04-24 16:57:04 +02:00
Lukas Prause
e0d972c937 Adds stackplots. 2023-04-24 16:50:10 +02:00
Lukas Prause
9693685434 Adds stackplots. 2023-04-24 16:23:24 +02:00
Lukas Prause
d65ad5b10e Adds stackplots. 2023-04-24 16:17:13 +02:00
Lukas Prause
6a72050cf1 Adds stackplots. 2023-04-24 16:08:30 +02:00
Lukas Prause
4a2365c671 Adds stackplot for bandwith. 2023-04-24 10:56:24 +02:00
Lukas Prause
1473a0c25a Adds stackplot for bandwith. 2023-04-24 10:48:18 +02:00
Lukas Prause
5eb8a5ea8b Adds stackplot for bandwith. 2023-04-24 10:44:08 +02:00
Lukas Prause
dd086d77e0 Try to fix bandwidth ploting. 2023-04-24 10:13:15 +02:00
Lukas Prause
8528f89484 Try to fix bandwidth ploting. 2023-04-24 10:08:08 +02:00
Lukas Prause
6f3530e2a6 Changes for new modem. 2023-04-21 13:36:22 +02:00
Lukas Prause
26c10c5127 Changes for new modem. 2023-04-21 13:32:37 +02:00
Lukas Prause
259db62584 Changes for new modem. 2023-04-21 13:25:04 +02:00
Lukas Prause
b35104f3ba Changes for new modem. 2023-04-21 13:22:03 +02:00
Lukas Prause
a0668a89d4 Changes for new modem. 2023-04-21 13:05:11 +02:00
Lukas Prause
3837fe81f8 Changes for new modem. 2023-04-20 16:14:41 +02:00
Lukas Prause
95b39c8aaa Changes for new modem. 2023-04-20 16:12:02 +02:00
Lukas Prause
0f234adabb Changes for new modem. 2023-04-20 16:04:52 +02:00
Lukas Prause
f77140eb6b Changes for new modem. 2023-04-20 15:58:31 +02:00
Lukas Prause
ac801dc5ac Changes for new modem. 2023-04-20 15:53:43 +02:00
a845747a9c Serial output parser for EM919x. 2023-03-21 10:54:33 +01:00
Lukas Prause
b3073886c8 Refactor plots. 2023-03-17 15:19:24 +01:00
Lukas Prause
dbc4b4dd72 Refactor plots. 2023-03-17 15:16:32 +01:00
Lukas Prause
c463195a25 Pls fix. 2023-03-16 18:14:54 +01:00
Lukas Prause
61e99e6e83 Plot pcid and scid. 2023-03-16 15:33:13 +01:00
Lukas Prause
7d2d047903 Plot pcid and scid. 2023-03-16 15:17:55 +01:00
Lukas Prause
97567140ad Plot pcid and scid. 2023-03-16 15:11:13 +01:00
Lukas Prause
7a35d5014d Plot pcid and scid. 2023-03-16 14:51:18 +01:00
Lukas Prause
3362ba2c60 Adds delimiter to serial output. 2023-03-16 12:57:18 +01:00
58935bd3c6 Adds support for EM9191. 2023-03-16 10:34:12 +01:00
Lukas Prause
7bac2f7fd7 Moves bandwidth converters to serial format script. 2023-03-14 13:14:24 +01:00
Lukas Prause
a77425cfa2 Bugfix 2023-03-13 13:43:40 +01:00
Lukas Prause
178862a0e3 Adds monitoring for carrier aggregation. 2023-03-13 12:30:10 +01:00
861c764d75 Adds DL_bandwidth to plot. 2023-03-13 10:06:40 +01:00
Lukas Prause
51da4e6899 Plot changes 2023-03-09 14:50:32 +01:00
Lukas Prause
e2625998c6 Adds 'some' error handling. 2023-03-09 14:45:13 +01:00
Lukas Prause
d74c87a4d2 Fix grouper issue. 2023-03-06 13:39:10 +01:00
Lukas Prause
266cfb5e8c Fix grouper issue. 2023-03-06 13:21:34 +01:00
83d40c3f04 Bugfix 2023-03-03 10:28:13 +01:00
a5d837f865 Memory management 2023-03-01 09:03:34 +01:00
b2fa7f38be Memory management 2023-03-01 08:45:58 +01:00
6eefc8c081 Memory management 2023-03-01 08:43:02 +01:00
Lukas Prause
50afb7e4e9 Adds compare between multiple folders 2023-02-28 15:54:30 +01:00
b95d5202d5 Bugfixes 2023-02-28 09:20:31 +01:00
1b0c0b9c63 Bugfixes 2023-02-28 09:03:18 +01:00
38b7bf68ec Bugfixes 2023-02-28 08:47:40 +01:00
Lukas Prause
8004c74acf Adds script for cdf plots. 2023-02-27 13:05:36 +01:00
6d0b4d747d Changes ACK_RTT to sRTT in CDF plot. 2023-02-27 10:48:00 +01:00
29590640ef Changes ACK_RTT to sRTT in CDF plot. 2023-02-27 09:45:06 +01:00
a6953f2796 Bugfix: Missing goodput in figure. 2023-02-15 09:46:03 +01:00
Lukas Prause
08784f671e Removes correlation plot. 2023-02-10 15:51:04 +01:00
Lukas Prause
eae11e0eef Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-02-10 14:29:53 +01:00
Lukas Prause
5348128bfc merge 2023-02-10 14:29:50 +01:00
15 changed files with 1797 additions and 190 deletions

View File

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
# Matplotlib rc overrides for paper/LaTeX export (lualatex via pgf).
# Currently disabled; apply by uncommenting the rcParams update below.
tex_fonts = {
    "pgf.texsystem": "lualatex",
    # "legend.fontsize": "x-large",
    # "figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    # "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
    """Extract the numeric cell id from a serial-log cell-id string.

    The serial csv stores the cell id with the numeric part as the last
    whitespace-separated token wrapped in parentheses, e.g. ``"... (12345)"``.

    Returns:
        The cell id as ``int``, or ``-1`` for non-string input or strings
        whose last token is not a number.
    """
    if not isinstance(value, str):
        return -1
    try:
        # Last token, stripped of the surrounding parentheses.
        return int(value.split(" ")[-1].replace("(", "").replace(")", ""))
    except ValueError:
        # Narrowed from a blanket ``except Exception as e`` (binding unused);
        # only the int() conversion can fail here.
        return -1
if __name__ == "__main__":
    # Merge per-connection pcap csv exports with the modem serial csv and
    # write one combined goodput / effective-bandwidth / srtt csv.
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )
    args = parser.parse_args()

    # Collect the tcp pcap csv exports from the folder.
    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)
    if len(pcap_csv_list) == 0:
        # Bugfix: previously only printed and fell through, crashing later
        # (sort/concat on an empty run).
        print("No CSV files found.")
        exit(1)
    # Sort numerically by the trailing "<n>.csv" index of the filename.
    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))

    # Read the serial (modem) csv once, outside the loop -- it is identical
    # for every pcap csv; re-reading it per iteration was wasted work.
    serial_df = pd.read_csv(args.serial_file, converters={"Cell_ID": convert_cellid})
    # Shift timestamps back by one hour (logs appear to be UTC+1 -- TODO confirm).
    serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
        hours=1
    )
    serial_df = serial_df.set_index("datetime")
    serial_df.index = pd.to_datetime(serial_df.index)
    # Bugfix: sort_index() is not in-place; the result was previously discarded.
    serial_df = serial_df.sort_index()

    counter = 1
    concat_frame = None
    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter,
                len(pcap_csv_list),
                # Bugfix: the percentage was missing the factor 100.
                math.floor((counter / len(pcap_csv_list)) * 100),
            )
        )
        counter += 1  # Bugfix: the counter was never incremented.
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        transmission_df["datetime"] = pd.to_datetime(
            transmission_df["datetime"]
        ) - pd.Timedelta(hours=1)
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()
        # srtt to [s]
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
        # key for columns and level for index
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        # bytes per interval -> Mbit/s
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        # Meta values (constant over one capture); kept for parity with the
        # sibling scripts even though unused below.
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]
        # Align serial samples to packets (nearest index match within 1 s).
        transmission_df = pd.merge_asof(
            transmission_df,
            serial_df,
            tolerance=pd.Timedelta("1s"),
            right_index=True,
            left_index=True,
        )
        # Replace 0 in RSRQ with NaN (np.nan: np.NaN was removed in numpy 2.0).
        transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
            0, np.nan
        )
        transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.nan)
        # Count secondary-carrier bandwidth only while the carrier is ACTIVE.
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]
            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # Zero the carrier when it is configured for uplink.
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # Sum all effective bandwidth for 5G and 4G.
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
            "SCC1_NR5G_bw"
        ].fillna(0)
        transmission_df["lte_effective_bw_sum"] = (
            transmission_df["LTE_SCC1_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC2_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC3_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC4_effective_bw"].fillna(0)
            + transmission_df["LTE_bw"].fillna(0)
        )
        transmission_df["nr_effective_bw_sum"] = transmission_df[
            "SCC1_NR5G_effective_bw"
        ]
        transmission_df["effective_bw_sum"] = (
            transmission_df["nr_effective_bw_sum"]
            + transmission_df["lte_effective_bw_sum"]
        )
        transmission_df = transmission_df.filter(
            ["goodput", "effective_bw_sum", "srtt"]
        )
        transmission_df = transmission_df.reset_index(drop=True)
        if concat_frame is None:
            concat_frame = transmission_df
        else:
            concat_frame = pd.concat([concat_frame, transmission_df])
    concat_frame.to_csv("{}_concat_bw_gp.csv".format(args.save))

View File

@@ -2,6 +2,7 @@
import multiprocessing import multiprocessing
import os import os
from argparse import ArgumentParser from argparse import ArgumentParser
from datetime import datetime
from math import ceil from math import ceil
from time import sleep from time import sleep
@@ -50,18 +51,10 @@ if __name__ == "__main__":
parser.add_argument("-f", "--gps_file", required=True, help="GPS csv file.") parser.add_argument("-f", "--gps_file", required=True, help="GPS csv file.")
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.") parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.") parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
parser.add_argument("-a", "--column", required=True, help="Column to plot")
parser.add_argument("-l", "--label", help="Label above the plot.")
parser.add_argument("--no_legend", action="store_false", default=True, help="Do not show legend.")
parser.add_argument("--save", default=None, help="Location to save pdf file.") parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument("--time_offset", default=None, type=int, help="Minutes added to GPS datetime.") parser.add_argument("--time_offset", default=0, type=int, help="Minutes added to GPS datetime.")
parser.add_argument("--no_plot", default=False, action="store_true", help="Only calculations without plotting.") parser.add_argument("--neg_offset", default=False, action="store_true", help="Subtract GPS time offset.")
parser.add_argument("--auto_offset", default=False, action="store_true", help="Calculate GPS time offset.")
parser.add_argument(
"--show_providerinfo",
default=False,
help="Show providerinfo for map tiles an zoom levels.",
)
parser.add_argument( parser.add_argument(
"-c", "-c",
"--cores", "--cores",
@@ -141,10 +134,24 @@ if __name__ == "__main__":
# load dataframe an put it into geopandas # load dataframe an put it into geopandas
df = pd.read_csv(args.gps_file) df = pd.read_csv(args.gps_file)
df["kmh"] = df["speed (knots)"].apply(lambda x: x * 1.852) df["kmh"] = df["speed (knots)"].apply(lambda x: x * 1.852)
if args.time_offset: if not args.auto_offset and args.time_offset > 0:
df["datetime"] = pd.to_datetime(df["datetime"]) + pd.Timedelta(minutes=args.time_offset) if args.neg_offset:
df["datetime"] = pd.to_datetime(df["datetime"]) - pd.Timedelta(minutes=args.time_offset)
else:
df["datetime"] = pd.to_datetime(df["datetime"]) + pd.Timedelta(minutes=args.time_offset)
elif args.auto_offset:
gps_first = datetime.strptime(df["datetime"].iloc[0], "%Y-%m-%d %H:%M:%S.%f")
pcap_first = pd.to_datetime(transmission_df.first_valid_index())
calc_offset = gps_first - pcap_first
if gps_first > pcap_first:
time_offset = gps_first - pcap_first
df["datetime"] = pd.to_datetime(df["datetime"]) - time_offset
else:
time_offset = pcap_first - gps_first
df["datetime"] = pd.to_datetime(df["datetime"]) + time_offset
else: else:
df["datetime"] = pd.to_datetime(df["datetime"]) df["datetime"] = pd.to_datetime(df["datetime"])
df = df.set_index("datetime") df = df.set_index("datetime")
df.index = pd.to_datetime(df.index) df.index = pd.to_datetime(df.index)
@@ -180,43 +187,6 @@ if __name__ == "__main__":
df_wm = gdf.to_crs(epsg=3857) df_wm = gdf.to_crs(epsg=3857)
#df_wm.to_csv("debug-data.csv") #df_wm.to_csv("debug-data.csv")
# ax2 = df_wm.plot(figsize=(10, 10), alpha=0.5, edgecolor='k') # ax2 = df_wm.plot(figsize=(10, 10), alpha=0.5, edgecolor='k')
if args.no_plot:
df_wm.to_csv("{}gps_plot.csv".format(args.save))
print("Saved calculations to: {}gps_plot.csv".format(args.save))
exit(0)
print("Start plotting...") df_wm.to_csv("{}gps_plot.csv".format(args.save))
print("Saved calculations to: {}gps_plot.csv".format(args.save))
ax2 = df_wm.plot()
ax2 = df_wm.plot(args.column, cmap="hot", legend=args.no_legend, ax=ax2)
# ax2 = df_wm.plot.scatter(x="longitude", y="latitude", c="kmh", cmap="hot")
# zoom 17 is pretty
cx.add_basemap(ax2, source=cx.providers.OpenStreetMap.Mapnik, zoom=15)
# gdf.plot()
ax2.set_axis_off()
ax2.set_title(args.label if args.label else args.column)
if args.show_providerinfo:
#####################################
# Identifying how many tiles
latlon_outline = gdf.to_crs("epsg:4326").total_bounds
def_zoom = cx.tile._calculate_zoom(*latlon_outline)
print(f"Default Zoom level {def_zoom}")
cx.howmany(*latlon_outline, def_zoom, ll=True)
cx.howmany(*latlon_outline, def_zoom + 1, ll=True)
cx.howmany(*latlon_outline, def_zoom + 2, ll=True)
# Checking out some of the other providers and tiles
print(cx.providers.CartoDB.Voyager)
print(cx.providers.Stamen.TonerLite)
print(cx.providers.Stamen.keys())
#####################################
# df.plot(x="longitude", y="latitude", kind="scatter", colormap="YlOrRd")
if args.save:
plt.savefig("{}gps_plot.pdf".format(args.save))
else:
plt.show()

228
cdf_compare.py Executable file
View File

@@ -0,0 +1,228 @@
#!/usr/bin/env python3
import multiprocessing
import os
import pickle
from argparse import ArgumentParser
from math import ceil
from time import sleep
import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits import axisartist
from mpl_toolkits.axes_grid1 import host_subplot
def csv_to_dataframe(csv_list, folder, dummy):
    """Worker: read a chunk of pcap csv files into one combined dataframe.

    Appends the combined, column-filtered frame to the shared ``frame_list``
    and bumps the shared progress counter ``n`` (both multiprocessing
    Manager proxies created in the main module).

    Args:
        csv_list: filenames (relative to *folder*) to read.
        folder: folder path the filenames are appended to verbatim.
        dummy: unused; kept so the Process target signature stays stable.
    """
    global n
    global frame_list
    combined = None
    for csv in csv_list:
        per_file_df = pd.read_csv(
            "{}{}".format(folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        # Shift timestamps back by one hour (logs appear to be UTC+1 --
        # TODO confirm) and use them as a sorted DatetimeIndex.
        per_file_df["datetime"] = pd.to_datetime(
            per_file_df["datetime"]
        ) - pd.Timedelta(hours=1)
        per_file_df = per_file_df.set_index("datetime")
        per_file_df.index = pd.to_datetime(per_file_df.index)
        # Bugfix: the accumulator and the per-file frame previously shared
        # one variable, so pd.concat([tmp_df, tmp_df]) duplicated the current
        # file and every earlier file was discarded -- only the last csv
        # (doubled) survived the loop.
        if combined is None:
            combined = per_file_df
        else:
            combined = pd.concat([combined, per_file_df])
        n.value += 1
    if combined is None:
        # Empty chunk: nothing to contribute (original crashed on .filter(None)).
        return
    combined = combined.filter(
        ["srtt", "datetime", "payload_size", "congestion_control", "direction"])
    frame_list.append(combined)
    del combined
from itertools import islice
def chunk(it, size):
    """Yield successive tuples of at most *size* items taken from *it*.

    The final tuple may be shorter than *size*; an empty iterable yields
    nothing.
    """
    source = iter(it)
    while True:
        piece = tuple(islice(source, size))
        if not piece:
            return
        yield piece
def plot_cdf(dataframe, column_name, axis=None):
    """Plot the empirical CDF of *column_name* from *dataframe*.

    Draws onto *axis* when one is supplied, otherwise lets pandas open a
    new figure.
    """
    # Frequency of each distinct value, ordered by value (groupby sorts keys).
    counts = (
        dataframe.groupby(column_name)[column_name]
        .agg("count")
        .pipe(pd.DataFrame)
        .rename(columns={column_name: "frequency"})
    )
    # PDF, then cumulative sum for the CDF.
    counts["PDF"] = counts["frequency"] / sum(counts["frequency"])
    counts["CDF"] = counts["PDF"].cumsum()
    counts = counts.reset_index()
    plot_kwargs = dict(x=column_name, y=["CDF"], grid=True)
    if axis:
        plot_kwargs["ax"] = axis
    counts.plot(**plot_kwargs)
    del counts
if __name__ == "__main__":
    # Compare sRTT / goodput CDFs across several measurement folders
    # (typically one per congestion-control algorithm).
    parser = ArgumentParser()
    parser.add_argument("-s", "--serials", required=True, help="Serial csv files. Comma separated.")
    parser.add_argument("-f", "--folders", required=True, help="PCAP csv folders. Comma separated.")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")
    parser.add_argument(
        "-c",
        "--cores",
        default=1,
        type=int,
        help="Number of cores for multiprocessing.",
    )
    parser.add_argument(
        "-i",
        "--interval",
        default=2,
        type=int,
        help="Time interval for rolling window.",
    )
    args = parser.parse_args()
    transmission_df_list = list()
    # One pass per measurement folder; each pass produces one combined frame.
    for f in args.folders.split(","):
        # Shared state for the worker processes: progress counter + results.
        manager = multiprocessing.Manager()
        n = manager.Value("i", 0)
        frame_list = manager.list()
        jobs = []
        # load all pcap csv into one dataframe
        pcap_csv_list = list()
        for filename in os.listdir(f):
            if filename.endswith(".csv") and "tcp" in filename:
                pcap_csv_list.append(filename)
        # One chunk of files per core.
        parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
        print("Start processing with {} jobs.".format(args.cores))
        for p in parts:
            process = multiprocessing.Process(target=csv_to_dataframe, args=(p, f, "dummy"))
            jobs.append(process)
        for j in jobs:
            j.start()
        print("Started all jobs.")
        # Ensure all the processes have finished
        finished_job_counter = 0
        working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]  # spinner frames
        w = 0
        while len(jobs) != finished_job_counter:
            sleep(1)
            print(
                "\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
                    working[w],
                    working[w],
                    working[w],
                    len(jobs),
                    finished_job_counter,
                    n.value,
                    len(pcap_csv_list),
                    round((n.value / len(pcap_csv_list)) * 100, 2),
                ),
                end="",
            )
            # Re-count finished workers on every poll.
            finished_job_counter = 0
            for j in jobs:
                if not j.is_alive():
                    finished_job_counter += 1
            # Advance the spinner index, wrapping around.
            if (w + 1) % len(working) == 0:
                w = 0
            else:
                w += 1
        print("\r\nSorting table...")
        transmission_df = pd.concat(frame_list)
        frame_list = None  # release the Manager list
        transmission_df = transmission_df.sort_index()
        #
        # Don't forget to add new columns to the filter argument in the function above!
        #
        # srtt: microseconds -> seconds
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
        # key for columns and level for index
        transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
        # bytes per interval -> Mbit/s
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10**6
        )
        transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        # set meta values (constant over one capture)
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]
        #transmission_df = transmission_df.filter(["srtt", "datetime", "srtt", "payload_size"])
        transmission_df = transmission_df.drop(columns=["congestion_control", "direction"])
        # read serial csv (currently disabled -- kept for reference)
        #serial_df = pd.read_csv(args.serial_file)
        #serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
        #serial_df = serial_df.set_index("datetime")
        #serial_df.index = pd.to_datetime(serial_df.index)
        #serial_df.sort_index()
        #transmission_df = pd.merge_asof(
        #    transmission_df,
        #    serial_df,
        #    tolerance=pd.Timedelta("1s"),
        #    right_index=True,
        #    left_index=True,
        #)
        transmission_df_list.append(dict(
            df=transmission_df,
            cc_algo=cc_algo,
            transmission_direction=transmission_direction
        ))
        del transmission_df
    # Plot sRTT CDF: first folder opens the figure, the rest share its axes.
    legend = list()
    plot_cdf(transmission_df_list[0]["df"], "srtt")
    legend.append(transmission_df_list[0]["cc_algo"])
    for i in range(1, len(transmission_df_list)):
        plot_cdf(transmission_df_list[i]["df"], "srtt", axis=plt.gca())
        legend.append(transmission_df_list[i]["cc_algo"])
    #plt.xscale("log")
    plt.xlim(0, 0.15)
    plt.xlabel("sRTT [s]")
    plt.ylabel("CDF")
    plt.legend(legend)
    plt.title("{}".format(transmission_df_list[0]["transmission_direction"]))
    plt.savefig("{}{}_cdf_compare_plot.pdf".format(args.save, "srtt"))
    plt.clf()
    # Plot goodput CDF (same pattern as above)
    legend = list()
    plot_cdf(transmission_df_list[0]["df"], "goodput_rolling")
    legend.append(transmission_df_list[0]["cc_algo"])
    for i in range(1, len(transmission_df_list)):
        plot_cdf(transmission_df_list[i]["df"], "goodput_rolling", axis=plt.gca())
        legend.append(transmission_df_list[i]["cc_algo"])
    plt.xlabel("goodput [mbps]")
    plt.ylabel("CDF")
    plt.legend(legend)
    plt.title("{}".format(transmission_df_list[0]["transmission_direction"]))
    plt.savefig("{}{}_cdf_compare_plot.pdf".format(args.save, "goodput"))

191
format_serial_txt_to_csv.py Normal file → Executable file
View File

@@ -0,0 +1,191 @@
#!/usr/bin/env python3
import csv
import datetime
from argparse import ArgumentParser
import pandas as pd
# Modem bandwidth index (UL_bandwidth / DL_bandwidth csv columns) -> MHz.
_BANDWIDTH_MHZ = {0: 1.4, 1: 3, 2: 5, 3: 10, 4: 15, 5: 20}


def convert_bandwidth(value):
    """Convert a modem bandwidth index to a bandwidth in MHz.

    Used as a pandas ``read_csv`` converter, so *value* is usually a string.

    Returns:
        The bandwidth in MHz for indices 0-5, else 0 (including values
        that cannot be parsed as an integer).
    """
    try:
        index = int(value)
    except (TypeError, ValueError):
        # Narrowed from a bare ``except:``; only the conversion can fail here.
        return 0
    return _BANDWIDTH_MHZ.get(index, 0)
if __name__ == "__main__":
    # Convert a semicolon-separated modem serial log (AT command dump) into
    # a csv with one row per poll and a datetime column.
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Input txt file.")
    args = parser.parse_args()
    file = open(args.file, "r")
    content = file.read()
    file.close()
    all_csv_lines = list()
    csv_header = ["time"]
    # h1..h3: whether the header fields for the respective AT command
    # have already been appended (done once, on first sight).
    h1 = False
    h2 = False
    h3 = False
    for line in content.split("\n"):
        if line == "" or line == "\n":
            break
        # Record layout: "<epoch>;<AT cmd>;<response>;<AT cmd>;<response>;..."
        raw_columns = line.split(";")
        csv_line = list()
        csv_line.append(raw_columns[0])
        for i in range(1, len(raw_columns)):
            col = raw_columns[i]
            if 'AT+QNWCFG="nr5g_csi"' in col:
                if not h1:
                    csv_header += ["mcs_PDSCH", "ri_PDSCH", "downlink_cqi", "pmi"]
                    h1 = True
                # The response follows the command in the next column.
                tmp = raw_columns[i + 1].replace('+QNWCFG: "nr5g_csi",', "")
                csv_line += tmp.split(",")
            elif "AT+QENDC" in col:
                if not h2:
                    csv_header += [
                        "endc_avl",
                        "plmn_info_list_r15_avl",
                        "endc_rstr",
                        "5G_basic",
                    ]
                    h2 = True
                tmp = raw_columns[i + 1].replace("+QENDC: ", "")
                csv_line += tmp.split(",")
            elif 'AT+QENG="servingcell"' in col:
                if not h3:
                    # LTE anchor-cell fields followed by the NR5G-NSA fields.
                    csv_header += [
                        "connection_state",
                        "is_tdd",
                        "mcc",
                        "mnc",
                        "cellID",
                        "PCID",
                        "earfcn",
                        "freq_band_ind",
                        "UL_bandwidth",
                        "DL_bandwidth",
                        "TAC",
                        "RSRP",
                        "RSRQ",
                        "RSSI",
                        "SINR",
                        "CQI_1-30",
                        "tx_power",
                        "srxlev",
                        "MCC",
                        "MNC",
                        "PCID",
                        "RSRP",
                        "SINR",
                        "RSRQ",
                        "ARFCN",
                        "band",
                    ]
                    h3 = True
                if "NOCONN" in raw_columns[i + 1]:
                    # Connected-idle: LTE block and NR5G-NSA block follow.
                    csv_line.append("NOCONN")
                    csv_line += raw_columns[i + 2].replace('+QENG: "LTE",', "").split(",")
                    csv_line += (
                        raw_columns[i + 3].replace('+QENG:"NR5G-NSA",', "").split(",")
                    )
                elif "SEARCH" in raw_columns[i + 1]:
                    # No serving cell: pad the 25 cell fields with empties.
                    csv_line.append("SEARCH")
                    csv_line += [""] * 25
                elif "OK" == raw_columns[i + 1]:
                    csv_line.append("OK")
                    csv_line += [""] * 25
                else:
                    csv_line.append("undefined")
                    csv_line += [""] * 25
        all_csv_lines.append(csv_line)
    # Write the collected rows out next to the input file (.txt -> .csv).
    outputfile = open(args.file.replace("txt", "csv"), "w")
    writer = csv.writer(outputfile, delimiter=",", lineterminator="\n", escapechar='\\')
    writer.writerow(csv_header)
    #print(all_csv_lines)
    for l in all_csv_lines:
        #print(l)
        writer.writerow(l)
    outputfile.close()
    # Re-read the csv to convert the bandwidth index columns to MHz and add
    # a datetime column, then overwrite the csv in place.
    outputfile = open(args.file.replace("txt", "csv"), "r")
    serial_df = pd.read_csv(outputfile,
        converters={"UL_bandwidth": convert_bandwidth, "DL_bandwidth": convert_bandwidth},
    )
    serial_df = serial_df.drop(columns=["MCC", "MNC"])
    serial_df["datetime"] = pd.to_datetime(
        serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(x))
    )
    serial_df.to_csv(args.file.replace("txt", "csv"))
    outputfile.close()
    exit()
    # NOTE(review): everything below is unreachable dead code (exit() above)
    # -- a legacy string-replacement parser.  It also references StringIO,
    # which is never imported; delete it or restore it deliberately.
    delete_string = [
        'AT+QNWCFG="nr5g_csi";',
        '+QNWCFG: "nr5g_csi"',
        'AT+QENG="servingcell";+QENG: "servingcell",',
        "+QENG:",
        "AT+QENDC;+QENDC:",
    ]
    for d in delete_string:
        content = content.replace(d, ",")
    content = (
        content.replace(";", "")
        .replace(" ", "")
        .replace(",,,", ",")
        .replace('"', "")
        .replace("LTE,", "")
        .replace("NR5G-NSA,", "")
    )
    header = "time,mcs,ri,cqi,pmi,conn_state,is_tdd,MCC,MNC,cellID,PCID,earfcn,freq_band_ind,UL_bandwidth,DL_bandwidth,TAC,RSRP,RSRQ,RSSI,SINR,CQI,tx_power,srxlev,MCC,MNC,PCID,RSRP,SINR,RSRQ,ARFCN,band,endc_avl,plmn_info_list_r15_avl,endc_rstr,5G_basic\n"
    csv_path = args.file.replace("txt", "csv")
    print("Write to: {}".format(csv_path))
    csv_string = header
    for csv_line in content.split("\n"):
        # Keep only lines whose column count matches the header.
        if len(header.split(",")) == len(csv_line.split(",")):
            csv_string += csv_line + "\n"
        else:
            # print("{} found {}".format(len(header.split(",")), len(csv_line.split(","))))
            print("Could not interpret string: {}".format(csv_line))
            print(
                "Expect {} columns got {}".format(
                    len(header.split(",")), len(csv_line.split(","))
                )
            )
    csv_string_io = StringIO(csv_string)
    serial_df = pd.read_csv(csv_string_io)
    serial_df = serial_df
    serial_df["datetime"] = pd.to_datetime(
        serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(x))
    )
    serial_df.to_csv(csv_path)
    print(serial_df)

View File

@@ -0,0 +1,57 @@
#!/usr/bin/env python3
import datetime
import re
from argparse import ArgumentParser
import pandas as pd
# Matches "key: value" pairs in the modem status output.
KEY_VALUE_REGEX = r"(.+):(.+)"

if __name__ == "__main__":
    # Parse a ";;;"-separated modem status dump (gstatus-style output) into
    # a csv: one row per record, one column per key.
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Input txt file.")
    args = parser.parse_args()
    file = open(args.file, "r")
    content = file.read()
    file.close()
    serial_df = None
    p = re.compile(KEY_VALUE_REGEX)
    # Records are separated by ";;;"; within a record, fields by tabs.
    for part in content.split(";;;"):
        if part == "":
            break
        part = part.replace("\t", "\n").strip()
        time = None
        # Column -> single-value-list mapping for this record.
        line_dict = dict(time=None)
        for line in part.split("\n"):
            if not line.startswith("!") or line == "" or line == "\n":
                # First non-"!" line of the record is taken as the timestamp.
                if line_dict["time"] is None:
                    time = line
                    line_dict["time"] = [time]
            m = p.match(line)
            if m:
                # Keys become identifier-like; values lose the "MHz" unit
                # and "---" placeholders.
                key = m.group(1).strip().replace(" ", "_")
                value = m.group(2).replace("MHz", "").replace("---", "").strip()
                line_dict[key] = [value]
        # Only keep records that contained at least one key:value pair.
        if len(line_dict) > 1:
            #print("line:")
            #print(line_dict)
            #print("serial_df:")
            #print(serial_df)
            if serial_df is None:
                serial_df = pd.DataFrame.from_dict(line_dict, orient="columns",)
            else:
                serial_df = pd.concat([serial_df, pd.DataFrame.from_dict(line_dict, orient="columns")])
                # Defragment after the repeated concat.
                serial_df = serial_df.copy()
    # Timestamp is a unix epoch string -- convert to datetime and save.
    serial_df["datetime"] = pd.to_datetime(
        serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(int(x)))
    )
    serial_df.to_csv(args.file.replace("txt", "csv"))
    #serial_df = serial_df.filter(["datetime", "LTE_bw", "LTE_SCC2_bw", "LTE_SCC3_bw", "LTE_SCC4_bw", "SCC1_NR5G_bw", "NR5G_dl_bw", "NR5G_ul_bw", "LTE_SCC1_bw", "NR5G_bw"])
    #print(serial_df.to_string())

View File

@@ -188,7 +188,7 @@ def format_pcaps_to_csv(pcaps, dummy):
pcap_df = pcap_df.sort_values("arrival_time") pcap_df = pcap_df.sort_values("arrival_time")
try: try:
# join tcp_trace data with pcap data # join tcp_trace data with pcap data
merge_srtt = False merge_srtt = True
if merge_srtt: if merge_srtt:
tcp_trace_df = format_tcp_trace_to_csv( tcp_trace_df = format_tcp_trace_to_csv(
pcap_number, pcap_number,
@@ -202,7 +202,7 @@ def format_pcaps_to_csv(pcaps, dummy):
pcap_number pcap_number
) )
) )
break continue ## break before but stoped the thread
merged_df = pd.merge_asof( merged_df = pd.merge_asof(
pcap_df.loc[pcap_df["src_ip"] != args.server], pcap_df.loc[pcap_df["src_ip"] != args.server],
tcp_trace_df, tcp_trace_df,

View File

@@ -23,6 +23,7 @@ GET_IPV4_SHELL_COMMAND = "ip a | grep {} | grep inet | cut -d' ' -f6 | cut -d'/'
NR_CQI_COMMAND = b'AT+QNWCFG="nr5g_csi"\r\n' NR_CQI_COMMAND = b'AT+QNWCFG="nr5g_csi"\r\n'
NR_SERVINGCELL_COMMAND = b'AT+QENG="servingcell"\r\n' NR_SERVINGCELL_COMMAND = b'AT+QENG="servingcell"\r\n'
NR_EN_DC_STATUS_COMMAND = b"AT+QENDC\r\n" NR_EN_DC_STATUS_COMMAND = b"AT+QENDC\r\n"
NE_CA_COMMAND = b'AT+QCAINFO\r\n'
NR_SERIAL_RESPOND_TIME = 0.5 # s NR_SERIAL_RESPOND_TIME = 0.5 # s
CMD_TIME_EPOCH = "date +%s" CMD_TIME_EPOCH = "date +%s"
TIMEOUT_OFFSET = 10.0 TIMEOUT_OFFSET = 10.0
@@ -31,6 +32,8 @@ WAIT_AFTER_IPERF = 5.0
modem_serial_obj = None modem_serial_obj = None
gps_serial_obj = None gps_serial_obj = None
MODEM_MODEL = None
class ProcessHandler: class ProcessHandler:
def __init__(self): def __init__(self):
@@ -219,7 +222,8 @@ def raise_receive_window():
def monitor_serial(ser, output_file): def monitor_serial(ser, output_file):
run_cmds = [NR_CQI_COMMAND, NR_SERVINGCELL_COMMAND, NR_EN_DC_STATUS_COMMAND] #run_cmds = [NR_CQI_COMMAND, NR_SERVINGCELL_COMMAND, NR_EN_DC_STATUS_COMMAND, NE_CA_COMMAND]
run_cmds = [b"at!gstatus?\r\n", b"AT!NRINFO?\r\n"]
try: try:
while ser.is_open: while ser.is_open:
response = subprocess.check_output(CMD_TIME_EPOCH, shell=True).decode( response = subprocess.check_output(CMD_TIME_EPOCH, shell=True).decode(
@@ -229,13 +233,13 @@ def monitor_serial(ser, output_file):
ser.write(cmd) ser.write(cmd)
sleep(NR_SERIAL_RESPOND_TIME) sleep(NR_SERIAL_RESPOND_TIME)
response += ser.read(ser.inWaiting()).decode("utf-8") response += ser.read(ser.inWaiting()).decode("utf-8")
response = ( #response = (
response.replace("\n", ";") # response.replace("\n", ";")
.replace("\r", "") # .replace("\r", "")
.replace(";;OK", ";") # .replace(";;OK", ";")
.replace(";;", ";") # .replace(";;", ";")
) #)
write_to_file(output_file, response + "\n") write_to_file(output_file, response + ";;;\n")
except: except:
if not ser.is_open: if not ser.is_open:
print_message("Serial port is closed. Exit monitoring thread.") print_message("Serial port is closed. Exit monitoring thread.")
@@ -319,11 +323,20 @@ def monitor_gps(ser, output_file):
def connect_moden(provider="telekom"): def connect_moden(provider="telekom"):
print_message("Connect modem with provider {} ...".format(provider)) print_message("Connect modem with provider {} ...".format(provider))
os.system("/root/connect-modem.py -l {}".format(provider)) if MODEM_MODEL == "EM9191":
os.system("/root/connection_mbim.py -l {}".format(provider))
else:
os.system("/root/connect-modem.py -l {}".format(provider))
print_message("...done") print_message("...done")
def reconnect_modem(provider="telekom", hard=False): def reconnect_modem(provider="telekom", hard=False):
#TODO
os.system("/root/connection_mbim.py -s")
sleep(2)
os.system("/root/connection_mbim.py -l {}".format(provider))
return
global modem_serial_obj global modem_serial_obj
print_message("Reonnect modem with provider {} ...".format(provider)) print_message("Reonnect modem with provider {} ...".format(provider))
if hard: if hard:
@@ -1200,6 +1213,7 @@ if __name__ == "__main__":
default=None, default=None,
help="Start in client mode and set the server IPv4 address.", help="Start in client mode and set the server IPv4 address.",
) )
parser.add_argument("--modem", default="EM9191", help="Modem model name.")
parser.add_argument( parser.add_argument(
"--prefix", default=now.strftime("%Y-%m-%d"), help="Prefix on filename." "--prefix", default=now.strftime("%Y-%m-%d"), help="Prefix on filename."
) )
@@ -1302,6 +1316,7 @@ if __name__ == "__main__":
args = parser.parse_args() args = parser.parse_args()
disable_tso(args.interface) disable_tso(args.interface)
MODEM_MODEL = args.modem
if args.server: if args.server:
asyncio.run(start_server(args)) asyncio.run(start_server(args))

85
plot_gps_csv.py Executable file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env python3
"""Render one column of a measurement-drive CSV as colored points on an
OpenStreetMap basemap (web-mercator projection)."""
from argparse import ArgumentParser
import numpy as np
import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt
import seaborn as sns

sns.set()
#sns.set(font_scale=1.5)

if __name__ == "__main__":
    cli = ArgumentParser()
    cli.add_argument("-f", "--file", required=True, help="Messfahrt csv")
    cli.add_argument("-a", "--column", required=True, help="Column to plot")
    cli.add_argument("-l", "--label", help="Label above the plot.")
    cli.add_argument("--no_legend", action="store_false", default=True, help="Do not show legend.")
    cli.add_argument("--save", default=None, help="Location to save pdf file.")
    cli.add_argument(
        "--show_providerinfo",
        default=False,
        help="Show providerinfo for map tiles an zoom levels.",
    )
    args = cli.parse_args()

    # Load the drive log and attach point geometry built from lon/lat (WGS84).
    table = pd.read_csv(args.file)
    geo_table = gpd.GeoDataFrame(
        table,
        geometry=gpd.points_from_xy(table["longitude"], table["latitude"]),
        crs="EPSG:4326",
    )
    # sRTT: microseconds -> seconds.
    geo_table["srtt"] = geo_table["srtt"].apply(lambda v: v / 10 ** 6)
    # True rows become NaN and are dropped; remainder is cast to float.
    geo_table["is_retranmission"] = (
        geo_table["is_retranmission"].replace(True, np.NaN).dropna().astype(float)
    )

    print("Start plotting...")
    # Reproject to web mercator so the OSM tiles line up with the points.
    mercator_table = geo_table.to_crs(epsg=3857)
    map_ax = mercator_table.plot()
    map_ax = mercator_table.plot(
        column=args.column,
        cmap="hot",
        legend=args.no_legend,
        ax=map_ax,
        legend_kwds={"label": args.label},
    )
    # zoom 17 is pretty
    cx.add_basemap(map_ax, source=cx.providers.OpenStreetMap.Mapnik, zoom=17)
    map_ax.set_axis_off()
    if args.no_legend:
        # Legend shown: style the colorbar instead of setting a title.
        figure = map_ax.figure
        colorbar_axis = figure.axes[0]
        colorbar_axis.set_label(args.label)
        colorbar_axis.tick_params(labelsize=30)
    else:
        map_ax.set_title(args.label if args.label else args.column)

    if args.show_providerinfo:
        # Report how many tiles the default zoom and the next two would fetch.
        latlon_outline = geo_table.to_crs("epsg:4326").total_bounds
        def_zoom = cx.tile._calculate_zoom(*latlon_outline)
        print(f"Default Zoom level {def_zoom}")
        for zoom_offset in (0, 1, 2):
            cx.howmany(*latlon_outline, def_zoom + zoom_offset, ll=True)
        # Checking out some of the other providers and tiles
        print(cx.providers.CartoDB.Voyager)
        print(cx.providers.Stamen.TonerLite)
        print(cx.providers.Stamen.keys())

    if args.save:
        plt.savefig("{}gps_plot.eps".format(args.save), bbox_inches="tight")
    else:
        plt.show()

View File

166
plot_single_transmission.py Executable file
View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
# Plots one figure per pcap CSV: goodput, cwnd and sRTT on top, and modem
# radio metrics (CQI, bandwidth, handovers) below, merged from a serial log.
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
# Using seaborn's style
#plt.style.use('seaborn')
# rcParams prepared for LaTeX-quality output; currently unused because the
# rcParams.update call below is commented out.
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
#plt.rcParams.update(tex_fonts)
# NOTE(review): this script appears with its indentation stripped (scraped
# page); the comments below follow the apparent logical structure.
if __name__ == "__main__":
# CLI: serial-log CSV, folder of per-transmission pcap CSVs, output folder,
# and the window length (seconds) used for goodput aggregation.
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
# Collect all per-transmission TCP pcap CSVs in the folder.
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
# Sort numerically by the trailing "_<n>.csv" counter in the filename.
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
# NOTE(review): the percentage lacks a *100, so this prints 0% for every
# file except the last one.
print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor(counter/len(pcap_csv_list))))
#try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
# Shift timestamps by one hour -- presumably local-time (CET) vs. UTC
# alignment with the serial log; TODO confirm.
transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6)
# key for columns and level for index
# Goodput summed per fixed time bucket, then converted to Mbit/s.
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10**6
)
# Same value as a rolling window (smoother curve for plotting).
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
#transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
serial_df = pd.read_csv(args.serial_file)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
# NOTE(review): sort_index() is not in-place; its result is discarded here.
serial_df.sort_index()
# Nearest-match join (within 1 s) of modem metrics onto each packet row.
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
# The merge yields duplicate "PCID" columns: first LTE, second ("PCID.1") NR.
transmission_df = transmission_df.rename(columns={"PCID": "lte_pcid", "PCID.1": "nr_pcid"})
transmission_df.index = transmission_df["arrival_time"]
# transmission timeline
scaley = 1.5
scalex = 1.0
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
fig.subplots_adjust(right=0.75)
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
# Upper panel: cwnd (ax0), sRTT (ax1) and goodput (ax2) share one x axis.
ax0 = ax[0]
ax1 = ax0.twinx()
ax2 = ax0.twinx()
#ax2.spines.right.set_position(("axes", 1.22))
# Lower panel: CQI (ax00) and DL bandwidth (ax01).
ax00 = ax[1]
ax01 = ax00.twinx()
# Plot vertical lines
# A positive jump in the physical cell id is treated as a handover.
lte_handovers = transmission_df["lte_pcid"].diff().dropna()
for index, value in lte_handovers.items():
if value > 0:
ax00.axvline(index, ymin=0, ymax=1, color="skyblue", label="4G Handover")
nr_handovers = transmission_df["nr_pcid"].diff().dropna()
for index, value in nr_handovers.items():
if value > 0:
ax00.axvline(index, ymin=0, ymax=1, color="greenyellow", label="5G Handover")
ax0.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd")
ax1.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT")
ax2.plot(transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput")
ax00.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI")
ax01.plot(transmission_df["DL_bandwidth"].dropna(), color="peru", linestyle="dotted", label="bandwidth")
# Push the third y axis outwards so it does not overlap the second one.
ax2.spines.right.set_position(("axes", 1.1))
ax0.set_ylim(0, 5000)
ax1.set_ylim(0, 0.3)
ax2.set_ylim(0, 500)
ax00.set_ylim(0, 16)
ax01.set_ylim(0, 21)
ax00.set_xlabel("arrival time [s]")
ax2.set_ylabel("Goodput [mbps]")
ax00.set_ylabel("CQI")
ax1.set_ylabel("sRTT [s]")
ax0.set_ylabel("cwnd")
ax01.set_ylabel("Bandwidth [MHz]")
fig.legend(loc="lower right")
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
#except Exception as e:
# print("Error processing file: {}".format(csv))
# print(str(e))
counter += 1
# Free the figure to keep memory bounded across iterations.
plt.close(fig)
plt.clf()

View File

@@ -0,0 +1,343 @@
#!/usr/bin/env python3
# Per-transmission plots combining pcap-derived TCP metrics with modem radio
# metrics (RSRQ, per-carrier bandwidths, handovers) from the serial log.
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Apply seaborn's default plot styling globally.
sns.set()
#sns.set(font_scale=1.5)
# rcParams prepared for LaTeX output; unused while the update call below is
# commented out.
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
    """Parse a Cell_ID CSV field into an int.

    The serial log stores cell IDs with the decimal value as the last
    whitespace-separated token, wrapped in parentheses; that token is
    extracted.  Any non-string or unparsable value maps to -1 so the
    resulting column stays numeric.
    """
    if not isinstance(value, str):
        return -1
    try:
        # Last token, with the surrounding parentheses stripped.
        return int(value.split(" ")[-1].replace("(", "").replace(")", ""))
    except ValueError:
        # Token was not a decimal number.
        return -1
# NOTE(review): this script appears with its indentation stripped (scraped
# page); the comments below follow the apparent logical structure.
if __name__ == "__main__":
# CLI: serial-log CSV, folder of per-transmission pcap CSVs, output folder,
# optional "fancy" styling, and the goodput aggregation window in seconds.
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument(
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
)
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
# Collect all per-transmission TCP pcap CSVs in the folder.
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
# Sort numerically by the trailing "_<n>.csv" counter in the filename.
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
# NOTE(review): the percentage lacks a *100, so this prints 0% for every
# file except the last one.
print(
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list))
)
)
# try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
# Shift timestamps by one hour -- presumably local-time (CET) vs. UTC
# alignment with the serial log; TODO confirm.
transmission_df["datetime"] = pd.to_datetime(
transmission_df["datetime"]
) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
# Goodput summed per fixed time bucket, then converted to Mbit/s.
transmission_df["goodput"] = (
transmission_df["payload_size"]
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
.transform("sum")
)
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# Same value as a rolling window (smoother curve for plotting).
transmission_df["goodput_rolling"] = (
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
)
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
# Cell_ID fields are parsed to ints (or -1) via convert_cellid.
serial_df = pd.read_csv(
args.serial_file, converters={"Cell_ID": convert_cellid}
)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
hours=1
)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
# NOTE(review): sort_index() is not in-place; its result is discarded here.
serial_df.sort_index()
# print(serial_df["Cell_ID"])
# serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
# lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))
# Nearest-match join (within 1 s) of modem metrics onto each packet row.
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
transmission_df.index = transmission_df["arrival_time"]
# replace 0 in RSRQ with Nan
transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
0, np.NaN
)
transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)
# Per-SCC effective bandwidth: only count a carrier while its state is ACTIVE.
for i in range(1, 5):
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_bw".format(i)
]
mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# Zero out SCCs configured for uplink (mask keeps UL_Configured == False).
# NOTE(review): .isin([False]) matches the boolean False only -- verify the
# CSV column is not read as the string "False".
for i in range(1, 5):
mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# sum all effective bandwidth for 5G and 4G
transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
"SCC1_NR5G_bw"
].fillna(0)
transmission_df["lte_effective_bw_sum"] = (
transmission_df["LTE_SCC1_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC2_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC3_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC4_effective_bw"].fillna(0)
+ transmission_df["LTE_bw"].fillna(0))
transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]
transmission_df["effective_bw_sum"] = transmission_df["nr_effective_bw_sum"] + transmission_df[
"lte_effective_bw_sum"]
# transmission timeline
scaley = 1.5
scalex = 1.0
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
fig.subplots_adjust(right=0.75)
if not args.fancy:
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
# Upper panel: cwnd (ax0), sRTT (ax1) and goodput (ax2) share one x axis;
# lower panel: RSRQ (ax00/ax02) and bandwidth (ax01).
ax0 = ax[0]
ax1 = ax0.twinx()
ax2 = ax0.twinx()
# ax2.spines.right.set_position(("axes", 1.22))
ax00 = ax[1]
ax01 = ax00.twinx()
ax02 = ax00.twinx()
# Plot vertical lines
# A positive jump in the cell id is treated as a handover; only the first
# axvline carries a label so the legend shows each entry once.
first = True
lte_handovers = transmission_df["Cell_ID"].dropna().diff()
lte_hanover_plot = None
for index, value in lte_handovers.items():
if value > 0:
if first:
lte_hanover_plot = ax00.axvline(
index, ymin=0, ymax=1, color="skyblue", label="4G Handover"
)
first = False
else:
ax00.axvline(index, ymin=0, ymax=1, color="skyblue")
first = True
nr_handovers = (
transmission_df["NR5G_Cell_ID"].replace(0, np.NaN).dropna().diff()
)
nr_hanover_plot = None
for index, value in nr_handovers.items():
if value > 0:
if first:
nr_hanover_plot = ax00.axvline(
index, ymin=0, ymax=1, color="greenyellow", label="5G Handover"
)
first = False
else:
ax00.axvline(index, ymin=0, ymax=1, color="greenyellow")
snd_plot = ax0.plot(
transmission_df["snd_cwnd"].dropna(),
color="lime",
linestyle="dashed",
label="cwnd",
)
srtt_plot = ax1.plot(
transmission_df["srtt"].dropna(),
color="red",
linestyle="dashdot",
label="sRTT",
)
goodput_plot = ax2.plot(
transmission_df["goodput_rolling"],
color="blue",
linestyle="solid",
label="goodput",
)
# ax2.plot(transmission_df["goodput"], color="blue", linestyle="solid", label="goodput")
eff_bw_plot = ax01.plot(
transmission_df["effective_bw_sum"].dropna(),
color="peru",
linestyle="solid",
label="bandwidth",
)
lte_eff_bw_plot = ax01.plot(
transmission_df["lte_effective_bw_sum"].dropna(),
color="lightsteelblue",
linestyle="solid",
label="4G bandwidth",
alpha=0.5,
)
nr_eff_bw_plot = ax01.plot(
transmission_df["nr_effective_bw_sum"].dropna(),
color="cornflowerblue",
linestyle="solid",
label="5G bandwidth",
alpha=0.5,
)
# ax01.stackplot(transmission_df["arrival_time"].to_list(),
# [transmission_df["lte_bw_sum"].to_list(), transmission_df["nr_bw_sum"].to_list()],
# colors=["lightsteelblue", "cornflowerblue"],
# labels=["4G bandwidth", "5G bandwidth"]
# )
lte_rsrq_plot = ax02.plot(
transmission_df["RSRQ_(dB)"].dropna(),
color="purple",
linestyle="dotted",
label="LTE RSRQ",
)
nr_rsrq_plot = ax00.plot(
transmission_df["NR5G_RSRQ_(dB)"].dropna(),
color="magenta",
linestyle="dotted",
label="NR RSRQ",
)
# Push the extra y axes outwards so they do not overlap.
ax2.spines.right.set_position(("axes", 1.1))
ax02.spines.right.set_position(("axes", 1.1))
ax0.set_ylim(0, 5000)
ax1.set_ylim(0, 0.3)
ax2.set_ylim(0, 600)
ax00.set_ylim(-25, 0)
ax01.set_ylim(0, 250)
# second dB axis
ax02.set_ylim(-25, 0)
ax02.set_axis_off()
ax00.set_xlabel("arrival time [s]")
ax2.set_ylabel("Goodput [mbps]")
ax00.set_ylabel("LTE/NR RSRQ [dB]")
# ax02.set_ylabel("LTE RSRQ [dB]")
ax1.set_ylabel("sRTT [s]")
ax0.set_ylabel("cwnd [MSS]")
ax01.set_ylabel("Bandwidth [MHz]")
if args.fancy:
# Fancy mode: frameless per-axes legends combining all line handles.
legend_frame = False
ax0.set_xlim([0, transmission_df.index[-1]])
ax00.set_xlim([0, transmission_df.index[-1]])
# added these three lines
lns_ax0 = snd_plot + srtt_plot + goodput_plot
labs_ax0 = [l.get_label() for l in lns_ax0]
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
#ax0.set_zorder(100)
lns_ax00 = eff_bw_plot + lte_eff_bw_plot + nr_eff_bw_plot + lte_rsrq_plot + nr_rsrq_plot
if lte_hanover_plot:
lns_ax00.append(lte_hanover_plot)
if nr_hanover_plot:
lns_ax00.append(nr_hanover_plot)
labs_ax00 = [l.get_label() for l in lns_ax00]
ax02.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
#ax00.set_zorder(100)
plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
else:
fig.legend(loc="lower right")
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
# except Exception as e:
# print("Error processing file: {}".format(csv))
# print(str(e))
counter += 1
# Free the figure to keep memory bounded across iterations.
plt.close(fig)
plt.clf()

269
plot_single_transmission_paper.py Executable file
View File

@@ -0,0 +1,269 @@
#!/usr/bin/env python3
# Paper variant of the transmission plot: TCP metrics on top, stacked
# per-carrier bandwidth area plot below.
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Apply seaborn's default plot styling globally.
sns.set()
#sns.set(font_scale=1.5)
# rcParams prepared for LaTeX output; unused while the update call below is
# commented out.
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
    """Parse a Cell_ID CSV field into an int.

    The serial log stores cell IDs with the decimal value as the last
    whitespace-separated token, wrapped in parentheses; that token is
    extracted.  Any non-string or unparsable value maps to -1 so the
    resulting column stays numeric.
    """
    if not isinstance(value, str):
        return -1
    try:
        # Last token, with the surrounding parentheses stripped.
        return int(value.split(" ")[-1].replace("(", "").replace(")", ""))
    except ValueError:
        # Token was not a decimal number.
        return -1
# NOTE(review): this script appears with its indentation stripped (scraped
# page); the comments below follow the apparent logical structure.
if __name__ == "__main__":
# CLI: serial-log CSV, folder of per-transmission pcap CSVs, output folder,
# optional "fancy" styling, and the goodput aggregation window in seconds.
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument(
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
)
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
# Collect all per-transmission TCP pcap CSVs in the folder.
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
# Sort numerically by the trailing "_<n>.csv" counter in the filename.
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
# NOTE(review): the percentage lacks a *100, so this prints 0% for every
# file except the last one.
print(
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list))
)
)
# try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
# Goodput summed per fixed time bucket, then converted to Mbit/s.
transmission_df["goodput"] = (
transmission_df["payload_size"]
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
.transform("sum")
)
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# Same value as a rolling window (smoother curve for plotting).
transmission_df["goodput_rolling"] = (
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
)
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
# Cell_ID fields are parsed to ints (or -1) via convert_cellid.
serial_df = pd.read_csv(
args.serial_file, converters={"Cell_ID": convert_cellid}
)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
# NOTE(review): sort_index() is not in-place; its result is discarded here.
serial_df.sort_index()
# Select DataFrame rows between two dates
mask = (serial_df.index >= transmission_df.index[0]) & (serial_df.index <= transmission_df.index[-1])
serial_df = serial_df.loc[mask]
# Relative time since the first serial sample; the *60 suggests the "time"
# column is in minutes and this converts to seconds -- TODO confirm.
serial_df["arrival_time"] = (serial_df["time"] - serial_df["time"].iloc[0]) * 60
serial_df.index = serial_df["arrival_time"]
transmission_df.index = transmission_df["arrival_time"]
# Per-SCC effective bandwidth: only count a carrier while its state is ACTIVE.
for i in range(1, 5):
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
"LTE_SCC{}_bw".format(i)
]
mask = serial_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# Zero out SCCs configured for uplink (mask keeps UL_Configured == False).
# NOTE(review): .isin([False]) matches the boolean False only -- verify the
# CSV column is not read as the string "False".
for i in range(1, 5):
mask = serial_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# sum all effective bandwidth for 5G and 4G
serial_df["SCC1_NR5G_effective_bw"] = serial_df["SCC1_NR5G_bw"].fillna(0)
serial_df["effective_bw_sum"] = (
serial_df["SCC1_NR5G_effective_bw"]
+ serial_df["LTE_SCC1_effective_bw"]
+ serial_df["LTE_SCC2_effective_bw"]
+ serial_df["LTE_SCC3_effective_bw"]
+ serial_df["LTE_SCC4_effective_bw"]
+ serial_df["LTE_bw"]
)
# Columns for the stacked bandwidth area plot (order fixes the legend order).
bw_cols = [
"SCC1_NR5G_effective_bw",
"LTE_bw",
"LTE_SCC1_effective_bw",
"LTE_SCC2_effective_bw",
"LTE_SCC3_effective_bw",
"LTE_SCC4_effective_bw",
]
# transmission timeline
scaley = 1.5
scalex = 1.0
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
fig.subplots_adjust(right=0.75)
if not args.fancy:
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
# Upper panel: cwnd (ax0), sRTT (ax1) and goodput (ax2) share one x axis;
# lower panel (ax00): stacked bandwidth area plot.
ax0 = ax[0]
ax1 = ax0.twinx()
ax2 = ax0.twinx()
# ax2.spines.right.set_position(("axes", 1.22))
ax00 = ax[1]
snd_plot = ax0.plot(
transmission_df["snd_cwnd"].dropna(),
color="darkorange",
linestyle="dashed",
label="cwnd",
)
srtt_plot = ax1.plot(
transmission_df["srtt"].dropna(),
color="maroon",
linestyle="dotted",
label="sRTT",
)
goodput_plot = ax2.plot(
transmission_df["goodput_rolling"],
color="blue",
linestyle="solid",
label="goodput",
)
serial_df["time_rel"] = serial_df["time"] - serial_df["time"].iloc[0]
serial_df.index = serial_df["time_rel"]
ax_stacked = serial_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
ax00.set_ylabel("bandwidth [MHz]")
ax00.set_ylim(0, 200)
#ax.set_xlabel("time [minutes]")
#ax00.set_xlim([0, transmission_df.index[-1]])
ax00.xaxis.grid(True)
# Push the third y axis outwards so it does not overlap the second one.
ax2.spines.right.set_position(("axes", 1.1))
ax0.set_ylim(0, 5000) #2500
ax1.set_ylim(0, 2) #0.3
ax2.set_ylim(0, 500)
#ax00.set_ylim(-25, 0)
ax00.set_xlabel("time [s]")
ax2.set_ylabel("goodput [mbps]")
#ax00.set_ylabel("LTE/NR RSRQ [dB]")
# ax02.set_ylabel("LTE RSRQ [dB]")
ax1.set_ylabel("sRTT [s]")
ax0.set_ylabel("cwnd [MSS]")
if args.fancy:
# Fancy mode: frameless legends; legend texts are renamed positionally to
# match bw_cols order above.
legend_frame = False
ax0.set_xlim([0, 60])
ax00.set_xlim([0, 60])
# added these three lines
lns_ax0 = snd_plot + srtt_plot + goodput_plot
labs_ax0 = [l.get_label() for l in lns_ax0]
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
#ax0.set_zorder(100)
#lns_ax00 = [ax_stacked]
#labs_ax00 = ["5G bandwidth", "4G bandwidth"]
#ax00.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
L = ax00.legend(ncols=3, fontsize=9, frameon=False)
L.get_texts()[0].set_text("5G main")
L.get_texts()[1].set_text("4G main")
L.get_texts()[2].set_text("4G SCC 1")
L.get_texts()[3].set_text("4G SCC 2")
L.get_texts()[4].set_text("4G SCC 3")
L.get_texts()[5].set_text("4G SCC 4")
#ax00.set_zorder(100)
plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
#serial_df.to_csv("{}{}_plot.csv".format(args.save, csv.replace(".csv", "")))
else:
fig.legend(loc="lower right")
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
# except Exception as e:
# print("Error processing file: {}".format(csv))
# print(str(e))
counter += 1
# Free the figure to keep memory bounded across iterations.
plt.close(fig)
plt.clf()

View File

@@ -2,7 +2,6 @@
import math import math
import multiprocessing import multiprocessing
import os import os
import pickle
from argparse import ArgumentParser from argparse import ArgumentParser
import matplotlib import matplotlib
@@ -44,148 +43,137 @@ if __name__ == "__main__":
for csv in pcap_csv_list: for csv in pcap_csv_list:
print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor(counter/len(pcap_csv_list)))) print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor(counter/len(pcap_csv_list))))
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s] try:
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6) transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
# key for columns and level for index transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum") transmission_df = transmission_df.set_index("datetime")
transmission_df["goodput"] = transmission_df["goodput"].apply( transmission_df.index = pd.to_datetime(transmission_df.index)
lambda x: ((x * 8) / args.interval) / 10**6 transmission_df = transmission_df.sort_index()
)
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum() # srtt to [s]
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply( transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6)
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns # key for columns and level for index
cc_algo = transmission_df["congestion_control"].iloc[0] transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
cc_algo = cc_algo.upper() transmission_df["goodput"] = transmission_df["goodput"].apply(
transmission_direction = transmission_df["direction"].iloc[0] lambda x: ((x * 8) / args.interval) / 10**6
)
#transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"]) transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# read serial csv # set meta values and remove all not needed columns
serial_df = pd.read_csv(args.serial_file) cc_algo = transmission_df["congestion_control"].iloc[0]
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1) cc_algo = cc_algo.upper()
serial_df = serial_df.set_index("datetime") transmission_direction = transmission_df["direction"].iloc[0]
serial_df.index = pd.to_datetime(serial_df.index)
serial_df.sort_index()
transmission_df = pd.merge_asof( #transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
# transmission timeline # read serial csv
serial_df = pd.read_csv(args.serial_file)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df.sort_index()
scaley = 1.5 transmission_df = pd.merge_asof(
scalex = 1.0 transmission_df,
fig, ax = plt.subplots(figsize=[6.4 * scaley, 4.8 * scalex]) serial_df,
plt.title("{} with {}".format(transmission_direction, cc_algo)) tolerance=pd.Timedelta("1s"),
fig.subplots_adjust(right=0.75) right_index=True,
left_index=True,
)
twin1 = ax.twinx() # transmission timeline
twin2 = ax.twinx()
twin3 = ax.twinx() scaley = 1.5
# Offset the right spine of twin2. The ticks and label have already been scalex = 1.0
# placed on the right by twinx above. fig, ax = plt.subplots(figsize=[6.4 * scaley, 4.8 * scalex])
twin2.spines.right.set_position(("axes", 1.1)) plt.title("{} with {}".format(transmission_direction, cc_algo))
twin3.spines.right.set_position(("axes", 1.2)) fig.subplots_adjust(right=0.75)
twin1 = ax.twinx()
twin2 = ax.twinx()
twin3 = ax.twinx()
twin4 = ax.twinx()
# Offset the right spine of twin2. The ticks and label have already been
# placed on the right by twinx above.
twin2.spines.right.set_position(("axes", 1.1))
twin3.spines.right.set_position(("axes", 1.2))
twin4.spines.right.set_position(("axes", 1.3))
# create list fo color indices # create list fo color indices
transmission_df["index"] = transmission_df.index transmission_df["index"] = transmission_df.index
color_dict = dict() color_dict = dict()
color_list = list() color_list = list()
i = 0 i = 0
for cell_id in transmission_df["cellID"]: for cell_id in transmission_df["cellID"]:
if cell_id not in color_dict: if cell_id not in color_dict:
color_dict[cell_id] = i color_dict[cell_id] = i
i += 1 i += 1
color_list.append(color_dict[cell_id]) color_list.append(color_dict[cell_id])
transmission_df["cell_color"] = color_list transmission_df["cell_color"] = color_list
color_dict = None color_dict = None
color_list = None color_list = None
cmap = matplotlib.cm.get_cmap("Set3") cmap = matplotlib.cm.get_cmap("Set3")
unique_cells = transmission_df["cell_color"].unique() unique_cells = transmission_df["cell_color"].unique()
color_list = cmap.colors * (round(len(unique_cells) / len(cmap.colors)) + 1) color_list = cmap.colors * (round(len(unique_cells) / len(cmap.colors)) + 1)
for c in transmission_df["cell_color"].unique(): for c in transmission_df["cell_color"].unique():
bounds = transmission_df[["index", "cell_color"]].groupby("cell_color").agg(["min", "max"]).loc[c] bounds = transmission_df[["index", "cell_color"]].groupby("cell_color").agg(["min", "max"]).loc[c]
ax.axvspan(bounds.min(), bounds.max(), alpha=0.3, color=color_list[c]) ax.axvspan(bounds.min(), bounds.max(), alpha=0.3, color=color_list[c])
p4, = twin3.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd") p4, = twin3.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd")
p3, = twin2.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT") p3, = twin2.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT")
p1, = ax.plot(transmission_df["arrival_time"], transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput") p1, = ax.plot(transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput")
p2, = twin1.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI") p2, = twin1.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI")
p5, = twin4.plot(transmission_df["DL_bandwidth"].dropna(), color="peru", linestyle="dotted", label="DL_bandwidth")
ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max()) ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max())
ax.set_ylim(0, 500) ax.set_ylim(0, 500)
twin1.set_ylim(0, 15) twin1.set_ylim(0, 15)
twin2.set_ylim(0, transmission_df["ack_rtt"].max()) twin2.set_ylim(0, 0.2) #twin2.set_ylim(0, transmission_df["ack_rtt"].max())
twin3.set_ylim(0, transmission_df["snd_cwnd"].max() + 10) twin3.set_ylim(0, transmission_df["snd_cwnd"].max() + 10)
twin4.set_ylim(0, 21)
ax.set_xlabel("arrival time") ax.set_xlabel("arrival time")
ax.set_ylabel("Goodput [mbps]") ax.set_ylabel("Goodput [mbps]")
twin1.set_ylabel("CQI") twin1.set_ylabel("CQI")
twin2.set_ylabel("sRTT [s]") twin2.set_ylabel("sRTT [s]")
twin3.set_ylabel("cwnd") twin3.set_ylabel("cwnd")
twin4.set_ylabel("DL_bandwidth")
ax.yaxis.label.set_color(p1.get_color()) ax.yaxis.label.set_color(p1.get_color())
twin1.yaxis.label.set_color(p2.get_color()) twin1.yaxis.label.set_color(p2.get_color())
twin2.yaxis.label.set_color(p3.get_color()) twin2.yaxis.label.set_color(p3.get_color())
twin3.yaxis.label.set_color(p4.get_color()) twin3.yaxis.label.set_color(p4.get_color())
twin4.yaxis.label.set_color(p5.get_color())
tkw = dict(size=4, width=1.5) tkw = dict(size=4, width=1.5)
ax.tick_params(axis='y', colors=p1.get_color(), **tkw) ax.tick_params(axis='y', colors=p1.get_color(), **tkw)
twin1.tick_params(axis='y', colors=p2.get_color(), **tkw) twin1.tick_params(axis='y', colors=p2.get_color(), **tkw)
twin2.tick_params(axis='y', colors=p3.get_color(), **tkw) twin2.tick_params(axis='y', colors=p3.get_color(), **tkw)
twin3.tick_params(axis='y', colors=p4.get_color(), **tkw) twin3.tick_params(axis='y', colors=p4.get_color(), **tkw)
ax.tick_params(axis='x', **tkw) twin4.tick_params(axis='y', colors=p5.get_color(), **tkw)
ax.tick_params(axis='x', **tkw)
#ax.legend(handles=[p1, p2, p3]) #ax.legend(handles=[p1, p2, p3])
if args.save:
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
# plot correlations
corr_pairs = [
["goodput_rolling", "RSRQ"],
["goodput_rolling", "RSRP"],
["goodput_rolling", "RSSI"],
["goodput_rolling", "SINR"],
["goodput_rolling", "downlink_cqi"],
]
for pair in corr_pairs:
# spearman and pearson
sp = transmission_df[pair[0]].corr(transmission_df[pair[1]], method="spearman")
pe = transmission_df[pair[0]].corr(transmission_df[pair[1]], method="pearson")
title = "{}/{} spearman: {} pearson: {}".format(pair[0], pair[1], round(sp, 4), round(pe, 4))
transmission_df.plot.scatter(x=pair[0], y=pair[1], c="DarkBlue", title=title)
if args.save: if args.save:
plt.savefig("{}{}_corr_{}_and_{}.pdf".format(args.save, csv.replace(".csv", ""), pair[0], pair[1])) plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
except Exception as e:
plt.clf() print("Error processing file: {}".format(csv))
print(str(e))
counter += 1 counter += 1
plt.clf() plt.clf()

95
plot_stacked_bandwidth.py Executable file
View File

@@ -0,0 +1,95 @@
#!/usr/bin/env python3
"""Plot the stacked effective downlink bandwidth (5G + 4G carriers) over time.

Reads a serial-log CSV and draws a stacked area chart of the bandwidth
contributed by the 5G main carrier, the 4G main carrier, and up to four
4G secondary component carriers (SCCs).
"""
from argparse import ArgumentParser

import pandas as pd
import matplotlib.pyplot as plt

# LaTeX/pgf-oriented rcParams; kept for reference but currently disabled
# in favour of the seaborn theme below.
plt_params = {
    "pgf.texsystem": "lualatex",
    #"legend.fontsize": "x-large",
    #"figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}
#plt.rcParams.update(plt_params)
import seaborn as sns

sns.set()
sns.set(font_scale=1.5)
plt.rcParams["figure.figsize"] = (10, 3)

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Serial CSV")
    parser.add_argument("--save", default=None, help="Location to save pdf file.")
    args = parser.parse_args()

    df = pd.read_csv(args.file)
    # x axis: minutes elapsed since the first sample
    df["time_rel"] = df["time"] - df["time"].iloc[0]
    df.index = df["time_rel"] / 60

    # A secondary carrier only contributes downlink bandwidth while its
    # state is ACTIVE and it is not configured for uplink; zero it out
    # in every other sample.
    for scc in range(1, 5):
        eff_col = f"LTE_SCC{scc}_effective_bw"
        df[eff_col] = df[f"LTE_SCC{scc}_bw"]
        is_active = df[f"LTE_SCC{scc}_state"].isin(["ACTIVE"])
        df[eff_col] = df[eff_col].where(is_active, other=0)
        is_downlink_only = df[f"LTE_SCC{scc}_UL_Configured"].isin([False])
        df[eff_col] = df[eff_col].where(is_downlink_only, other=0)

    # 5G main carrier: missing samples count as zero bandwidth.
    df["SCC1_NR5G_effective_bw"] = df["SCC1_NR5G_bw"].fillna(0)
    # Total effective bandwidth across 5G and all 4G carriers
    # (NaN in any 4G column intentionally propagates, as before).
    df["effective_bw_sum"] = (
        df["SCC1_NR5G_effective_bw"]
        + df["LTE_SCC1_effective_bw"]
        + df["LTE_SCC2_effective_bw"]
        + df["LTE_SCC3_effective_bw"]
        + df["LTE_SCC4_effective_bw"]
        + df["LTE_bw"]
    )

    bw_cols = [
        "SCC1_NR5G_effective_bw",
        "LTE_bw",
        "LTE_SCC1_effective_bw",
        "LTE_SCC2_effective_bw",
        "LTE_SCC3_effective_bw",
        "LTE_SCC4_effective_bw",
    ]
    ax = df[bw_cols].plot.area(stacked=True, linewidth=0)
    ax.set_ylabel("bandwidth [MHz]")
    ax.set_xlabel("time [minutes]")
    ax.set_xlim([0, df.index[-1]])
    ax.xaxis.grid(False)

    # Replace the raw column names in the legend with readable labels.
    legend = plt.legend(ncols=2, fontsize=12, frameon=False)
    display_names = [
        "5G main",
        "4G main",
        "4G SCC 1",
        "4G SCC 2",
        "4G SCC 3",
        "4G SCC 4",
    ]
    for entry, label in zip(legend.get_texts(), display_names):
        entry.set_text(label)

    if args.save:
        plt.savefig(f"{args.save}-used_bandwidth.eps", bbox_inches="tight")
    else:
        plt.show()

View File

@@ -144,6 +144,9 @@ if __name__ == "__main__":
#print(transmission_df) #print(transmission_df)
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index # key for columns and level for index
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum") transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
transmission_df["goodput"] = transmission_df["goodput"].apply( transmission_df["goodput"] = transmission_df["goodput"].apply(
@@ -160,7 +163,7 @@ if __name__ == "__main__":
cc_algo = cc_algo.upper() cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0] transmission_direction = transmission_df["direction"].iloc[0]
transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling"]) transmission_df = transmission_df.filter(["goodput", "datetime", "srtt", "goodput_rolling"])
# read serial csv # read serial csv
serial_df = pd.read_csv(args.serial_file) serial_df = pd.read_csv(args.serial_file)
@@ -217,7 +220,7 @@ if __name__ == "__main__":
p1, = ax.plot(transmission_df["goodput_rolling"], "-", color="blue", label="goodput") p1, = ax.plot(transmission_df["goodput_rolling"], "-", color="blue", label="goodput")
p2, = twin1.plot(transmission_df["downlink_cqi"], "--", color="green", label="CQI") p2, = twin1.plot(transmission_df["downlink_cqi"], "--", color="green", label="CQI")
p3, = twin2.plot(transmission_df["ack_rtt"], "-.", color="red", label="ACK RTT") p3, = twin2.plot(transmission_df["srtt"], "-.", color="red", label="sRTT")
ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max()) ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max())
ax.set_ylim(0, 500) ax.set_ylim(0, 500)
@@ -227,7 +230,7 @@ if __name__ == "__main__":
ax.set_xlabel("Time") ax.set_xlabel("Time")
ax.set_ylabel("Goodput") ax.set_ylabel("Goodput")
twin1.set_ylabel("CQI") twin1.set_ylabel("CQI")
twin2.set_ylabel("ACK RTT") twin2.set_ylabel("sRTT")
ax.yaxis.label.set_color(p1.get_color()) ax.yaxis.label.set_color(p1.get_color())
twin1.yaxis.label.set_color(p2.get_color()) twin1.yaxis.label.set_color(p2.get_color())
@@ -264,16 +267,15 @@ if __name__ == "__main__":
# rtt cdf # rtt cdf
plt.clf() plt.clf()
print(transmission_df["ack_rtt"])
print("Calculate and polt rtt CDF...") print("Calculate and polt rtt CDF...")
plot_cdf(transmission_df, "ack_rtt") plot_cdf(transmission_df, "srtt")
plt.xlabel("ACK RTT [s]") plt.xlabel("sRTT [s]")
plt.ylabel("CDF") plt.ylabel("CDF")
plt.xscale("log") plt.xscale("log")
plt.legend([cc_algo]) plt.legend([cc_algo])
plt.title("{} with {}".format(transmission_direction, cc_algo)) plt.title("{} with {}".format(transmission_direction, cc_algo))
if args.save: if args.save:
plt.savefig("{}{}_cdf_plot.pdf".format(args.save, "ack_rtt")) plt.savefig("{}{}_cdf_plot.pdf".format(args.save, "srtt"))
else: else:
plt.show() plt.show()