Compare commits

...

33 Commits

Author SHA1 Message Date
Lukas Prause
0f9ff99d90 Adds srtts to csv. 2023-08-31 13:29:35 +02:00
Lukas Prause
8129a2bd95 Removing index from frame. 2023-08-28 12:57:24 +02:00
Lukas Prause
1c498208da Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-08-24 14:05:21 +02:00
Lukas Prause
2e4ff28fc2 Adds new script. 2023-08-24 14:05:14 +02:00
8c2f78cd02 Changes cwnd scaling. 2023-07-24 10:05:59 +02:00
cf18199ba3 Changes plot label. 2023-07-18 08:39:13 +02:00
c43826dc9c Changes plot color and y scale. 2023-07-14 10:49:30 +02:00
b92ee09af9 Changes plot color and y scale. 2023-07-14 10:25:56 +02:00
fdf04fb21e Changes plot color and y scale. 2023-07-14 09:57:37 +02:00
a75b0b74a0 Changes plot color and y scale. 2023-07-14 09:49:31 +02:00
Lukas Prause
9f8db93f7c Debug 2023-07-13 13:21:46 +02:00
Lukas Prause
4e85d7a3e5 Debug 2023-07-13 13:12:12 +02:00
Lukas Prause
0785c1e4e6 Debug 2023-07-13 12:37:46 +02:00
a9f9c42ab1 Valuefixes 2023-07-13 10:25:14 +02:00
44f20be108 Valuefixes 2023-07-13 10:20:54 +02:00
dc578c8a1b Valuefixes 2023-07-13 10:15:13 +02:00
e4fc32a1a2 Valuefixes 2023-07-13 10:05:26 +02:00
ec443c9bd4 Valuefixes 2023-07-13 09:55:12 +02:00
009d59c499 Valuefixes 2023-07-13 09:43:21 +02:00
85c4bfeb75 Valuefixes 2023-07-13 09:15:06 +02:00
Lukas Prause
502de2d864 Merge timing. 2023-07-12 16:16:01 +02:00
Lukas Prause
a713b9e262 Merge timing. 2023-07-12 15:18:14 +02:00
Lukas Prause
98fe00c02f Merge timing. 2023-07-12 14:55:49 +02:00
Lukas Prause
a97563fe61 Fixes typo and and too large dataset. 2023-07-12 14:36:21 +02:00
Lukas Prause
951bac5f1e Fixes typo and and too large dataset. 2023-07-12 14:17:33 +02:00
Lukas Prause
97f0946ad0 Fixes typo and and too large dataset. 2023-07-12 13:54:43 +02:00
Lukas Prause
baf2207a4f Fixes typo and and too large dataset. 2023-07-12 13:54:00 +02:00
345e6546ce stacked 2023-07-11 20:37:57 +02:00
a32df7b8aa fix stacked 2023-07-11 20:13:29 +02:00
Lukas Prause
aca74ca09c Paper plots. 2023-07-11 16:28:29 +02:00
Lukas Prause
a25288a737 Removes fontsize scaling 2023-07-11 13:18:54 +02:00
Lukas Prause
67ca4d66b0 Removes fontsize scaling 2023-07-11 13:18:00 +02:00
Lukas Prause
a701d378e2 Changes fontsize scale. 2023-07-11 13:06:25 +02:00
4 changed files with 484 additions and 4 deletions

View File

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Apply seaborn's default theme to all subsequent matplotlib plots.
sns.set()
#sns.set(font_scale=1.5)
# Font/rcParams intended for LaTeX (pgf/lualatex) paper output.
# NOTE(review): currently unused — the rcParams update below is commented out.
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
    """Parse a serial-log Cell_ID field into an integer.

    The field looks like ``"... (<id>)"``; the last space-separated token is
    taken and stripped of parentheses. Returns -1 for non-string input or
    when the token is not a valid integer.
    """
    if not isinstance(value, str):
        return -1
    try:
        # Last token, parentheses removed, e.g. "Cell ID (123)" -> 123.
        return int(value.split(" ")[-1].replace("(", "").replace(")", ""))
    except ValueError:
        # Token was not numeric -> sentinel for "unknown cell".
        return -1
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )
    args = parser.parse_args()

    # Collect the per-transmission TCP csv files from the pcap folder.
    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)
    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")
        # FIX: previously fell through and crashed later on
        # concat_frame.to_csv() with concat_frame still None.
        raise SystemExit(1)
    # Sort by the trailing run number in the file name (e.g. "..._tcp_3.csv").
    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
    concat_frame = None
    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter,
                len(pcap_csv_list),
                # FIX: multiply by 100 — previously floor(counter/len) was
                # almost always 0 and never a percentage.
                math.floor(100 * counter / len(pcap_csv_list)),
            )
        )
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            # NOTE(review): "is_retranmission" (sic) must match the column
            # name as written in the csv files — do not "fix" the spelling.
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        # Shift timestamps back one hour (timezone correction — TODO confirm).
        transmission_df["datetime"] = pd.to_datetime(
            transmission_df["datetime"]
        ) - pd.Timedelta(hours=1)
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()
        # srtt to [s]
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
        # Goodput per fixed time bucket, in Mbit/s
        # (key for columns and level for index).
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        # Rolling-window goodput, in Mbit/s.
        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        # Set meta values (constant per transmission csv).
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]
        # Read the serial (modem) csv.
        serial_df = pd.read_csv(
            args.serial_file, converters={"Cell_ID": convert_cellid}
        )
        serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
            hours=1
        )
        serial_df = serial_df.set_index("datetime")
        serial_df.index = pd.to_datetime(serial_df.index)
        # FIX: sort_index() is not in-place; the result was discarded before,
        # leaving serial_df unsorted for merge_asof (which requires sorted keys).
        serial_df = serial_df.sort_index()
        # Nearest-past join of modem samples onto packet rows, max 1 s apart.
        transmission_df = pd.merge_asof(
            transmission_df,
            serial_df,
            tolerance=pd.Timedelta("1s"),
            right_index=True,
            left_index=True,
        )
        # Replace 0 in RSRQ with NaN (0 means "no reading").
        transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
            0, np.NaN
        )
        transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)
        # A secondary carrier's bandwidth only counts while it is ACTIVE ...
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]
            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # ... and not while it is configured for uplink.
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # Sum all effective bandwidth for 5G and 4G.
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
            "SCC1_NR5G_bw"
        ].fillna(0)
        transmission_df["lte_effective_bw_sum"] = (
            transmission_df["LTE_SCC1_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC2_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC3_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC4_effective_bw"].fillna(0)
            + transmission_df["LTE_bw"].fillna(0)
        )
        transmission_df["nr_effective_bw_sum"] = transmission_df[
            "SCC1_NR5G_effective_bw"
        ]
        transmission_df["effective_bw_sum"] = (
            transmission_df["nr_effective_bw_sum"]
            + transmission_df["lte_effective_bw_sum"]
        )
        # Keep only the columns that go into the concatenated output csv.
        transmission_df = transmission_df.filter(
            ["goodput", "effective_bw_sum", "srtt"]
        )
        transmission_df = transmission_df.reset_index(drop=True)
        if concat_frame is None:
            concat_frame = transmission_df
        else:
            concat_frame = pd.concat([concat_frame, transmission_df])
        # FIX: counter was never incremented, so the progress line was
        # permanently stuck at "1 out of N".
        counter += 1
    concat_frame.to_csv("{}_concat_bw_gp.csv".format(args.save))

View File

@@ -8,6 +8,12 @@ import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="Messfahrt csv")
@@ -39,14 +45,20 @@ if __name__ == "__main__":
df_wm = gdf.to_crs(epsg=3857)
ax2 = df_wm.plot()
ax2 = df_wm.plot(args.column, cmap="hot", legend=args.no_legend, ax=ax2)
ax2 = df_wm.plot(column=args.column, cmap="hot", legend=args.no_legend, ax=ax2, legend_kwds={"label": args.label},)
# ax2 = df_wm.plot.scatter(x="longitude", y="latitude", c="kmh", cmap="hot")
# zoom 17 is pretty
cx.add_basemap(ax2, source=cx.providers.OpenStreetMap.Mapnik, zoom=17)
# gdf.plot()
ax2.set_axis_off()
if not args.no_legend:
ax2.set_title(args.label if args.label else args.column)
else:
fig = ax2.figure
cb_ax = fig.axes[0]
cb_ax.set_label(args.label)
cb_ax.tick_params(labelsize=30)
if args.show_providerinfo:
#####################################
@@ -68,6 +80,6 @@ if __name__ == "__main__":
# df.plot(x="longitude", y="latitude", kind="scatter", colormap="YlOrRd")
if args.save:
plt.savefig("{}gps_plot.pdf".format(args.save))
plt.savefig("{}gps_plot.eps".format(args.save), bbox_inches="tight")
else:
plt.show()

View File

@@ -12,6 +12,7 @@ import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
tex_fonts = {
"pgf.texsystem": "lualatex",
@@ -318,7 +319,7 @@ if __name__ == "__main__":
# added these three lines
lns_ax0 = snd_plot + srtt_plot + goodput_plot
labs_ax0 = [l.get_label() for l in lns_ax0]
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper left", frameon=legend_frame)
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
#ax0.set_zorder(100)
lns_ax00 = eff_bw_plot + lte_eff_bw_plot + nr_eff_bw_plot + lte_rsrq_plot + nr_rsrq_plot

269
plot_single_transmission_paper.py Executable file
View File

@@ -0,0 +1,269 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Apply seaborn's default theme to all subsequent matplotlib plots.
sns.set()
#sns.set(font_scale=1.5)
# Font/rcParams intended for LaTeX (pgf/lualatex) paper output.
# NOTE(review): currently unused — the rcParams update below is commented out.
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
    """Parse a serial-log Cell_ID field into an integer.

    The field looks like ``"... (<id>)"``; the last space-separated token is
    taken and stripped of parentheses. Returns -1 for non-string input or
    when the token is not a valid integer.
    """
    if not isinstance(value, str):
        return -1
    try:
        # Last token, parentheses removed, e.g. "Cell ID (123)" -> 123.
        return int(value.split(" ")[-1].replace("(", "").replace(")", ""))
    except ValueError:
        # Token was not numeric -> sentinel for "unknown cell".
        return -1
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )
    args = parser.parse_args()

    # Collect the per-transmission TCP csv files from the pcap folder.
    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)
    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")
        # FIX: exit explicitly instead of silently producing no plots.
        raise SystemExit(1)
    # Sort by the trailing run number in the file name (e.g. "..._tcp_3.csv").
    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter,
                len(pcap_csv_list),
                # FIX: multiply by 100 — previously floor(counter/len) was
                # almost always 0 and never a percentage.
                math.floor(100 * counter / len(pcap_csv_list)),
            )
        )
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            # NOTE(review): "is_retranmission" (sic) must match the column
            # name as written in the csv files — do not "fix" the spelling.
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()
        # srtt to [s]
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
        # Goodput per fixed time bucket, in Mbit/s
        # (key for columns and level for index).
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        # Rolling-window goodput, in Mbit/s.
        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        # Set meta values (constant per transmission csv).
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]
        # Read the serial (modem) csv.
        serial_df = pd.read_csv(
            args.serial_file, converters={"Cell_ID": convert_cellid}
        )
        serial_df = serial_df.set_index("datetime")
        serial_df.index = pd.to_datetime(serial_df.index)
        # FIX: sort_index() is not in-place; the result was discarded before.
        serial_df = serial_df.sort_index()
        # Restrict modem samples to this transmission's time span.
        mask = (serial_df.index >= transmission_df.index[0]) & (
            serial_df.index <= transmission_df.index[-1]
        )
        serial_df = serial_df.loc[mask]
        # Relative arrival time; "time" appears to be in minutes, scaled
        # to seconds here — TODO confirm against the serial logger.
        serial_df["arrival_time"] = (serial_df["time"] - serial_df["time"].iloc[0]) * 60
        serial_df.index = serial_df["arrival_time"]
        transmission_df.index = transmission_df["arrival_time"]
        # A secondary carrier's bandwidth only counts while it is ACTIVE ...
        for i in range(1, 5):
            serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
                "LTE_SCC{}_bw".format(i)
            ]
            mask = serial_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # ... and not while it is configured for uplink.
        for i in range(1, 5):
            mask = serial_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # Sum all effective bandwidth for 5G and 4G.
        serial_df["SCC1_NR5G_effective_bw"] = serial_df["SCC1_NR5G_bw"].fillna(0)
        serial_df["effective_bw_sum"] = (
            serial_df["SCC1_NR5G_effective_bw"]
            + serial_df["LTE_SCC1_effective_bw"]
            + serial_df["LTE_SCC2_effective_bw"]
            + serial_df["LTE_SCC3_effective_bw"]
            + serial_df["LTE_SCC4_effective_bw"]
            + serial_df["LTE_bw"]
        )
        # Columns for the stacked bandwidth area plot (order = stack order).
        bw_cols = [
            "SCC1_NR5G_effective_bw",
            "LTE_bw",
            "LTE_SCC1_effective_bw",
            "LTE_SCC2_effective_bw",
            "LTE_SCC3_effective_bw",
            "LTE_SCC4_effective_bw",
        ]
        # Transmission timeline figure: cwnd/sRTT/goodput on top,
        # stacked carrier bandwidth below.
        scaley = 1.5
        scalex = 1.0
        fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
        fig.subplots_adjust(right=0.75)
        if not args.fancy:
            plt.title("{} with {}".format(transmission_direction, cc_algo))
            fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
        ax0 = ax[0]
        ax1 = ax0.twinx()
        ax2 = ax0.twinx()
        # ax2.spines.right.set_position(("axes", 1.22))
        ax00 = ax[1]
        snd_plot = ax0.plot(
            transmission_df["snd_cwnd"].dropna(),
            color="darkorange",
            linestyle="dashed",
            label="cwnd",
        )
        srtt_plot = ax1.plot(
            transmission_df["srtt"].dropna(),
            color="maroon",
            linestyle="dotted",
            label="sRTT",
        )
        goodput_plot = ax2.plot(
            transmission_df["goodput_rolling"],
            color="blue",
            linestyle="solid",
            label="goodput",
        )
        serial_df["time_rel"] = serial_df["time"] - serial_df["time"].iloc[0]
        serial_df.index = serial_df["time_rel"]
        ax_stacked = serial_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
        ax00.set_ylabel("bandwidth [MHz]")
        ax00.set_ylim(0, 200)
        ax00.xaxis.grid(True)
        # Push the third y-axis outside the plot area.
        ax2.spines.right.set_position(("axes", 1.1))
        ax0.set_ylim(0, 5000)  # 2500
        ax1.set_ylim(0, 2)  # 0.3
        ax2.set_ylim(0, 500)
        ax00.set_xlabel("time [s]")
        ax2.set_ylabel("goodput [mbps]")
        ax1.set_ylabel("sRTT [s]")
        ax0.set_ylabel("cwnd [MSS]")
        if args.fancy:
            # Paper variant: frameless legends, fixed 60 s window, EPS output.
            legend_frame = False
            ax0.set_xlim([0, 60])
            ax00.set_xlim([0, 60])
            # Combine the three twin-axis line handles into one legend.
            lns_ax0 = snd_plot + srtt_plot + goodput_plot
            labs_ax0 = [l.get_label() for l in lns_ax0]
            ax2.legend(
                lns_ax0,
                labs_ax0,
                ncols=9,
                fontsize=9,
                loc="upper right",
                frameon=legend_frame,
            )
            # Relabel the stacked-area legend entries with readable names
            # (order matches bw_cols above).
            L = ax00.legend(ncols=3, fontsize=9, frameon=False)
            L.get_texts()[0].set_text("5G main")
            L.get_texts()[1].set_text("4G main")
            L.get_texts()[2].set_text("4G SCC 1")
            L.get_texts()[3].set_text("4G SCC 2")
            L.get_texts()[4].set_text("4G SCC 3")
            L.get_texts()[5].set_text("4G SCC 4")
            plt.savefig(
                "{}{}_plot.eps".format(args.save, csv.replace(".csv", "")),
                bbox_inches="tight",
            )
        else:
            fig.legend(loc="lower right")
            plt.savefig(
                "{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")),
                bbox_inches="tight",
            )
        counter += 1
        # Release figure memory before the next iteration.
        plt.close(fig)
        plt.clf()