
Paper plots.

master
Lukas Prause 2 years ago
parent
commit
aca74ca09c
1 changed file with 306 additions and 0 deletions

+ 306
- 0
plot_single_treansmission_paper.py

@@ -0,0 +1,306 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser

import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

import seaborn as sns

sns.set()
#sns.set(font_scale=1.5)

tex_fonts = {
    "pgf.texsystem": "lualatex",
    # "legend.fontsize": "x-large",
    # "figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    # "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}


# plt.rcParams.update(tex_fonts)


def convert_cellid(value):
    if isinstance(value, str):
        try:
            r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
            return r
        except Exception:
            return -1
    else:
        return -1
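# Note: judging from the parsing above, the serial log's Cell_ID field is assumed
# to look like e.g. "... (12345)"; the trailing parenthesised number is extracted
# and anything unparsable is mapped to -1.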


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )

    args = parser.parse_args()
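    # Example invocation (hypothetical paths; the folder and save arguments are used
    # as plain string prefixes below, so include the trailing separator):
    #   ./plot_single_treansmission_paper.py -s serial.csv -p ./pcap_csvs/ --save ./plots/ -i 10 --fancy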

    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)

    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")

    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))

    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list) * 100)
            )
        )

        # try:
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )

        transmission_df["datetime"] = pd.to_datetime(
            transmission_df["datetime"]
        ) - pd.Timedelta(hours=1)
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()

        # convert sRTT from microseconds to seconds
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)

        # goodput per fixed interval: sum payload bytes grouped on the datetime
        # index level ("key" selects columns, "level" selects an index level)
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )

        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
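        # Units: payload_size (in bytes, judging by the *8 conversion) is summed over
        # the interval/window, *8 gives bits, dividing by the interval length gives
        # bit/s, and /10**6 gives Mbit/s.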

        # set meta values and remove all unneeded columns
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]

        # transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])

        # read serial csv
        serial_df = pd.read_csv(
            args.serial_file, converters={"Cell_ID": convert_cellid}
        )
        serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
            hours=1
        )
        serial_df = serial_df.set_index("datetime")
        serial_df.index = pd.to_datetime(serial_df.index)
        serial_df = serial_df.sort_index()  # sort_index() is not in-place; assign the result

        # print(serial_df["Cell_ID"])

        # serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
        #     lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))

        transmission_df = pd.merge_asof(
            transmission_df,
            serial_df,
            tolerance=pd.Timedelta("1s"),
            right_index=True,
            left_index=True,
        )
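        # merge_asof on the two datetime indexes performs a nearest-key backward join:
        # every TCP sample is annotated with the most recent serial/modem row that is
        # at most 1 s older; samples without such a match get NaN in the serial columns.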

        transmission_df.index = transmission_df["arrival_time"]

        # replace 0 in RSRQ with NaN
        transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
            0, np.nan
        )
        transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.nan)

        # filter active state
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]

            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)

        # filter out SCCs that are configured for uplink
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)

        # sum all effective bandwidth for 5G and 4G
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
            "SCC1_NR5G_bw"
        ].fillna(0)

        transmission_df["lte_effective_bw_sum"] = (
            transmission_df["LTE_SCC1_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC2_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC3_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC4_effective_bw"].fillna(0)
            + transmission_df["LTE_bw"].fillna(0)
        )
        transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]

        transmission_df["effective_bw_sum"] = (
            transmission_df["nr_effective_bw_sum"]
            + transmission_df["lte_effective_bw_sum"]
        )

        # transmission timeline
        scaley = 1.5
        scalex = 1.0
        fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
        fig.subplots_adjust(right=0.75)
        if not args.fancy:
            plt.title("{} with {}".format(transmission_direction, cc_algo))
            fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
        ax0 = ax[0]
        ax1 = ax0.twinx()
        ax2 = ax0.twinx()
        # ax2.spines.right.set_position(("axes", 1.22))

        ax00 = ax[1]

        snd_plot = ax0.plot(
            transmission_df["snd_cwnd"].dropna(),
            color="lime",
            linestyle="dashed",
            label="cwnd",
        )
        srtt_plot = ax1.plot(
            transmission_df["srtt"].dropna(),
            color="red",
            linestyle="dashdot",
            label="sRTT",
        )
        goodput_plot = ax2.plot(
            transmission_df["goodput_rolling"],
            color="blue",
            linestyle="solid",
            label="goodput",
        )
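        # cwnd, sRTT and goodput share the time axis of the upper subplot but are drawn
        # on three separate y-scales (ax0 plus the two twinx() axes created above).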

        # filter active state (note: this repeats the effective-bandwidth computation
        # from above and overwrites the same columns)
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]

            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)

        # filter out SCCs that are configured for uplink
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)

        # sum all effective bandwidth for 5G and 4G
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df["SCC1_NR5G_bw"].fillna(0)
        transmission_df["effective_bw_sum"] = (
            transmission_df["SCC1_NR5G_effective_bw"]
            + transmission_df["LTE_SCC1_effective_bw"]
            + transmission_df["LTE_SCC2_effective_bw"]
            + transmission_df["LTE_SCC3_effective_bw"]
            + transmission_df["LTE_SCC4_effective_bw"]
            + transmission_df["LTE_bw"]
        )
        bw_cols = [
            "SCC1_NR5G_effective_bw",
            "LTE_bw",
            "LTE_SCC1_effective_bw",
            "LTE_SCC2_effective_bw",
            "LTE_SCC3_effective_bw",
            "LTE_SCC4_effective_bw",
        ]

        ax_stacked = transmission_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
        ax00.set_ylabel("bandwidth [MHz]")
        # ax.set_xlabel("time [minutes]")
        ax00.set_xlim([0, transmission_df.index[-1]])
        ax00.xaxis.grid(False)

        ax2.spines.right.set_position(("axes", 1.1))

        ax0.set_ylim(0, 5000)
        ax1.set_ylim(0, 0.3)
        ax2.set_ylim(0, 600)
        # ax00.set_ylim(-25, 0)

        ax00.set_xlabel("arrival time [s]")

        ax2.set_ylabel("goodput [Mbit/s]")
        # ax00.set_ylabel("LTE/NR RSRQ [dB]")
        # ax02.set_ylabel("LTE RSRQ [dB]")
        ax1.set_ylabel("sRTT [s]")
        ax0.set_ylabel("cwnd [MSS]")

        if args.fancy:
            legend_frame = False
            ax0.set_xlim([0, transmission_df.index[-1]])
            ax00.set_xlim([0, transmission_df.index[-1]])
            # combine the handles from all three y-axes into a single legend
            lns_ax0 = snd_plot + srtt_plot + goodput_plot
            labs_ax0 = [l.get_label() for l in lns_ax0]
            ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
            # ax0.set_zorder(100)

            # plot.area() returns the Axes, not the plotted artists, so fetch the
            # stacked-area handles and labels from the axes itself
            lns_ax00, labs_ax00 = ax00.get_legend_handles_labels()
            ax00.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
            # ax00.set_zorder(100)
            plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
        else:
            fig.legend(loc="lower right")
            plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
        # except Exception as e:
        #     print("Error processing file: {}".format(csv))
        #     print(str(e))
        counter += 1

        plt.close(fig)
        plt.clf()
