#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser

import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

sns.set()
# sns.set(font_scale=1.5)
tex_fonts = {
    "pgf.texsystem": "lualatex",
    # "legend.fontsize": "x-large",
    # "figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    # "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)


def convert_cellid(value):
    if isinstance(value, str):
        try:
            r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
            return r
        except Exception:
            return -1
    else:
        return int(-1)
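
# Note: the "Cell_ID" strings in the serial CSV are assumed to end in a parenthesised
# numeric id, e.g. "... (12345)"; convert_cellid() extracts that trailing id and falls
# back to -1 for missing or malformed values.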

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )
    args = parser.parse_args()
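
    # Hypothetical invocation (the file and folder names are placeholders):
    #   ./plot.py -s serial_log.csv -p ./pcap_csvs/ --save ./plots/ -i 10 --fancy
    # Note: pcap_csv_folder and save are used as plain string prefixes below, so a
    # trailing path separator is expected.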

    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)
    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")
    # sort by the trailing number in the file name (expects names ending in "_<n>.csv")
    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))

    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter,
                len(pcap_csv_list),
                math.floor(counter / len(pcap_csv_list) * 100),
            ),
            end="",
            flush=True,
        )
        # try:
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        transmission_df["datetime"] = pd.to_datetime(
            transmission_df["datetime"]
        ) - pd.Timedelta(hours=1)
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()
        # srtt to [s]
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
        # key for columns and level for index
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
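        # Goodput in Mbit/s: payload bytes are summed per interval (fixed bins for
        # "goodput", a rolling window for "goodput_rolling"), multiplied by 8 to get
        # bits, then divided by the interval length in seconds and by 10**6.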
        # set meta values and remove all not needed columns
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]
        # transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
        # read serial csv
        serial_df = pd.read_csv(
            args.serial_file, converters={"Cell_ID": convert_cellid}
        )
        serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
            hours=1
        )
        serial_df = serial_df.set_index("datetime")
        serial_df.index = pd.to_datetime(serial_df.index)
        # sort_index() returns a new frame, so assign it back (merge_asof needs sorted keys)
        serial_df = serial_df.sort_index()
        # print(serial_df["Cell_ID"])
        # serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
        #     lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))
        transmission_df = pd.merge_asof(
            transmission_df,
            serial_df,
            tolerance=pd.Timedelta("1s"),
            right_index=True,
            left_index=True,
        )
        transmission_df.index = transmission_df["arrival_time"]
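        # merge_asof pairs each packet with the most recent serial-log sample that is
        # at most 1 s older; afterwards the index is switched to the packet arrival
        # time in seconds, which is used as the x-axis below.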
        # replace 0 in RSRQ with NaN
        transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
            0, np.NaN
        )
        transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)
        # count a secondary carrier's bandwidth only while it is in the ACTIVE state
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]
            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # zero the bandwidth if the secondary carrier is configured for uplink
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # sum all effective bandwidth for 5G and 4G
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
            "SCC1_NR5G_bw"
        ].fillna(0)
        transmission_df["lte_effective_bw_sum"] = (
            transmission_df["LTE_SCC1_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC2_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC3_effective_bw"].fillna(0)
            + transmission_df["LTE_SCC4_effective_bw"].fillna(0)
            + transmission_df["LTE_bw"].fillna(0)
        )
        transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]
        transmission_df["effective_bw_sum"] = (
            transmission_df["nr_effective_bw_sum"]
            + transmission_df["lte_effective_bw_sum"]
        )
        # transmission timeline
        scaley = 1.5
        scalex = 1.0
        fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
        fig.subplots_adjust(right=0.75)
        if not args.fancy:
            plt.title("{} with {}".format(transmission_direction, cc_algo))
            fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
        ax0 = ax[0]
        ax1 = ax0.twinx()
        ax2 = ax0.twinx()
        # ax2.spines.right.set_position(("axes", 1.22))
        ax00 = ax[1]
        snd_plot = ax0.plot(
            transmission_df["snd_cwnd"].dropna(),
            color="lime",
            linestyle="dashed",
            label="cwnd",
        )
        srtt_plot = ax1.plot(
            transmission_df["srtt"].dropna(),
            color="red",
            linestyle="dashdot",
            label="sRTT",
        )
        goodput_plot = ax2.plot(
            transmission_df["goodput_rolling"],
            color="blue",
            linestyle="solid",
            label="goodput",
        )
        # filter active state (repeats the carrier filtering from above)
        for i in range(1, 5):
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_bw".format(i)
            ]
            mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # filter if sc is used for uplink
        for i in range(1, 5):
            mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # sum all effective bandwidth for 5G and 4G
        transmission_df["SCC1_NR5G_effective_bw"] = transmission_df["SCC1_NR5G_bw"].fillna(0)
        transmission_df["effective_bw_sum"] = (
            transmission_df["SCC1_NR5G_effective_bw"]
            + transmission_df["LTE_SCC1_effective_bw"]
            + transmission_df["LTE_SCC2_effective_bw"]
            + transmission_df["LTE_SCC3_effective_bw"]
            + transmission_df["LTE_SCC4_effective_bw"]
            + transmission_df["LTE_bw"]
        )
        bw_cols = [
            "SCC1_NR5G_effective_bw",
            "LTE_bw",
            "LTE_SCC1_effective_bw",
            "LTE_SCC2_effective_bw",
            "LTE_SCC3_effective_bw",
            "LTE_SCC4_effective_bw",
        ]
        ax_stacked = transmission_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
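        # Bottom panel: per-carrier bandwidth as a stacked area, so the upper envelope
        # matches the effective bandwidth sum computed above (up to missing values).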
        ax00.set_ylabel("bandwidth [MHz]")
        # ax.set_xlabel("time [minutes]")
        ax00.set_xlim([0, transmission_df.index[-1]])
        ax00.xaxis.grid(False)
        ax2.spines.right.set_position(("axes", 1.1))
        ax0.set_ylim(0, 5000)
        ax1.set_ylim(0, 0.3)
        ax2.set_ylim(0, 600)
        # ax00.set_ylim(-25, 0)
        ax00.set_xlabel("arrival time [s]")
        ax2.set_ylabel("Goodput [Mbit/s]")
        # ax00.set_ylabel("LTE/NR RSRQ [dB]")
        # ax02.set_ylabel("LTE RSRQ [dB]")
        ax1.set_ylabel("sRTT [s]")
        ax0.set_ylabel("cwnd [MSS]")
        if args.fancy:
            legend_frame = False
            ax0.set_xlim([0, transmission_df.index[-1]])
            ax00.set_xlim([0, transmission_df.index[-1]])
            # added these three lines
            lns_ax0 = snd_plot + srtt_plot + goodput_plot
            labs_ax0 = [l.get_label() for l in lns_ax0]
            ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
            # ax0.set_zorder(100)
            # take the stacked-area handles from the axes itself; the Axes object
            # returned by plot.area is not a valid legend handle
            lns_ax00, labs_ax00 = ax00.get_legend_handles_labels()
            ax00.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
            # ax00.set_zorder(100)
            plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
        else:
            fig.legend(loc="lower right")
            plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
        # except Exception as e:
        #     print("Error processing file: {}".format(csv))
        #     print(str(e))
        counter += 1
        plt.close(fig)
        plt.clf()