#!/usr/bin/env python3
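"""Plot the TCP timeline (cwnd, sRTT, rolling goodput) from per-connection PCAP
CSVs together with the effective LTE/NR carrier bandwidth from a serial-log CSV.

Example invocation (script and file names are placeholders, not taken from the
repository):

    ./plot_timeline.py -s serial_log.csv -p pcap_csvs/ --save plots/ -i 10 --fancy

--pcap_csv_folder and --save are joined with the file name by plain string
formatting, so both should end with a trailing "/".
"""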
import math
import multiprocessing
import os
from argparse import ArgumentParser

import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

sns.set()
# sns.set(font_scale=1.5)

tex_fonts = {
    "pgf.texsystem": "lualatex",
    # "legend.fontsize": "x-large",
    # "figure.figsize": (15, 5),
    "axes.labelsize": 15,  # "small",
    # "axes.titlesize": "x-large",
    "xtick.labelsize": 15,  # "small",
    "ytick.labelsize": 15,  # "small",
    "legend.fontsize": 15,
    "axes.formatter.use_mathtext": True,
    "mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
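# Note: tex_fonts only takes effect if the plt.rcParams.update(tex_fonts) call
# above is uncommented; otherwise the default seaborn style from sns.set() applies.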


def convert_cellid(value):
    # Pull the numeric cell ID out of strings like "... (<id>)"; -1 on failure.
    if isinstance(value, str):
        try:
            return int(value.split(" ")[-1].replace("(", "").replace(")", ""))
        except Exception:
            return -1
    else:
        return -1
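# Example (the input format is an assumption inferred from the parsing above):
#   convert_cellid("Cell ID: (12345)") -> 12345
#   convert_cellid("n/a") -> -1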


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
    parser.add_argument(
        "-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
    )
    parser.add_argument("--save", required=True, help="Location to save pdf file.")
    parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
    parser.add_argument(
        "-i",
        "--interval",
        default=10,
        type=int,
        help="Time interval for rolling window.",
    )
    args = parser.parse_args()

    pcap_csv_list = list()
    for filename in os.listdir(args.pcap_csv_folder):
        if filename.endswith(".csv") and "tcp" in filename:
            pcap_csv_list.append(filename)
    counter = 1
    if len(pcap_csv_list) == 0:
        print("No CSV files found.")
        raise SystemExit(1)
    pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))

    for csv in pcap_csv_list:
        print(
            "\rProcessing {} out of {} CSVs.\t({}%)\t".format(
                counter,
                len(pcap_csv_list),
                math.floor(counter / len(pcap_csv_list) * 100),
            ),
            end="",
            flush=True,
        )
        # try:
        transmission_df = pd.read_csv(
            "{}{}".format(args.pcap_csv_folder, csv),
            dtype=dict(is_retranmission=bool, is_dup_ack=bool),
        )
        transmission_df = transmission_df.set_index("datetime")
        transmission_df.index = pd.to_datetime(transmission_df.index)
        transmission_df = transmission_df.sort_index()
        # srtt to [s]
        transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
        # key for columns and level for index
        transmission_df["goodput"] = (
            transmission_df["payload_size"]
            .groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
            .transform("sum")
        )
        transmission_df["goodput"] = transmission_df["goodput"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
        transmission_df["goodput_rolling"] = (
            transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
        )
        transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
            lambda x: ((x * 8) / args.interval) / 10 ** 6
        )
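        # Both goodput columns convert the payload bytes summed per window into
        # Mbit/s: bytes * 8 / interval_seconds / 1e6.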
        # set meta values; dropping the unneeded columns is left commented out
        cc_algo = transmission_df["congestion_control"].iloc[0]
        cc_algo = cc_algo.upper()
        transmission_direction = transmission_df["direction"].iloc[0]
        # transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
        # read serial csv
        serial_df = pd.read_csv(
            args.serial_file, converters={"Cell_ID": convert_cellid}
        )
        serial_df = serial_df.set_index("datetime")
        serial_df.index = pd.to_datetime(serial_df.index)
        serial_df = serial_df.sort_index()
        # Select DataFrame rows between two dates
        mask = (serial_df.index >= transmission_df.index[0]) & (
            serial_df.index <= transmission_df.index[-1]
        )
        serial_df = serial_df.loc[mask]
        transmission_df.index = transmission_df["arrival_time"]
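        # From here on transmission_df is indexed by arrival_time, which the
        # x-limits and the "arrival time [s]" label below treat as seconds.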
        # filter active state
        for i in range(1, 5):
            serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
                "LTE_SCC{}_bw".format(i)
            ]
            mask = serial_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
            serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # filter out carriers that are configured for uplink
        for i in range(1, 5):
            mask = serial_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
            serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
                "LTE_SCC{}_effective_bw".format(i)
            ].where(mask, other=0)
        # sum all effective bandwidth for 5G and 4G
        serial_df["SCC1_NR5G_effective_bw"] = serial_df["SCC1_NR5G_bw"].fillna(0)
        serial_df["lte_effective_bw_sum"] = (
            serial_df["LTE_SCC1_effective_bw"].fillna(0)
            + serial_df["LTE_SCC2_effective_bw"].fillna(0)
            + serial_df["LTE_SCC3_effective_bw"].fillna(0)
            + serial_df["LTE_SCC4_effective_bw"].fillna(0)
            + serial_df["LTE_bw"].fillna(0)
        )
        serial_df["nr_effective_bw_sum"] = serial_df["SCC1_NR5G_effective_bw"]
        serial_df["effective_bw_sum"] = (
            serial_df["nr_effective_bw_sum"] + serial_df["lte_effective_bw_sum"]
        )
        # transmission timeline
        scaley = 1.5
        scalex = 1.0
        fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
        fig.subplots_adjust(right=0.75)
        if not args.fancy:
            plt.title("{} with {}".format(transmission_direction, cc_algo))
            fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
        ax0 = ax[0]
        ax1 = ax0.twinx()
        ax2 = ax0.twinx()
        # ax2.spines.right.set_position(("axes", 1.22))
        ax00 = ax[1]
        snd_plot = ax0.plot(
            transmission_df["snd_cwnd"].dropna(),
            color="lime",
            linestyle="dashed",
            label="cwnd",
        )
        srtt_plot = ax1.plot(
            transmission_df["srtt"].dropna(),
            color="red",
            linestyle="dashdot",
            label="sRTT",
        )
        goodput_plot = ax2.plot(
            transmission_df["goodput_rolling"],
            color="blue",
            linestyle="solid",
            label="goodput",
        )
        # sum all effective bandwidth for 5G and 4G
        serial_df["SCC1_NR5G_effective_bw"] = serial_df["SCC1_NR5G_bw"].fillna(0)
        serial_df["effective_bw_sum"] = (
            serial_df["SCC1_NR5G_effective_bw"]
            + serial_df["LTE_SCC1_effective_bw"]
            + serial_df["LTE_SCC2_effective_bw"]
            + serial_df["LTE_SCC3_effective_bw"]
            + serial_df["LTE_SCC4_effective_bw"]
            + serial_df["LTE_bw"]
        )
        bw_cols = [
            "SCC1_NR5G_effective_bw",
            "LTE_bw",
            "LTE_SCC1_effective_bw",
            "LTE_SCC2_effective_bw",
            "LTE_SCC3_effective_bw",
            "LTE_SCC4_effective_bw",
        ]
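        # The column order above maps 1:1 onto the legend labels set further
        # down ("5G main", "4G main", "4G SCC 1" ... "4G SCC 4").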
        serial_df["time_rel"] = serial_df["time"] - serial_df["time"].iloc[0]
        serial_df.index = serial_df["time_rel"] / 60
        ax_stacked = serial_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
        ax00.set_ylabel("bandwidth [MHz]")
        # ax.set_xlabel("time [minutes]")
        # ax00.set_xlim([0, transmission_df.index[-1]])
        ax00.xaxis.grid(False)
        ax2.spines.right.set_position(("axes", 1.1))
        ax0.set_ylim(0, 5000)
        ax1.set_ylim(0, 0.3)
        ax2.set_ylim(0, 600)
        # ax00.set_ylim(-25, 0)
        ax00.set_xlabel("arrival time [s]")
        ax2.set_ylabel("goodput [Mbit/s]")
        # ax00.set_ylabel("LTE/NR RSRQ [dB]")
        # ax02.set_ylabel("LTE RSRQ [dB]")
        ax1.set_ylabel("sRTT [s]")
        ax0.set_ylabel("cwnd [MSS]")
        if args.fancy:
            legend_frame = False
            ax0.set_xlim([0, transmission_df.index[-1]])
            ax00.set_xlim([0, transmission_df.index[-1]])
            # combine the three twin-axis line plots into a single legend
            lns_ax0 = snd_plot + srtt_plot + goodput_plot
            labs_ax0 = [l.get_label() for l in lns_ax0]
            ax2.legend(
                lns_ax0,
                labs_ax0,
                ncols=9,
                fontsize=9,
                loc="upper right",
                frameon=legend_frame,
            )
            # ax0.set_zorder(100)
            # lns_ax00 = [ax_stacked]
            # labs_ax00 = ["5G bandwidth", "4G bandwidth"]
            # ax00.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
            L = ax00.legend(ncols=3, fontsize=9, frameon=False)
            L.get_texts()[0].set_text("5G main")
            L.get_texts()[1].set_text("4G main")
            L.get_texts()[2].set_text("4G SCC 1")
            L.get_texts()[3].set_text("4G SCC 2")
            L.get_texts()[4].set_text("4G SCC 3")
            L.get_texts()[5].set_text("4G SCC 4")
            # ax00.set_zorder(100)
            plt.savefig(
                "{}{}_plot.eps".format(args.save, csv.replace(".csv", "")),
                bbox_inches="tight",
            )
        else:
            fig.legend(loc="lower right")
            plt.savefig(
                "{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")),
                bbox_inches="tight",
            )
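        # Fancy mode writes <save><name>_plot.eps with per-axes legends; the
        # default mode writes <save><name>_plot.pdf with a single figure legend.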
        # except Exception as e:
        #     print("Error processing file: {}".format(csv))
        #     print(str(e))
        counter += 1
        plt.close(fig)
        plt.clf()