Compare commits


247 Commits

Author SHA1 Message Date
Lukas Prause
0f9ff99d90 Adds srtts to csv. 2023-08-31 13:29:35 +02:00
Lukas Prause
8129a2bd95 Removing index from frame. 2023-08-28 12:57:24 +02:00
Lukas Prause
1c498208da Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-08-24 14:05:21 +02:00
Lukas Prause
2e4ff28fc2 Adds new script. 2023-08-24 14:05:14 +02:00
8c2f78cd02 Changes cwnd scaling. 2023-07-24 10:05:59 +02:00
cf18199ba3 Changes plot label. 2023-07-18 08:39:13 +02:00
c43826dc9c Changes plot color and y scale. 2023-07-14 10:49:30 +02:00
b92ee09af9 Changes plot color and y scale. 2023-07-14 10:25:56 +02:00
fdf04fb21e Changes plot color and y scale. 2023-07-14 09:57:37 +02:00
a75b0b74a0 Changes plot color and y scale. 2023-07-14 09:49:31 +02:00
Lukas Prause
9f8db93f7c Debug 2023-07-13 13:21:46 +02:00
Lukas Prause
4e85d7a3e5 Debug 2023-07-13 13:12:12 +02:00
Lukas Prause
0785c1e4e6 Debug 2023-07-13 12:37:46 +02:00
a9f9c42ab1 Valuefixes 2023-07-13 10:25:14 +02:00
44f20be108 Valuefixes 2023-07-13 10:20:54 +02:00
dc578c8a1b Valuefixes 2023-07-13 10:15:13 +02:00
e4fc32a1a2 Valuefixes 2023-07-13 10:05:26 +02:00
ec443c9bd4 Valuefixes 2023-07-13 09:55:12 +02:00
009d59c499 Valuefixes 2023-07-13 09:43:21 +02:00
85c4bfeb75 Valuefixes 2023-07-13 09:15:06 +02:00
Lukas Prause
502de2d864 Merge timing. 2023-07-12 16:16:01 +02:00
Lukas Prause
a713b9e262 Merge timing. 2023-07-12 15:18:14 +02:00
Lukas Prause
98fe00c02f Merge timing. 2023-07-12 14:55:49 +02:00
Lukas Prause
a97563fe61 Fixes typo and and too large dataset. 2023-07-12 14:36:21 +02:00
Lukas Prause
951bac5f1e Fixes typo and and too large dataset. 2023-07-12 14:17:33 +02:00
Lukas Prause
97f0946ad0 Fixes typo and and too large dataset. 2023-07-12 13:54:43 +02:00
Lukas Prause
baf2207a4f Fixes typo and and too large dataset. 2023-07-12 13:54:00 +02:00
345e6546ce stacked 2023-07-11 20:37:57 +02:00
a32df7b8aa fix stacked 2023-07-11 20:13:29 +02:00
Lukas Prause
aca74ca09c Paper plots. 2023-07-11 16:28:29 +02:00
Lukas Prause
a25288a737 Removes fontsize scaling 2023-07-11 13:18:54 +02:00
Lukas Prause
67ca4d66b0 Removes fontsize scaling 2023-07-11 13:18:00 +02:00
Lukas Prause
a701d378e2 Changes fontsize scale. 2023-07-11 13:06:25 +02:00
Lukas Prause
eb281c976c merge 2023-07-11 13:04:13 +02:00
Lukas Prause
060ffaad74 merge 2023-07-11 13:03:30 +02:00
dde4350a00 Add unit to cwnd label. 2023-07-11 10:15:55 +02:00
ef9740177e Removes legend border frame. 2023-07-11 10:04:32 +02:00
af2a53abb3 Legend fixes 2023-07-11 09:57:29 +02:00
11749d39a3 Legend fixes 2023-07-11 09:54:20 +02:00
4b291f04ba Legend fixes 2023-07-11 09:50:45 +02:00
772f2d704a Legend fixes 2023-07-11 09:47:22 +02:00
32184560f4 Legend fixes 2023-07-11 09:43:00 +02:00
0dfc17f950 Legend fixes 2023-07-11 09:25:14 +02:00
Lukas Prause
91ba0827e0 Adds fancy legend. 2023-07-10 17:01:51 +02:00
Lukas Prause
f3e155fd87 Adds fancy legend. 2023-07-10 16:56:30 +02:00
Lukas Prause
46e8bf95ba Adds fancy legend. 2023-07-10 16:52:33 +02:00
Lukas Prause
7cc030a4cc Adds fancy legend. 2023-07-10 16:47:25 +02:00
Lukas Prause
70a1e5b82e Adds fancy legend. 2023-07-10 16:01:57 +02:00
Lukas Prause
75be22b719 Adds fancy legend. 2023-07-10 15:53:35 +02:00
Lukas Prause
de04d94779 Adds fancy plot settings for eps export. 2023-07-10 15:27:47 +02:00
Lukas Prause
14eca54f98 Adds fancy plot settings for eps export. 2023-07-10 14:37:29 +02:00
Lukas Prause
4c33e4872e Styling polts 2023-07-06 15:25:27 +02:00
Lukas Prause
8168c46925 Bugfix 2023-07-06 15:07:25 +02:00
Lukas Prause
05cb425096 Bugfix 2023-07-06 14:53:26 +02:00
Lukas Prause
f594955371 Bugfix 2023-07-06 13:40:38 +02:00
Lukas Prause
041f4d0c2c Adds plot styling. 2023-07-06 12:47:02 +02:00
7f1e2699c9 Adds a filter for uplink bandwidth. 2023-07-06 10:06:25 +02:00
Lukas Prause
d6062ee78b Adds script to plot the usage of bandwidth. 2023-07-05 15:38:12 +02:00
Lukas Prause
8a91736f39 Adds script to plot the usage of bandwidth. 2023-07-05 15:20:30 +02:00
Lukas Prause
29b5e02469 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-07-05 15:15:58 +02:00
Lukas Prause
f7abcf1fdf Adds script to plot the usage of bandwidth. 2023-07-05 15:15:45 +02:00
7764e1a49d Changes to bandwidth calculation. 2023-07-05 10:20:39 +02:00
Lukas Prause
9eabd701e4 Bugfix 2023-06-30 13:29:09 +02:00
Lukas Prause
e46cc7e8bd Bugfix 2023-06-30 13:23:26 +02:00
Lukas Prause
c4d2a66d83 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-06-30 13:12:58 +02:00
Lukas Prause
a2b57d32f1 Bugfix 2023-06-30 13:12:50 +02:00
bb94a75417 Adds support automatic timestamp offset calculation. 2023-06-29 09:46:44 +02:00
Lukas Prause
3496f8385f Adds support automatic timestamp offset calculation. 2023-06-28 15:36:03 +02:00
Lukas Prause
30fa09168e Adds support automatic timestamp offset calculation. 2023-06-27 15:42:22 +02:00
Lukas Prause
6f1f5afa07 Adds support for negative time offset in gps timestamp. 2023-06-27 14:14:44 +02:00
Lukas Prause
f276dbd242 Adds support for negative time offset in gps timestamp. 2023-06-27 13:53:40 +02:00
Lukas Prause
b7e09741e1 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-06-26 12:26:46 +02:00
Lukas Prause
180f4dcc8a Refactor GPS scripts. 2023-06-21 14:35:07 +02:00
Lukas Prause
80f292767b Bufix: Exiting threads on error. 2023-06-21 14:21:06 +02:00
e5bea755ba Changes calculation of 5G handover. 2023-04-28 09:00:35 +02:00
5dcb2450b3 Changes calculation of 5G handover. 2023-04-28 08:39:32 +02:00
Lukas Prause
be15a51017 Changing plots again 2023-04-27 15:17:51 +02:00
Lukas Prause
f650d98eb5 Changes goodpit calculation. 2023-04-25 14:22:40 +02:00
Lukas Prause
b578f70876 Changes goodput caluclation 2023-04-25 14:19:27 +02:00
Lukas Prause
e5483760a5 Changes scaleing. 2023-04-25 13:11:07 +02:00
Lukas Prause
725b2d9081 Adds stackplots. 2023-04-24 17:05:20 +02:00
Lukas Prause
719413bebb Adds stackplots. 2023-04-24 17:00:56 +02:00
Lukas Prause
bfe0a2ef0e Adds stackplots. 2023-04-24 16:57:04 +02:00
Lukas Prause
e0d972c937 Adds stackplots. 2023-04-24 16:50:10 +02:00
Lukas Prause
9693685434 Adds stackplots. 2023-04-24 16:23:24 +02:00
Lukas Prause
d65ad5b10e Adds stackplots. 2023-04-24 16:17:13 +02:00
Lukas Prause
6a72050cf1 Adds stackplots. 2023-04-24 16:08:30 +02:00
Lukas Prause
4a2365c671 Adds stackplot for bandwith. 2023-04-24 10:56:24 +02:00
Lukas Prause
1473a0c25a Adds stackplot for bandwith. 2023-04-24 10:48:18 +02:00
Lukas Prause
5eb8a5ea8b Adds stackplot for bandwith. 2023-04-24 10:44:08 +02:00
Lukas Prause
dd086d77e0 Try to fix bandwidth ploting. 2023-04-24 10:13:15 +02:00
Lukas Prause
8528f89484 Try to fix bandwidth ploting. 2023-04-24 10:08:08 +02:00
Lukas Prause
6f3530e2a6 Changes for new modem. 2023-04-21 13:36:22 +02:00
Lukas Prause
26c10c5127 Changes for new modem. 2023-04-21 13:32:37 +02:00
Lukas Prause
259db62584 Changes for new modem. 2023-04-21 13:25:04 +02:00
Lukas Prause
b35104f3ba Changes for new modem. 2023-04-21 13:22:03 +02:00
Lukas Prause
a0668a89d4 Changes for new modem. 2023-04-21 13:05:11 +02:00
Lukas Prause
3837fe81f8 Changes for new modem. 2023-04-20 16:14:41 +02:00
Lukas Prause
95b39c8aaa Changes for new modem. 2023-04-20 16:12:02 +02:00
Lukas Prause
0f234adabb Changes for new modem. 2023-04-20 16:04:52 +02:00
Lukas Prause
f77140eb6b Changes for new modem. 2023-04-20 15:58:31 +02:00
Lukas Prause
ac801dc5ac Changes for new modem. 2023-04-20 15:53:43 +02:00
a845747a9c Serial output parser for EM919x. 2023-03-21 10:54:33 +01:00
Lukas Prause
b3073886c8 Refactor plots. 2023-03-17 15:19:24 +01:00
Lukas Prause
dbc4b4dd72 Refactor plots. 2023-03-17 15:16:32 +01:00
Lukas Prause
c463195a25 Pls fix. 2023-03-16 18:14:54 +01:00
Lukas Prause
61e99e6e83 Plot pcid and scid. 2023-03-16 15:33:13 +01:00
Lukas Prause
7d2d047903 Plot pcid and scid. 2023-03-16 15:17:55 +01:00
Lukas Prause
97567140ad Plot pcid and scid. 2023-03-16 15:11:13 +01:00
Lukas Prause
7a35d5014d Plot pcid and scid. 2023-03-16 14:51:18 +01:00
Lukas Prause
3362ba2c60 Adds delimiter to serial output. 2023-03-16 12:57:18 +01:00
58935bd3c6 Adds support for EM9191. 2023-03-16 10:34:12 +01:00
Lukas Prause
7bac2f7fd7 Moves bandwidth converters to serial format script. 2023-03-14 13:14:24 +01:00
Lukas Prause
a77425cfa2 Bugfix 2023-03-13 13:43:40 +01:00
Lukas Prause
178862a0e3 Adds monitoring for carrier aggregation. 2023-03-13 12:30:10 +01:00
861c764d75 Adds DL_bandwidth to plot. 2023-03-13 10:06:40 +01:00
Lukas Prause
51da4e6899 Plot changes 2023-03-09 14:50:32 +01:00
Lukas Prause
e2625998c6 Adds 'some' error handling. 2023-03-09 14:45:13 +01:00
Lukas Prause
d74c87a4d2 Fix grouper issue. 2023-03-06 13:39:10 +01:00
Lukas Prause
266cfb5e8c Fix grouper issue. 2023-03-06 13:21:34 +01:00
83d40c3f04 Bugfix 2023-03-03 10:28:13 +01:00
a5d837f865 Memory management 2023-03-01 09:03:34 +01:00
b2fa7f38be Memory management 2023-03-01 08:45:58 +01:00
6eefc8c081 Memory management 2023-03-01 08:43:02 +01:00
Lukas Prause
50afb7e4e9 Adds compare between multiple folders 2023-02-28 15:54:30 +01:00
b95d5202d5 Bugfixes 2023-02-28 09:20:31 +01:00
1b0c0b9c63 Bugfixes 2023-02-28 09:03:18 +01:00
38b7bf68ec Bugfixes 2023-02-28 08:47:40 +01:00
Lukas Prause
8004c74acf Adds script for cdf plots. 2023-02-27 13:05:36 +01:00
6d0b4d747d Changes ACK_RTT to sRTT in CDF plot. 2023-02-27 10:48:00 +01:00
29590640ef Changes ACK_RTT to sRTT in CDF plot. 2023-02-27 09:45:06 +01:00
a6953f2796 Bugfix: Missing goodput in figure. 2023-02-15 09:46:03 +01:00
Lukas Prause
08784f671e Removes correlation plot. 2023-02-10 15:51:04 +01:00
Lukas Prause
eae11e0eef Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-02-10 14:29:53 +01:00
Lukas Prause
5348128bfc merge 2023-02-10 14:29:50 +01:00
8f85fa95b6 push 2023-02-03 10:53:02 +01:00
6eca677b52 Adds correlation plots. 2023-02-03 09:04:56 +01:00
d93529ea0f Changes ACK RTT to srtt and adds arrival time on x-axis. 2023-02-03 07:53:01 +01:00
Lukas Prause
c44c359b2f Todo: Refactor 2023-02-02 17:43:06 +01:00
Lukas Prause
81a2381d59 Bugfix: Removes wrong if statement. 2023-02-02 17:42:06 +01:00
Lukas Prause
5a9e3750a9 Adds only calculation mode. 2023-02-01 14:57:39 +01:00
Lukas Prause
37412e4f96 Adds only calculation mode. 2023-02-01 14:48:48 +01:00
Lukas Prause
d028cfc0bb Renames column date_and_time to datetime. 2023-01-31 16:11:30 +01:00
Lukas Prause
d5912242b7 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2023-01-31 15:49:57 +01:00
Lukas Prause
b99e2f7f12 Refactor 2023-01-31 15:49:39 +01:00
4d09e0d2f1 Adds export to pickle format. 2023-01-24 13:51:14 +01:00
97aed222ba Sorting indexes before merge. 2023-01-24 09:09:10 +01:00
Lukas Prause
d3880d4ffc Extends color list for cell plotting. 2023-01-23 13:16:09 +01:00
Lukas Prause
1b97e12b14 Extends color list for cell plotting. 2023-01-23 12:41:49 +01:00
Lukas Prause
e1b0cfa32a Adds differnt plots fpr goodput, cwnd, rtt und cqi. 2023-01-20 14:59:04 +01:00
16b9929dcb Refactor and adds ne script (WIP) for plotting single transmissions 2023-01-20 10:40:58 +01:00
Lukas Prause
fce276836a Background color for cell id. 2023-01-19 15:28:34 +01:00
Lukas Prause
45c8ba0a56 Background color for cell id. 2023-01-19 15:24:11 +01:00
Lukas Prause
edad08d92d Background color for cell id. 2023-01-19 15:22:37 +01:00
Lukas Prause
bcab8745a0 Background color for cell id. 2023-01-19 15:18:20 +01:00
Lukas Prause
be88aec9b3 Background color for cell id. 2023-01-19 15:15:39 +01:00
Lukas Prause
2f877015d3 Background color for cell id. 2023-01-19 15:14:14 +01:00
Lukas Prause
481b6299f0 Background color for cell id. 2023-01-19 15:10:05 +01:00
Lukas Prause
295b21fac7 Background color for cell id. 2023-01-19 15:00:37 +01:00
Lukas Prause
ce13ee1ec4 Background color for cell id. 2023-01-19 14:58:13 +01:00
Lukas Prause
6bd4ecfc04 Background color for cell id. 2023-01-19 14:47:19 +01:00
Lukas Prause
4dab08b710 Background color for cell id. 2023-01-19 14:44:45 +01:00
Lukas Prause
64360e9a28 Background color for cell id. 2023-01-19 14:39:03 +01:00
Lukas Prause
675764bdc6 Background color for cell id. 2023-01-19 14:26:47 +01:00
Lukas Prause
b55d38c5b9 Background color for cell id. 2023-01-19 14:24:04 +01:00
Lukas Prause
f0df8aeb2c Background color for cell id. 2023-01-19 14:11:25 +01:00
Lukas Prause
84546710ed Background color for cell id. 2023-01-19 14:08:59 +01:00
Lukas Prause
b7676674ee Background color for cell id. 2023-01-19 13:00:55 +01:00
7d134fc34a Adds rolling window for goodput 2023-01-17 12:26:15 +01:00
881495b3d0 Bugfix, plot ack_rtt 2023-01-14 10:37:26 +01:00
6eddf0a125 Bugfix, plot ack_rtt 2023-01-14 10:36:41 +01:00
04f1d78741 Bugfix, plot ack_rtt 2023-01-14 10:34:25 +01:00
02d76c1c96 Bugfix, plot ack_rtt 2023-01-14 10:31:15 +01:00
72ba200c9e Bugfix, plot ack_rtt 2023-01-14 10:29:20 +01:00
c66c6e9ad2 Bugfix, plot ack_rtt 2023-01-14 10:28:13 +01:00
c434464bb1 Bugfix, plot ack_rtt 2023-01-14 10:25:32 +01:00
86ed3c3109 Bugfix, plot ack_rtt 2023-01-14 10:21:21 +01:00
77583205ac Bugfix, plot ack_rtt 2023-01-14 10:20:26 +01:00
d5d23f0c34 Bugfix, plot ack_rtt 2023-01-14 10:19:38 +01:00
c8f5e1f08f Bugfix, plot ack_rtt 2023-01-14 10:17:28 +01:00
4a5856edca Bugfix, plot ack_rtt 2023-01-14 10:15:46 +01:00
28d9eb8cf9 Bugfix, plot ack_rtt 2023-01-14 10:14:19 +01:00
e924df98a5 Adds cooloring for cell ids. 2023-01-13 10:24:42 +01:00
c4ea007aa2 Adds RTT CDF plot. 2023-01-13 09:53:07 +01:00
Lukas Prause
0b93810204 Changes goodput calculation. 2023-01-12 14:07:48 +01:00
Lukas Prause
4ddb6497ae Changes goodput calculation. 2023-01-12 14:06:21 +01:00
Lukas Prause
fd382fa252 Changes goodput calculation. 2023-01-12 14:04:48 +01:00
Lukas Prause
7187e62c9c Changes goodput calculation. 2023-01-12 14:03:23 +01:00
Lukas Prause
3d29c1c572 Changes goodput calculation. 2023-01-12 14:00:42 +01:00
Lukas Prause
6a0aa5ba05 Changes goodput calculation. 2023-01-12 13:02:42 +01:00
Lukas Prause
452e103d10 Changes goodput calculation. 2023-01-12 13:01:43 +01:00
Lukas Prause
c947e967ce Changes goodput calculation. 2023-01-12 12:58:36 +01:00
Lukas Prause
f0d224b452 Changes goodput calculation. 2023-01-12 12:52:20 +01:00
Lukas Prause
d3e3d3d31a Changes goodput calculation. 2023-01-12 12:50:08 +01:00
Lukas Prause
494ae460df Changes goodput calculation. 2023-01-12 12:47:24 +01:00
Lukas Prause
8a9806bb6a Changes goodput calculation. 2023-01-12 12:44:49 +01:00
ec0195de38 Changes goodput calculation. 2023-01-12 10:33:58 +01:00
a8e5ce0dbd Changes goodput calculation. 2023-01-12 10:26:49 +01:00
27590e4ed6 Changes goodput calculation. 2023-01-12 10:04:48 +01:00
57a1d5f249 Changes goodput calculation. 2023-01-12 10:00:08 +01:00
981a668fe6 Changes goodput calculation. 2023-01-12 09:57:53 +01:00
77ed647603 Changes goodput calculation. 2023-01-12 09:54:39 +01:00
f037f94040 Changes goodput calculation. 2023-01-12 09:51:58 +01:00
10ce080108 Changes goodput calculation. 2023-01-12 09:50:54 +01:00
6ee6416c37 Changes goodput calculation. 2023-01-12 09:47:20 +01:00
0f67ef76a8 Changes goodput calculation. 2023-01-12 09:43:42 +01:00
86285146ab Changes goodput calculation. 2023-01-12 09:38:00 +01:00
a3e760a682 Changes goodput calculation. 2023-01-12 09:33:47 +01:00
69dfc9a1d8 Revert changes 2023-01-11 10:16:46 +01:00
cb59a663b8 Changes to inplace operations for large dataframes. 2023-01-11 10:12:22 +01:00
237dfe73d9 Changes to inplace operations for large dataframes. 2023-01-11 10:10:32 +01:00
3f140fa016 Changes to inplace operations for large dataframes. 2023-01-11 10:05:03 +01:00
50d03e85b7 Changes to inplace operations for large dataframes. 2023-01-11 10:02:04 +01:00
ab7ef014f8 Changes to inplace operations for large dataframes. 2023-01-11 10:00:13 +01:00
2754882147 Changes to inplace operations for large dataframes. 2023-01-11 09:53:30 +01:00
41ccc31ac7 Changes to inplace operations for large dataframes. 2023-01-11 09:49:19 +01:00
53d89bad05 Changes to inplace operations for large dataframes. 2023-01-11 09:44:57 +01:00
a72552d373 Changes to inplace operations for large dataframes. 2023-01-11 09:43:20 +01:00
c81e6635b4 Changes to inplace operations for large dataframes. 2023-01-11 09:03:41 +01:00
Lukas Prause
17421f713e Adds plot for cdf. 2023-01-10 16:05:39 +01:00
Lukas Prause
7430b4c651 Adds plot for cdf. 2023-01-10 16:03:51 +01:00
Lukas Prause
91bae23ab1 Adds a script for resetting the lte gps. 2023-01-10 13:21:38 +01:00
Lukas Prause
0b63242bbd Adds a script for resetting the lte gps. 2023-01-10 13:15:06 +01:00
Lukas Prause
c9b7d8fc0d Adds exception for tshark. 2023-01-05 15:57:56 +01:00
Lukas Prause
f06dad1281 Fix formatin of GPS String. 2022-12-13 12:21:04 +01:00
Lukas Prause
90023312dc Adds pdf export. 2022-12-12 13:47:24 +01:00
Lukas Prause
45b8c74b02 Adds pdf export. 2022-12-12 13:21:43 +01:00
Lukas Prause
877f0d9d3e Adds pdf export. 2022-12-12 13:11:47 +01:00
Lukas Prause
832a41fa46 Adds systime to nmea file. 2022-12-12 12:54:22 +01:00
Lukas Prause
6e4b0a3466 Adds pdf export. 2022-12-09 16:36:17 +01:00
Lukas Prause
eb7c832b98 Plotting data related to gps loaction. 2022-12-09 16:28:12 +01:00
Lukas Prause
3f248c6d66 Adds a new handling for reconnects. 2022-12-07 13:41:30 +01:00
Lukas Prause
9ac527c8d7 Adds a new handling for reconnects. 2022-12-07 12:28:26 +01:00
Lukas Prause
070c0600d0 Adds a new handling for reconnects. 2022-12-06 17:06:40 +01:00
Lukas Prause
20bbc02385 Adds a new handling for reconnects. 2022-12-06 16:40:13 +01:00
Lukas Prause
2239911bc0 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2022-12-06 16:30:52 +01:00
Lukas Prause
ccfd3c3d06 Adds a new handling for reconnects. 2022-12-06 16:30:42 +01:00
91e98bc524 BUGFIX: Format of reconnect log 2022-11-15 13:53:27 +00:00
Lukas Prause
ba023f3573 Increases the serial response waiting time. 2022-11-14 16:47:08 +01:00
Lukas Prause
69e6284550 Adds logging for modem reconnections. 2022-11-14 13:28:20 +01:00
Lukas Prause
c568e03ca8 Adds error notification as sound for a faulty interface. 2022-11-14 13:18:36 +01:00
Lukas Prause
ae2827319e Adds error notification as sound for a faulty interface. 2022-11-14 13:06:38 +01:00
Lukas Prause
20a70979c1 Adds error notification as sound for a faulty interface. 2022-11-14 13:04:44 +01:00
Lukas Prause
0e2a9789b2 Changes measurement parameters. 2022-11-11 15:16:19 +01:00
Lukas Prause
22d9744763 Longer mesurements 2022-11-08 15:17:20 +01:00
Lukas Prause
d5ed4efca8 Merge branch 'master' of ssh://git.black-mesa.xyz:434/langspielplatte/measurement-scripts 2022-11-08 15:14:05 +01:00
Lukas Prause
39aae62867 Adds tso and a lager guradspace for iperf measurements. 2022-11-08 15:09:11 +01:00
19 changed files with 3016 additions and 68 deletions


@@ -4,6 +4,31 @@ WORKING_DIR="/home/prause/measurement-scripts/"
DATE=$(date +%F_%H-%M-%S)
NEW_DIR="/home/prause/mobile_measurements/$DATE/"
ethtool -k wwan0
STATE=$?
if test $STATE -ne 0
then
for n in 1 2 3 ; do
for f in 400 500 600 700 800 900 1000 1100 1200 1300 1400 1500 1600; do
beep -f $f -l 20
done
done
exit 1
fi
/root/connect-modem.py -l telekom
STATE=$?
if test $STATE -ne 0
then
for n in 1 2 3 ; do
for f in 400 500 600 700 800 900 1000 1100 1200 1300 1400 1500 1600; do
beep -f $f -l 20
done
done
exit 1
fi
export PIPENV_VENV_IN_PROJECT=1
mkdir $NEW_DIR
cd $WORKING_DIR
@@ -11,10 +36,11 @@ cd $WORKING_DIR
beep -f 130 -l 100 -n -f 262 -l 100 -n -f 330 -l 100 -n -f 392 -l 100 -n -f 523 -l 100 -n -f 660 -l 100 -n -f 784 -l 300 -n -f 660 -l 300 -n -f 146 -l 100 -n -f 262 -l 100 -n -f 311 -l 100 -n -f 415 -l 100 -n -f 523 -l 100 -n -f 622 -l 100 -n -f 831 -l 300 -n -f 622 -l 300 -n -f 155 -l 100 -n -f 294 -l 100 -n -f 349 -l 100 -n -f 466 -l 100 -n -f 588 -l 100 -n -f 699 -l 100 -n -f 933 -l 300 -n -f 933 -l 100 -n -f 933 -l 100 -n -f 933 -l 100 -n -f 1047 -l 400
pipenv run pipenv run ./measurement_main.py -c 130.75.73.69 \
--bandwidth \
--set time=30 \
--set time=60 \
--gps /dev/serial/by-id/usb-u-blox_AG_-_www.u-blox.com_u-blox_5_-_GPS_Receiver-if00 \
--serial /dev/ttyUSB2 \
-n 10 \
-n 600 \
--folder $NEW_DIR \
--prefix automated_$DATE \
-i wwan0
-i wwan0 \
| tee $NEW_DIR/automated_measurement_$DATE.log


@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
if isinstance(value, str):
try:
r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
return r
except Exception as e:
return -1
else:
return int(-1)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument(
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
)
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
exit(1)
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
concat_frame = None
for csv in pcap_csv_list:
print(
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
counter, len(pcap_csv_list), math.floor((counter / len(pcap_csv_list)) * 100)
)
)
counter += 1
# try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df["datetime"] = pd.to_datetime(
transmission_df["datetime"]
) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
transmission_df["goodput"] = (
transmission_df["payload_size"]
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
.transform("sum")
)
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
transmission_df["goodput_rolling"] = (
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
)
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
serial_df = pd.read_csv(
args.serial_file, converters={"Cell_ID": convert_cellid}
)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
hours=1
)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df = serial_df.sort_index()
# print(serial_df["Cell_ID"])
# serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
# lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
#transmission_df.index = transmission_df["arrival_time"]
# replace 0 in RSRQ with Nan
transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
0, np.NaN
)
transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)
# filter active state
for i in range(1, 5):
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_bw".format(i)
]
mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# filter out SCCs that are used for uplink
for i in range(1, 5):
mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# sum all effective bandwidth for 5G and 4G
transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
"SCC1_NR5G_bw"
].fillna(0)
transmission_df["lte_effective_bw_sum"] = (
transmission_df["LTE_SCC1_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC2_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC3_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC4_effective_bw"].fillna(0)
+ transmission_df["LTE_bw"].fillna(0))
transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]
transmission_df["effective_bw_sum"] = transmission_df["nr_effective_bw_sum"] + transmission_df[
"lte_effective_bw_sum"]
transmission_df = transmission_df.filter(["goodput", "effective_bw_sum", "srtt"])
transmission_df = transmission_df.reset_index(drop=True)
if concat_frame is None:
concat_frame = transmission_df
else:
concat_frame = pd.concat([concat_frame, transmission_df])
concat_frame.to_csv("{}_concat_bw_gp.csv".format(args.save))

192 calc_gps_map_csv.py Executable file

@@ -0,0 +1,192 @@
#!/usr/bin/env python3
import multiprocessing
import os
from argparse import ArgumentParser
from datetime import datetime
from math import ceil
from time import sleep
import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt
def csv_to_dataframe(csv_list, dummy):
global n
global frame_list
transmission_df = None
for csv in csv_list:
tmp_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
#tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"]) - pd.Timedelta(hours=1)
tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"])
tmp_df = tmp_df.set_index("datetime")
tmp_df.index = pd.to_datetime(tmp_df.index)
if transmission_df is None:
transmission_df = tmp_df
else:
transmission_df = pd.concat([transmission_df, tmp_df])
n.value += 1
frame_list.append(transmission_df)
from itertools import islice
def chunk(it, size):
it = iter(it)
return iter(lambda: tuple(islice(it, size)), ())
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--gps_file", required=True, help="GPS csv file.")
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument("--time_offset", default=0, type=int, help="Minutes added to GPS datetime.")
parser.add_argument("--neg_offset", default=False, action="store_true", help="Subtract GPS time offset.")
parser.add_argument("--auto_offset", default=False, action="store_true", help="Calculate GPS time offset.")
parser.add_argument(
"-c",
"--cores",
default=1,
type=int,
help="Number of cores for multiprocessing.",
)
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
manager = multiprocessing.Manager()
n = manager.Value("i", 0)
frame_list = manager.list()
jobs = []
# load all pcap csv into one dataframe
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
print("Start processing with {} jobs.".format(args.cores))
for p in parts:
process = multiprocessing.Process(target=csv_to_dataframe, args=(p, "dummy"))
jobs.append(process)
for j in jobs:
j.start()
print("Started all jobs.")
# Ensure all of the processes have finished
finished_job_counter = 0
working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
w = 0
while len(jobs) != finished_job_counter:
sleep(1)
print(
"\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
working[w],
working[w],
working[w],
len(jobs),
finished_job_counter,
n.value,
len(pcap_csv_list),
round((n.value / len(pcap_csv_list)) * 100, 2),
),
end="",
)
finished_job_counter = 0
for j in jobs:
if not j.is_alive():
finished_job_counter += 1
if (w + 1) % len(working) == 0:
w = 0
else:
w += 1
print("\r\nSorting table...")
transmission_df = pd.concat(frame_list)
frame_list = None
transmission_df = transmission_df.sort_index()
print("Calculate goodput...")
transmission_df["goodput"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10**6
)
# load dataframe an put it into geopandas
df = pd.read_csv(args.gps_file)
df["kmh"] = df["speed (knots)"].apply(lambda x: x * 1.852)
if not args.auto_offset and args.time_offset > 0:
if args.neg_offset:
df["datetime"] = pd.to_datetime(df["datetime"]) - pd.Timedelta(minutes=args.time_offset)
else:
df["datetime"] = pd.to_datetime(df["datetime"]) + pd.Timedelta(minutes=args.time_offset)
elif args.auto_offset:
gps_first = datetime.strptime(df["datetime"].iloc[0], "%Y-%m-%d %H:%M:%S.%f")
pcap_first = pd.to_datetime(transmission_df.first_valid_index())
calc_offset = gps_first - pcap_first
if gps_first > pcap_first:
time_offset = gps_first - pcap_first
df["datetime"] = pd.to_datetime(df["datetime"]) - time_offset
else:
time_offset = pcap_first - gps_first
df["datetime"] = pd.to_datetime(df["datetime"]) + time_offset
else:
df["datetime"] = pd.to_datetime(df["datetime"])
df = df.set_index("datetime")
df.index = pd.to_datetime(df.index)
gdf = gpd.GeoDataFrame(
df,
geometry=gpd.points_from_xy(df["longitude"], df["latitude"]),
crs="EPSG:4326",
)
gdf = pd.merge_asof(
gdf,
transmission_df,
tolerance=pd.Timedelta("10s"),
right_index=True,
left_index=True,
)
# read serial csv
serial_df = pd.read_csv(args.serial_file)
#serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"])
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
gdf = pd.merge_asof(
gdf,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
# format to needed format and add basemap as background
df_wm = gdf.to_crs(epsg=3857)
#df_wm.to_csv("debug-data.csv")
# ax2 = df_wm.plot(figsize=(10, 10), alpha=0.5, edgecolor='k')
df_wm.to_csv("{}gps_plot.csv".format(args.save))
print("Saved calculations to: {}gps_plot.csv".format(args.save))

228 cdf_compare.py Executable file

@@ -0,0 +1,228 @@
#!/usr/bin/env python3
import multiprocessing
import os
import pickle
from argparse import ArgumentParser
from math import ceil
from time import sleep
import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits import axisartist
from mpl_toolkits.axes_grid1 import host_subplot
def csv_to_dataframe(csv_list, folder, dummy):
global n
global frame_list
acc_df = None  # accumulator for all CSVs of this chunk
for csv in csv_list:
tmp_df = pd.read_csv(
"{}{}".format(folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"]) - pd.Timedelta(hours=1)
tmp_df = tmp_df.set_index("datetime")
tmp_df.index = pd.to_datetime(tmp_df.index)
if acc_df is None:
acc_df = tmp_df
else:
acc_df = pd.concat([acc_df, tmp_df])
n.value += 1
acc_df = acc_df.filter(
["srtt", "datetime", "payload_size", "congestion_control", "direction"])
frame_list.append(acc_df)
del acc_df
from itertools import islice
def chunk(it, size):
it = iter(it)
return iter(lambda: tuple(islice(it, size)), ())
def plot_cdf(dataframe, column_name, axis=None):
stats_df = dataframe \
.groupby(column_name) \
[column_name] \
.agg("count") \
.pipe(pd.DataFrame) \
.rename(columns={column_name: "frequency"})
# PDF
stats_df["PDF"] = stats_df["frequency"] / sum(stats_df["frequency"])
# CDF
stats_df["CDF"] = stats_df["PDF"].cumsum()
stats_df = stats_df.reset_index()
if axis:
stats_df.plot(x=column_name, y=["CDF"], grid=True, ax=axis)
else:
stats_df.plot(x=column_name, y=["CDF"], grid=True)
del stats_df
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serials", required=True, help="Serial csv files. Comma separated.")
parser.add_argument("-f", "--folders", required=True, help="PCAP csv folders. Comma separated.")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument(
"-c",
"--cores",
default=1,
type=int,
help="Number of cores for multiprocessing.",
)
parser.add_argument(
"-i",
"--interval",
default=2,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
transmission_df_list = list()
for f in args.folders.split(","):
manager = multiprocessing.Manager()
n = manager.Value("i", 0)
frame_list = manager.list()
jobs = []
# load all pcap csv into one dataframe
pcap_csv_list = list()
for filename in os.listdir(f):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
print("Start processing with {} jobs.".format(args.cores))
for p in parts:
process = multiprocessing.Process(target=csv_to_dataframe, args=(p, f, "dummy"))
jobs.append(process)
for j in jobs:
j.start()
print("Started all jobs.")
# Ensure all the processes have finished
finished_job_counter = 0
working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
w = 0
while len(jobs) != finished_job_counter:
sleep(1)
print(
"\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
working[w],
working[w],
working[w],
len(jobs),
finished_job_counter,
n.value,
len(pcap_csv_list),
round((n.value / len(pcap_csv_list)) * 100, 2),
),
end="",
)
finished_job_counter = 0
for j in jobs:
if not j.is_alive():
finished_job_counter += 1
if (w + 1) % len(working) == 0:
w = 0
else:
w += 1
print("\r\nSorting table...")
transmission_df = pd.concat(frame_list)
frame_list = None
transmission_df = transmission_df.sort_index()
#
# Don't forget to add new columns to the filter argument in the function above!
#
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10**6
)
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
#transmission_df = transmission_df.filter(["srtt", "datetime", "srtt", "payload_size"])
transmission_df = transmission_df.drop(columns=["congestion_control", "direction"])
# read serial csv
#serial_df = pd.read_csv(args.serial_file)
#serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
#serial_df = serial_df.set_index("datetime")
#serial_df.index = pd.to_datetime(serial_df.index)
#serial_df.sort_index()
#transmission_df = pd.merge_asof(
# transmission_df,
# serial_df,
# tolerance=pd.Timedelta("1s"),
# right_index=True,
# left_index=True,
#)
transmission_df_list.append(dict(
df=transmission_df,
cc_algo=cc_algo,
transmission_direction=transmission_direction
))
del transmission_df
# Plot sRTT CDF
legend = list()
plot_cdf(transmission_df_list[0]["df"], "srtt")
legend.append(transmission_df_list[0]["cc_algo"])
for i in range(1, len(transmission_df_list)):
plot_cdf(transmission_df_list[i]["df"], "srtt", axis=plt.gca())
legend.append(transmission_df_list[i]["cc_algo"])
#plt.xscale("log")
plt.xlim(0, 0.15)
plt.xlabel("sRTT [s]")
plt.ylabel("CDF")
plt.legend(legend)
plt.title("{}".format(transmission_df_list[0]["transmission_direction"]))
plt.savefig("{}{}_cdf_compare_plot.pdf".format(args.save, "srtt"))
plt.clf()
# Plot goodput CDF
legend = list()
plot_cdf(transmission_df_list[0]["df"], "goodput_rolling")
legend.append(transmission_df_list[0]["cc_algo"])
for i in range(1, len(transmission_df_list)):
plot_cdf(transmission_df_list[i]["df"], "goodput_rolling", axis=plt.gca())
legend.append(transmission_df_list[i]["cc_algo"])
plt.xlabel("goodput [mbps]")
plt.ylabel("CDF")
plt.legend(legend)
plt.title("{}".format(transmission_df_list[0]["transmission_direction"]))
plt.savefig("{}{}_cdf_compare_plot.pdf".format(args.save, "goodput"))

57 format_gps_to_csv.py Normal file → Executable file

@@ -0,0 +1,57 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
from time import sleep
import pandas as pd
import csv
from datetime import datetime
import math
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="NMEA file.")
args = parser.parse_args()
epoch = datetime.utcfromtimestamp(0)
outputfile = open(args.file.replace("nmea", "csv"), "w")
writer = csv.writer(outputfile, delimiter=",", lineterminator="\n")
writer.writerow(
["datetime", "time_epoch", "latitude", "longitude", "speed (knots)", "systime_epoch"]
)
csv_string = ""
for line in open(args.file, "r").readlines():
if line.startswith("$GPRMC"):
row = line.split(",")
time = row[1]
lat = row[3]
lat_direction = row[4]
lon = row[5]
lon_direction = row[6]
speed = row[7]
date = row[9]
systime_epoch = row[13] if len(row) >= 14 else 0
date_and_time = datetime.strptime(date + " " + time, "%d%m%y %H%M%S.%f")
time_since_epoch = date_and_time.timestamp()
date_and_time = date_and_time.strftime("%y-%m-%d %H:%M:%S.%f") # [:-3]
lat = round(math.floor(float(lat) / 100) + (float(lat) % 100) / 60, 6)
if lat_direction == "S":
lat = lat * -1
lon = round(math.floor(float(lon) / 100) + (float(lon) % 100) / 60, 6)
if lon_direction == "W":
lon = lon * -1
writer.writerow([date_and_time, time_since_epoch, lat, lon, speed, systime_epoch])
outputfile.close()
sleep(1)
gps_df = pd.read_csv(args.file.replace("nmea", "csv"))
#gps_df["datetime"] = pd.to_datetime(
# gps_df["systime_epoch"].apply(lambda x: datetime.fromtimestamp(x))
#)
gps_df.to_csv(args.file.replace("nmea", "csv"))

191 format_serial_txt_to_csv.py Executable file

@@ -0,0 +1,191 @@
#!/usr/bin/env python3
import csv
import datetime
from argparse import ArgumentParser
import pandas as pd
def convert_bandwidth(value):
try:
value = int(value)
except:
value = -1
if value == 0:
return 1.4
elif value == 1:
return 3
elif value == 2:
return 5
elif value == 3:
return 10
elif value == 4:
return 15
elif value == 5:
return 20
else:
return 0
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="Input txt file.")
args = parser.parse_args()
file = open(args.file, "r")
content = file.read()
file.close()
all_csv_lines = list()
csv_header = ["time"]
h1 = False
h2 = False
h3 = False
for line in content.split("\n"):
if line == "" or line == "\n":
break
raw_columns = line.split(";")
csv_line = list()
csv_line.append(raw_columns[0])
for i in range(1, len(raw_columns)):
col = raw_columns[i]
if 'AT+QNWCFG="nr5g_csi"' in col:
if not h1:
csv_header += ["mcs_PDSCH", "ri_PDSCH", "downlink_cqi", "pmi"]
h1 = True
tmp = raw_columns[i + 1].replace('+QNWCFG: "nr5g_csi",', "")
csv_line += tmp.split(",")
elif "AT+QENDC" in col:
if not h2:
csv_header += [
"endc_avl",
"plmn_info_list_r15_avl",
"endc_rstr",
"5G_basic",
]
h2 = True
tmp = raw_columns[i + 1].replace("+QENDC: ", "")
csv_line += tmp.split(",")
elif 'AT+QENG="servingcell"' in col:
if not h3:
csv_header += [
"connection_state",
"is_tdd",
"mcc",
"mnc",
"cellID",
"PCID",
"earfcn",
"freq_band_ind",
"UL_bandwidth",
"DL_bandwidth",
"TAC",
"RSRP",
"RSRQ",
"RSSI",
"SINR",
"CQI_1-30",
"tx_power",
"srxlev",
"MCC",
"MNC",
"PCID",
"RSRP",
"SINR",
"RSRQ",
"ARFCN",
"band",
]
h3 = True
if "NOCONN" in raw_columns[i + 1]:
csv_line.append("NOCONN")
csv_line += raw_columns[i + 2].replace('+QENG: "LTE",', "").split(",")
csv_line += (
raw_columns[i + 3].replace('+QENG:"NR5G-NSA",', "").split(",")
)
elif "SEARCH" in raw_columns[i + 1]:
csv_line.append("SEARCH")
csv_line += [""] * 25
elif "OK" == raw_columns[i + 1]:
csv_line.append("OK")
csv_line += [""] * 25
else:
csv_line.append("undefined")
csv_line += [""] * 25
all_csv_lines.append(csv_line)
outputfile = open(args.file.replace("txt", "csv"), "w")
writer = csv.writer(outputfile, delimiter=",", lineterminator="\n", escapechar='\\')
writer.writerow(csv_header)
#print(all_csv_lines)
for l in all_csv_lines:
#print(l)
writer.writerow(l)
outputfile.close()
outputfile = open(args.file.replace("txt", "csv"), "r")
serial_df = pd.read_csv(outputfile,
converters={"UL_bandwidth": convert_bandwidth, "DL_bandwidth": convert_bandwidth},
)
serial_df = serial_df.drop(columns=["MCC", "MNC"])
serial_df["datetime"] = pd.to_datetime(
serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(x))
)
serial_df.to_csv(args.file.replace("txt", "csv"))
outputfile.close()
exit()
delete_string = [
'AT+QNWCFG="nr5g_csi";',
'+QNWCFG: "nr5g_csi"',
'AT+QENG="servingcell";+QENG: "servingcell",',
"+QENG:",
"AT+QENDC;+QENDC:",
]
for d in delete_string:
content = content.replace(d, ",")
content = (
content.replace(";", "")
.replace(" ", "")
.replace(",,,", ",")
.replace('"', "")
.replace("LTE,", "")
.replace("NR5G-NSA,", "")
)
header = "time,mcs,ri,cqi,pmi,conn_state,is_tdd,MCC,MNC,cellID,PCID,earfcn,freq_band_ind,UL_bandwidth,DL_bandwidth,TAC,RSRP,RSRQ,RSSI,SINR,CQI,tx_power,srxlev,MCC,MNC,PCID,RSRP,SINR,RSRQ,ARFCN,band,endc_avl,plmn_info_list_r15_avl,endc_rstr,5G_basic\n"
csv_path = args.file.replace("txt", "csv")
print("Write to: {}".format(csv_path))
csv_string = header
for csv_line in content.split("\n"):
if len(header.split(",")) == len(csv_line.split(",")):
csv_string += csv_line + "\n"
else:
# print("{} found {}".format(len(header.split(",")), len(csv_line.split(","))))
print("Could not interpret string: {}".format(csv_line))
print(
"Expect {} columns got {}".format(
len(header.split(",")), len(csv_line.split(","))
)
)
csv_string_io = StringIO(csv_string)
serial_df = pd.read_csv(csv_string_io)
serial_df = serial_df
serial_df["datetime"] = pd.to_datetime(
serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(x))
)
serial_df.to_csv(csv_path)
print(serial_df)


@@ -0,0 +1,57 @@
#!/usr/bin/env python3
import datetime
import re
from argparse import ArgumentParser
import pandas as pd
KEY_VALUE_REGEX = r"(.+):(.+)"
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="Input txt file.")
args = parser.parse_args()
file = open(args.file, "r")
content = file.read()
file.close()
serial_df = None
p = re.compile(KEY_VALUE_REGEX)
for part in content.split(";;;"):
if part == "":
break
part = part.replace("\t", "\n").strip()
time = None
line_dict = dict(time=None)
for line in part.split("\n"):
if not line.startswith("!") or line == "" or line == "\n":
if line_dict["time"] is None:
time = line
line_dict["time"] = [time]
m = p.match(line)
if m:
key = m.group(1).strip().replace(" ", "_")
value = m.group(2).replace("MHz", "").replace("---", "").strip()
line_dict[key] = [value]
if len(line_dict) > 1:
#print("line:")
#print(line_dict)
#print("serial_df:")
#print(serial_df)
if serial_df is None:
serial_df = pd.DataFrame.from_dict(line_dict, orient="columns",)
else:
serial_df = pd.concat([serial_df, pd.DataFrame.from_dict(line_dict, orient="columns")])
serial_df = serial_df.copy()
serial_df["datetime"] = pd.to_datetime(
serial_df["time"].apply(lambda x: datetime.datetime.fromtimestamp(int(x)))
)
serial_df.to_csv(args.file.replace("txt", "csv"))
#serial_df = serial_df.filter(["datetime", "LTE_bw", "LTE_SCC2_bw", "LTE_SCC3_bw", "LTE_SCC4_bw", "SCC1_NR5G_bw", "NR5G_dl_bw", "NR5G_ul_bw", "LTE_SCC1_bw", "NR5G_bw"])
#print(serial_df.to_string())

356 format_throughput_pcap_to_csv.py Executable file

@@ -0,0 +1,356 @@
#!/usr/bin/env python3
import datetime
from io import StringIO
from math import ceil
import pandas as pd
import multiprocessing
import os
import re
import subprocess
from argparse import ArgumentParser
from time import sleep, time
# tshark -r ./tcp-cap-test/test__bandwidth_reverse_tcp_bbr_1.pcap -Y "tcp.stream eq 1" -T fields -e frame.time_relative -e ip.len -e ip.hdr_len -e tcp.hdr_len -e tcp.analysis.ack_rtt -e tcp.analysis.bytes_in_flight -e tcp.analysis.retransmission -e tcp.analysis.duplicate_ack -e ip.dst -e ip.src -e tcp.options.mss_val -E header=y -E separator=, -E quote=d
from util import chunk_list
def format_tcp_trace_to_csv(pcap_number, packets_to_keep, is_reverse=False):
txt_name = "{}{}{}.txt".format(args.folder, args.tcp_trace, pcap_number)
try:
txt_file = open(txt_name, "r")
except IOError as e:
print("\rCan not open file {}\n {} {}".format(txt_name, e.errno, e.strerror))
return
content = txt_file.read()
txt_file.close()
csv_string = ""
csv_string += "time_tcp_probe,snd_cwnd,snd_wnd,srtt\n"
uptime = None
counter = 0
lines = content.split("\n")
start_time = None
for line in lines:
counter += 1
if uptime is None:
uptime = float(line.split(" ")[0])
else:
if is_reverse:
line_filter = "src=[::ffff:{}]:{}".format(args.server, args.port)
else:
line_filter = "dest={}:{}".format(args.server, args.port)
# skip tcp_probe lines from the iperf control connection: keep only the last packets_to_keep lines (the measured stream, i.e. len of tcp.stream eq 1)
if line_filter in line and counter >= (len(lines) - packets_to_keep):
match = re.match(
r".* (\d+\.\d+): tcp_probe:.*snd_cwnd=(\d+).*snd_wnd=(\d+).*srtt=(\d+)",
line,
)
if match:
if start_time is None:
start_time = float(match.group(1)) - uptime
time = float(match.group(1)) - (uptime + start_time)
snd_cwnd = match.group(2)
snd_wnd = match.group(3)
srtt = match.group(4)
csv_string += "{},{},{},{}\n".format(time, snd_cwnd, snd_wnd, srtt)
csv_string_io = StringIO(csv_string)
trace_df = pd.read_csv(csv_string_io)
if len(trace_df) <= 1:
print("\rFaulty tcp trace file for pcap no: {}".format(pcap_number))
return None
return trace_df
def format_pcaps_to_csv(pcaps, dummy):
global n
for pcap in pcaps:
if pcap.endswith(".pcap") and pcap.startswith(args.prefix):
match = re.match(regex, pcap)
if match:
# metadata from pcap filename
direction = "upload"
if "_reverse_" in pcap:
direction = "download"
congestion_control = match.group(2)
pcap_number = match.group(3)
# analyse traffic from pcap (receiver side)
tshark_command = [
"tshark",
"-r",
"{}{}".format(args.folder, pcap),
# remove this for mobile measurements
# "-Y",
# "tcp.stream eq 1",
"-T",
"fields",
"-e",
"frame.time_relative",
"-e",
"ip.len",
"-e",
"ip.hdr_len",
"-e",
"tcp.hdr_len",
"-e",
"tcp.analysis.ack_rtt",
"-e",
"tcp.analysis.bytes_in_flight",
"-e",
"tcp.analysis.retransmission",
"-e",
"tcp.analysis.duplicate_ack",
"-e",
"ip.src",
"-e",
"ip.dst",
"-e",
"tcp.options.mss_val",
"-e",
"tcp.window_size",
"-e",
"frame.time_epoch",
"-e",
"tcp.stream", # have to be the last value in line!
"-E",
"header=y",
"-E",
"separator=,",
"-E",
"quote=d",
]
tshark_out = None
try:
tshark_out = subprocess.check_output(tshark_command).decode("utf-8")
except subprocess.CalledProcessError as tsharkexec:
if tsharkexec.returncode == 2:
print("\rtshark could not open pcap: {}".format(pcap))
else:
print("\rtshark exited with code: {}".format(tsharkexec.returncode))
print(tsharkexec.output)
continue
# Convert String into StringIO
csv_string_io = StringIO(tshark_out)
conv_bool = lambda x: (True if x != "" else False)
pcap_df = pd.read_csv(
csv_string_io,
converters={
"tcp.analysis.retransmission": conv_bool,
"tcp.analysis.duplicate_ack": conv_bool,
},
)
last_tcp_stream_in_pcap = pcap_df["tcp.stream"].max()
pcap_df = pcap_df.loc[pcap_df["tcp.stream"] == last_tcp_stream_in_pcap]
pcap_df["payload_size"] = pcap_df["ip.len"] - (
pcap_df["ip.hdr_len"] + pcap_df["tcp.hdr_len"]
)
pcap_df["direction"] = direction
pcap_df["congestion_control"] = congestion_control
pcap_df["pcap_number"] = pcap_number
pcap_df["datetime"] = pd.to_datetime(
pcap_df["frame.time_epoch"].apply(
lambda x: datetime.datetime.fromtimestamp(x)
)
)
pcap_df = pcap_df.drop(
columns=["tcp.stream", "ip.len", "ip.hdr_len", "tcp.hdr_len"]
)
pcap_df.rename(
columns={
"frame.time_relative": "arrival_time",
"ip.src": "src_ip",
"ip.dst": "dst_ip",
"tcp.options.mss_val": "mss",
"tcp.analysis.ack_rtt": "ack_rtt",
"tcp.analysis.bytes_in_flight": "bytes_in_flight",
"tcp.window_size": "receive_window_size",
"tcp.analysis.retransmission": "is_retranmission",
"tcp.analysis.duplicate_ack": "is_dup_ack",
"frame.time_epoch": "time_epoch",
},
inplace=True,
)
pcap_df = pcap_df.sort_values("arrival_time")
try:
# join tcp_trace data with pcap data
merge_srtt = True
if merge_srtt:
tcp_trace_df = format_tcp_trace_to_csv(
pcap_number,
len(pcap_df),
is_reverse=True if "_reverse_" in pcap else False,
)
if tcp_trace_df is None:
print(
"\rNo tcp trace file for pcap no {} found".format(
pcap_number
)
)
continue  # was "break", but that stopped the whole worker thread
merged_df = pd.merge_asof(
pcap_df.loc[pcap_df["src_ip"] != args.server],
tcp_trace_df,
left_on="arrival_time",
right_on="time_tcp_probe",
tolerance=0.01,
)
merged_df = pd.concat(
[merged_df, pcap_df.loc[pcap_df["src_ip"] == args.server]]
)
merged_df = merged_df.sort_values("arrival_time")
merged_df.to_csv(
"{}{}".format(args.folder, pcap).replace(".pcap", ".csv")
)
else:
pcap_df.to_csv(
"{}{}".format(args.folder, pcap).replace(".pcap", ".csv")
)
except:
print("\rCould not merge data for pcap no: {}".format(pcap))
pcap_df.to_csv(
"{}{}".format(args.folder, pcap).replace(".pcap", ".csv")
)
n.value += 1
else:
print("File does not match regex: {}".format(pcap))
else:
print("File is not from type PCAP: {}".format(pcap))
from itertools import islice
def chunk(it, size):
it = iter(it)
return iter(lambda: tuple(islice(it, size)), ())
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--folder", required=True, help="Folder with pcaps.")
parser.add_argument(
"-p",
"--prefix",
required=True,
help="Filename prefix e.g. 2021-03-17_bandwidth_tcp_bbr_",
)
parser.add_argument(
"-t",
"--tcp_trace",
required=True,
help="Format of tcp trace txt files e.g: 2021_03_30_bandwidth_reverse_tcp_tcp_trace_ for "
"2021_03_30_bandwidth_reverse_tcp_tcp_trace_1.txt",
)
parser.add_argument(
"-c",
"--cores",
default=1,
type=int,
help="Number of cores for multiprocessing.",
)
parser.add_argument(
"--port",
default=5201,
type=int,
help="iPerf3 port used for measurements",
)
parser.add_argument(
"--server",
default="130.75.73.69",
type=str,
help="iPerf3 server ip used for measurements",
)
args = parser.parse_args()
manager = multiprocessing.Manager()
# regex for protocol, congestion control algorithm and pcap number
regex = r".*_bandwidth_[reverse_]*(.+)_(.+)_(\d+)\.pcap"
csv_header = "n,start_time,end_time,payload_size,protocol,algorithm,direction,packages_received,syns_in_pcap\n"
n = manager.Value("i", 0)
filenames = os.listdir(args.folder)
number_of_files = len(filenames)
pcap_list = []
jobs = []
st = time()
for filename in filenames:
if filename.endswith(".pcap") and filename.startswith(args.prefix):
if re.match(regex, filename):
pcap_list.append(filename)
pcap_list.sort()
print("Found {} pcap files in {} files.".format(len(pcap_list), len(filenames)))
if len(pcap_list) == 0:
print("Abort no pcaps found with prefix: {}".format(args.prefix))
print("{}{}".format(args.folder, args.prefix))
exit(1)
parts = chunk(pcap_list, ceil(len(pcap_list) / args.cores))
print("Start processing with {} jobs.".format(args.cores))
for p in parts:
process = multiprocessing.Process(target=format_pcaps_to_csv, args=(p, "dummy"))
jobs.append(process)
for j in jobs:
j.start()
print("Started all jobs.")
# Ensure all of the processes have finished
finished_job_counter = 0
working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
w = 0
while len(jobs) != finished_job_counter:
sleep(1)
print(
"\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcaps. ({}%) ".format(
working[w],
working[w],
working[w],
len(jobs),
finished_job_counter,
n.value,
len(pcap_list),
round((n.value / len(pcap_list)) * 100, 2),
),
end="",
)
finished_job_counter = 0
for j in jobs:
if not j.is_alive():
finished_job_counter += 1
if (w + 1) % len(working) == 0:
w = 0
else:
w += 1
print("")
et = time()
# get the execution time
elapsed_time = et - st
print("Execution time:", elapsed_time, "seconds")


@@ -23,9 +23,16 @@ GET_IPV4_SHELL_COMMAND = "ip a | grep {} | grep inet | cut -d' ' -f6 | cut -d'/'
NR_CQI_COMMAND = b'AT+QNWCFG="nr5g_csi"\r\n'
NR_SERVINGCELL_COMMAND = b'AT+QENG="servingcell"\r\n'
NR_EN_DC_STATUS_COMMAND = b"AT+QENDC\r\n"
NR_SERIAL_RESPOND_TIME = 0.3 # s
NE_CA_COMMAND = b'AT+QCAINFO\r\n'
NR_SERIAL_RESPOND_TIME = 0.5 # s
CMD_TIME_EPOCH = "date +%s"
TIMEOUT_OFFSET = 10.0
WAIT_AFTER_IPERF = 5.0
modem_serial_obj = None
gps_serial_obj = None
MODEM_MODEL = None
class ProcessHandler:
@@ -192,6 +199,10 @@ def is_tcp_probe_enabled():
)
def disable_tso(interface):
os.system("ethtool -K {} tx off sg off tso off gro off".format(interface))
def enable_tcp_probe():
os.system("echo '1' > /sys/kernel/debug/tracing/events/tcp/tcp_probe/enable")
@@ -211,7 +222,8 @@ def raise_receive_window():
def monitor_serial(ser, output_file):
run_cmds = [NR_CQI_COMMAND, NR_SERVINGCELL_COMMAND, NR_EN_DC_STATUS_COMMAND]
#run_cmds = [NR_CQI_COMMAND, NR_SERVINGCELL_COMMAND, NR_EN_DC_STATUS_COMMAND, NE_CA_COMMAND]
run_cmds = [b"at!gstatus?\r\n", b"AT!NRINFO?\r\n"]
try:
while ser.is_open:
response = subprocess.check_output(CMD_TIME_EPOCH, shell=True).decode(
@@ -219,15 +231,15 @@ def monitor_serial(ser, output_file):
)
for cmd in run_cmds:
ser.write(cmd)
sleep(0.3)
sleep(NR_SERIAL_RESPOND_TIME)
response += ser.read(ser.inWaiting()).decode("utf-8")
response = (
response.replace("\n", ";")
.replace("\r", "")
.replace(";;OK", ";")
.replace(";;", ";")
)
write_to_file(output_file, response + "\n")
#response = (
# response.replace("\n", ";")
# .replace("\r", "")
# .replace(";;OK", ";")
# .replace(";;", ";")
#)
write_to_file(output_file, response + ";;;\n")
except:
if not ser.is_open:
print_message("Serial port is closed. Exit monitoring thread.")
@@ -238,6 +250,53 @@ def monitor_serial(ser, output_file):
return
def start_serial_monitoring(ser, baudrate, folder, prefix):
global modem_serial_obj
print_message("Opening serial port for {}".format(ser))
modem_serial_obj = serial.Serial(
port=ser,
baudrate=baudrate,
)
modem_serial_obj.isOpen()
ser_filepath = "{}{}_serial_monitor_output.txt".format(
folder, prefix
)
ser_thread = Thread(
target=monitor_serial,
args=(
modem_serial_obj,
ser_filepath,
),
)
ser_thread.start()
def is_serial_monitoring_running():
return modem_serial_obj.is_open
def start_gps_monitoring(gps, baudrate, folder, prefix):
global gps_serial_obj
print_message("Opening GPS serial port for {}".format(gps))
gps_serial_obj = serial.Serial(
gps,
baudrate=baudrate,
)
gps_ser_filepath = "{}{}_gps.nmea".format(
folder, prefix
)
gps_ser_thread = Thread(
target=monitor_gps,
args=(
gps_serial_obj,
gps_ser_filepath,
),
)
gps_ser_thread.start()
def monitor_gps(ser, output_file):
ser.flushInput()
ser.flushOutput()
@@ -245,10 +304,13 @@ def monitor_gps(ser, output_file):
ser.readline()
try:
while ser.is_open:
nmea_sentence = ser.readline() #GPRMC
nmea_sentence = ser.readline() # GPRMC
nmea_str = nmea_sentence.decode("utf-8")
if nmea_str.startswith("$GPRMC"):
write_to_file(output_file, nmea_str)
time_epoch = subprocess.check_output(CMD_TIME_EPOCH, shell=True).decode(
"utf-8"
)
write_to_file(output_file, "{},{}".format(nmea_str.replace("\n", ""), time_epoch))
except:
if not ser.is_open:
print_message("GPS serial port is closed. Exit monitoring thread.")
@@ -261,12 +323,45 @@ def monitor_gps(ser, output_file):
def connect_moden(provider="telekom"):
print_message("Connect modem with provider {} ...".format(provider))
if MODEM_MODEL == "EM9191":
os.system("/root/connection_mbim.py -l {}".format(provider))
else:
os.system("/root/connect-modem.py -l {}".format(provider))
print_message("...done")
def reconnect_modem(provider="telekom"):
def reconnect_modem(provider="telekom", hard=False):
#TODO
os.system("/root/connection_mbim.py -s")
sleep(2)
os.system("/root/connection_mbim.py -l {}".format(provider))
return
global modem_serial_obj
print_message("Reonnect modem with provider {} ...".format(provider))
if hard:
print_message("Performing HARD reconnect...")
try:
if modem_serial_obj.is_open:
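# AT+CFUN=4 puts the modem into flight mode (radio off); AT+CFUN=1 restores full functionality and forces a fresh network attach.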
modem_serial_obj.write(b'at+cfun=4\r\n')
sleep(NR_SERIAL_RESPOND_TIME)
sleep(2)
modem_serial_obj.write(b'at+cfun=1\r\n')
sleep(NR_SERIAL_RESPOND_TIME)
except Exception as e:
if not modem_serial_obj.is_open:
print_message("Serial port is closed. {}".format(e))
os.system("/root/connect-modem.py -s")
else:
print_message(
"Something went wrong while writing to serial. {}".format(e)
)
os.system("/root/connect-modem.py -s")
sleep(2)
os.system("/root/connect-modem.py -s")
sleep(5)
os.system("/root/connect-modem.py -l {}".format(provider))
else:
os.system("/root/connect-modem.py -s")
sleep(5)
os.system("/root/connect-modem.py -l {}".format(provider))
@@ -339,6 +434,7 @@ class Server:
sleep(2)
ws_filter = ""
congestion_control_index = 0
if server_is_sender:
# server sends
if not is_tcp_probe_enabled():
@@ -441,7 +537,7 @@ class Server:
"--one-off",
]
subprocess.call(iperf_command)
sleep(2)
sleep(WAIT_AFTER_IPERF)
processHandler.kill_all()
congestion_control_index = (congestion_control_index + 1) % len(
tcp_algo
@@ -532,46 +628,11 @@ class Client:
sleep(1)
print_message("Start measurement")
ser = None
if self.config["serial"] is not None:
print_message("Opening serial port for {}".format(self.config["serial"]))
ser = serial.Serial(
port=self.config["serial"],
baudrate=self.config["baudrate"],
)
ser.isOpen()
ser_filepath = "{}{}_serial_monitor_output.txt".format(
self.config["folder"], self.config["prefix"]
)
ser_thread = Thread(
target=monitor_serial,
args=(
ser,
ser_filepath,
),
)
ser_thread.start()
gps_ser = None
start_serial_monitoring(self.config["serial"], self.config["baudrate"], self.config["folder"], self.config["prefix"])
if self.config["gps"] is not None:
print_message("Opening GPS serial port for {}".format(self.config["gps"]))
gps_ser = serial.Serial(
self.config["gps"],
baudrate=self.config["gps_baudrate"],
)
gps_ser_filepath = "{}{}_gps.nmea".format(
self.config["folder"], self.config["prefix"]
)
gps_ser_thread = Thread(
target=monitor_gps,
args=(
gps_ser,
gps_ser_filepath,
),
)
gps_ser_thread.start()
start_gps_monitoring(self.config["gps"], self.config["gps_baudrate"], self.config["folder"], self.config["prefix"])
if self.config["bandwidth"]:
self.bandwidth()
@@ -586,18 +647,17 @@ class Client:
elif self.config["ping"]:
self.ping()
if ser is not None:
if modem_serial_obj is not None:
print_message("Closing serial port...")
ser.close()
modem_serial_obj.close()
sleep(2)
print_message("done...")
if gps_ser is not None:
if gps_serial_obj is not None:
print_message("Closing GPS serial port...")
gps_ser.close()
gps_serial_obj.close()
sleep(2)
print_message("done...")
def ping(self):
c = "ping {} -I {} -i {} -c {}".format(
self.config["server"],
@@ -762,8 +822,18 @@ class Client:
ws_filter = "{} and port {}".format("tcp", self.config["port"])
print_message("Use ws filter: {}".format(ws_filter))
for n in range(1, self.config["number_of_measurements"] + 1):
reconnect_count = 0
if not is_modem_connected():
background_write_to_file(
filepath="{}{}_reconnect.log".format(
self.config["folder"], self.config["prefix"]
),
content='{}\n'.format(datetime.timestamp(datetime.now())),
)
reconnect_modem()
sleep(2)
if self.config["serial"] is not None and not is_serial_monitoring_running():
start_serial_monitoring(self.config["serial"], self.config["baudrate"], self.config["folder"], self.config["prefix"])
print_message(
"{} of {}".format(n, self.config["number_of_measurements"])
)
@@ -807,12 +877,30 @@ class Client:
iperf_return = 0
while not is_measurement_done or iperf_return != 0:
if iperf_return != 0:
reconnect_modem()
background_write_to_file(
filepath="{}{}_reconnect.log".format(
self.config["folder"], self.config["prefix"]
),
content='{}\n'.format(datetime.timestamp(datetime.now())),
)
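# Escalate to a hard, radio-level reconnect once more than five soft reconnect attempts have failed.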
reconnect_modem(hard=reconnect_count > 5)
reconnect_count += 1
sleep(2)
if self.config["serial"] is not None and not is_serial_monitoring_running():
start_serial_monitoring(self.config["serial"], self.config["baudrate"], self.config["folder"], self.config["prefix"])
try:
try:
iperf_return = subprocess.call(iperf_command, timeout=float(time) + TIMEOUT_OFFSET)
iperf_return = subprocess.call(
iperf_command, timeout=float(time) + TIMEOUT_OFFSET
)
except subprocess.TimeoutExpired:
print_message("iPerf timed out...")
background_write_to_file(
filepath="{}{}_reconnect.log".format(
self.config["folder"], self.config["prefix"]
),
content='{}\n'.format(datetime.timestamp(datetime.now())),
)
reconnect_modem()
except KeyboardInterrupt:
exit()
@@ -831,8 +919,18 @@ class Client:
enable_tcp_probe()
print_message("tcp probe is now enabled")
for n in range(1, self.config["number_of_measurements"] + 1):
reconnect_count = 0
if not is_modem_connected():
background_write_to_file(
filepath="{}{}_reconnect.log".format(
self.config["folder"], self.config["prefix"]
),
content='{}\n'.format(datetime.timestamp(datetime.now())),
)
reconnect_modem()
sleep(2)
if self.config["serial"] is not None and not is_serial_monitoring_running():
start_serial_monitoring(self.config["serial"], self.config["baudrate"], self.config["folder"], self.config["prefix"])
print_message(
"{} of {}".format(n, self.config["number_of_measurements"])
)
@@ -875,12 +973,30 @@ class Client:
iperf_return = 0
while not is_measurement_done or iperf_return != 0:
if iperf_return != 0:
reconnect_modem()
background_write_to_file(
filepath="{}{}_reconnect.log".format(
self.config["folder"], self.config["prefix"]
),
content='{}\n'.format(datetime.timestamp(datetime.now())),
)
reconnect_modem(hard=reconnect_count > 5)
reconnect_count += 1
sleep(2)
if self.config["serial"] is not None and not is_serial_monitoring_running():
start_serial_monitoring(self.config["serial"], self.config["baudrate"], self.config["folder"], self.config["prefix"])
try:
try:
iperf_return = subprocess.call(iperf_command, timeout=float(time) + TIMEOUT_OFFSET)
iperf_return = subprocess.call(
iperf_command, timeout=float(time) + TIMEOUT_OFFSET
)
except subprocess.TimeoutExpired:
print_message("iPerf timed out...")
background_write_to_file(
filepath="{}{}_reconnect.log".format(
self.config["folder"], self.config["prefix"]
),
content='{}\n'.format(datetime.timestamp(datetime.now())),
)
reconnect_modem()
except KeyboardInterrupt:
exit()
@@ -889,7 +1005,7 @@ class Client:
congestion_control_index = (congestion_control_index + 1) % len(
tcp_algo
)
sleep(4)
sleep(WAIT_AFTER_IPERF + 2)
def cbr(self):
bitrate = "1M"
@@ -1097,6 +1213,7 @@ if __name__ == "__main__":
default=None,
help="Start in client mode and set the server IPv4 address.",
)
parser.add_argument("--modem", default="EM9191", help="Modem model name.")
parser.add_argument(
"--prefix", default=now.strftime("%Y-%m-%d"), help="Prefix on filename."
)
@@ -1198,6 +1315,9 @@ if __name__ == "__main__":
args = parser.parse_args()
disable_tso(args.interface)
MODEM_MODEL = args.modem
if args.server:
asyncio.run(start_server(args))
elif args.client is not None:

85
plot_gps_csv.py Executable file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
import numpy as np
import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="Measurement drive CSV")
parser.add_argument("-a", "--column", required=True, help="Column to plot")
parser.add_argument("-l", "--label", help="Label above the plot.")
parser.add_argument("--no_legend", action="store_false", default=True, help="Do not show legend.")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument(
"--show_providerinfo",
action="store_true",
default=False,
help="Show provider info for map tiles and zoom levels.",
)
args = parser.parse_args()
df = pd.read_csv(args.file)
gdf = gpd.GeoDataFrame(
df,
geometry=gpd.points_from_xy(df["longitude"], df["latitude"]),
crs="EPSG:4326",
)
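# srtt is stored in microseconds; convert to seconds (same conversion as in the other plotting scripts).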
gdf["srtt"] = gdf["srtt"].apply(lambda x: x / 10 ** 6)
gdf["is_retranmission"] = gdf["is_retranmission"].replace(True, np.NaN).dropna().astype(float)
print("Start plotting...")
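# Reproject to Web Mercator (EPSG:3857) so the points line up with the contextily basemap tiles.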
df_wm = gdf.to_crs(epsg=3857)
ax2 = df_wm.plot()
ax2 = df_wm.plot(column=args.column, cmap="hot", legend=args.no_legend, ax=ax2, legend_kwds={"label": args.label},)
# ax2 = df_wm.plot.scatter(x="longitude", y="latitude", c="kmh", cmap="hot")
# zoom 17 is pretty
cx.add_basemap(ax2, source=cx.providers.OpenStreetMap.Mapnik, zoom=17)
# gdf.plot()
ax2.set_axis_off()
if not args.no_legend:
ax2.set_title(args.label if args.label else args.column)
else:
fig = ax2.figure
cb_ax = fig.axes[0]
cb_ax.set_label(args.label)
cb_ax.tick_params(labelsize=30)
if args.show_providerinfo:
#####################################
# Identifying how many tiles
latlon_outline = gdf.to_crs("epsg:4326").total_bounds
def_zoom = cx.tile._calculate_zoom(*latlon_outline)
print(f"Default Zoom level {def_zoom}")
cx.howmany(*latlon_outline, def_zoom, ll=True)
cx.howmany(*latlon_outline, def_zoom + 1, ll=True)
cx.howmany(*latlon_outline, def_zoom + 2, ll=True)
# Checking out some of the other providers and tiles
print(cx.providers.CartoDB.Voyager)
print(cx.providers.Stamen.TonerLite)
print(cx.providers.Stamen.keys())
#####################################
# df.plot(x="longitude", y="latitude", kind="scatter", colormap="YlOrRd")
if args.save:
plt.savefig("{}gps_plot.eps".format(args.save), bbox_inches="tight")
else:
plt.show()

71
plot_gps_new.py Executable file
View File

@@ -0,0 +1,71 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
import pandas as pd
import geopandas as gpd
import contextily as cx
import matplotlib.pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="Measurement drive CSV")
parser.add_argument("-a", "--column", required=True, help="Column to plot")
parser.add_argument("-l", "--label", help="Label above the plot.")
parser.add_argument("--no_legend", action="store_false", default=True, help="Do not show legend.")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument(
"--show_providerinfo",
action="store_true",
default=False,
help="Show provider info for map tiles and zoom levels.",
)
args = parser.parse_args()
df = pd.read_csv(args.file)
gdf = gpd.GeoDataFrame(
df,
geometry=gpd.points_from_xy(df["longitude"], df["latitude"]),
crs="EPSG:4326",
)
gdf["srtt"] = gdf["srtt"].apply(lambda x: x / 10 ** 6)
print("Start plotting...")
df_wm = gdf.to_crs(epsg=3857)
ax2 = df_wm.plot()
ax2 = df_wm.plot(args.column, cmap="hot", legend=args.no_legend, ax=ax2)
# ax2 = df_wm.plot.scatter(x="longitude", y="latitude", c="kmh", cmap="hot")
# zoom 17 is pretty
cx.add_basemap(ax2, source=cx.providers.OpenStreetMap.Mapnik, zoom=17)
# gdf.plot()
ax2.set_axis_off()
ax2.set_title(args.label if args.label else args.column)
if args.show_providerinfo:
#####################################
# Identifying how many tiles
latlon_outline = gdf.to_crs("epsg:4326").total_bounds
def_zoom = cx.tile._calculate_zoom(*latlon_outline)
print(f"Default Zoom level {def_zoom}")
cx.howmany(*latlon_outline, def_zoom, ll=True)
cx.howmany(*latlon_outline, def_zoom + 1, ll=True)
cx.howmany(*latlon_outline, def_zoom + 2, ll=True)
# Checking out some of the other providers and tiles
print(cx.providers.CartoDB.Voyager)
print(cx.providers.Stamen.TonerLite)
print(cx.providers.Stamen.keys())
#####################################
# df.plot(x="longitude", y="latitude", kind="scatter", colormap="YlOrRd")
if args.save:
plt.savefig("{}gps_plot.pdf".format(args.save))
else:
plt.show()

View File

166
plot_single_transmission.py Executable file
View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
# Using seaborn's style
#plt.style.use('seaborn')
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
#plt.rcParams.update(tex_fonts)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list) * 100)))
#try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6)
# key for columns and level for index
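# Goodput per interval: sum the payload bytes, then convert with (bytes * 8) / interval / 1e6 to Mbit/s; goodput_rolling does the same over a sliding window of the same length.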
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10**6
)
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
#transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
serial_df = pd.read_csv(args.serial_file)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df = serial_df.sort_index()
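# Nearest-timestamp join: attach the most recent modem status sample (at most 1 s old) to every packet row.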
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
transmission_df = transmission_df.rename(columns={"PCID": "lte_pcid", "PCID.1": "nr_pcid"})
transmission_df.index = transmission_df["arrival_time"]
# transmission timeline
scaley = 1.5
scalex = 1.0
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
fig.subplots_adjust(right=0.75)
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
ax0 = ax[0]
ax1 = ax0.twinx()
ax2 = ax0.twinx()
#ax2.spines.right.set_position(("axes", 1.22))
ax00 = ax[1]
ax01 = ax00.twinx()
# Plot vertical lines
lte_handovers = transmission_df["lte_pcid"].diff().dropna()
for index, value in lte_handovers.items():
if value > 0:
ax00.axvline(index, ymin=0, ymax=1, color="skyblue", label="4G Handover")
nr_handovers = transmission_df["nr_pcid"].diff().dropna()
for index, value in nr_handovers.items():
if value > 0:
ax00.axvline(index, ymin=0, ymax=1, color="greenyellow", label="5G Handover")
ax0.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd")
ax1.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT")
ax2.plot(transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput")
ax00.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI")
ax01.plot(transmission_df["DL_bandwidth"].dropna(), color="peru", linestyle="dotted", label="bandwidth")
ax2.spines.right.set_position(("axes", 1.1))
ax0.set_ylim(0, 5000)
ax1.set_ylim(0, 0.3)
ax2.set_ylim(0, 500)
ax00.set_ylim(0, 16)
ax01.set_ylim(0, 21)
ax00.set_xlabel("arrival time [s]")
ax2.set_ylabel("Goodput [mbps]")
ax00.set_ylabel("CQI")
ax1.set_ylabel("sRTT [s]")
ax0.set_ylabel("cwnd")
ax01.set_ylabel("Bandwidth [MHz]")
fig.legend(loc="lower right")
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
#except Exception as e:
# print("Error processing file: {}".format(csv))
# print(str(e))
counter += 1
plt.close(fig)
plt.clf()

View File

@@ -0,0 +1,343 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
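# Cell ids arrive as strings ending in a parenthesised number (format inferred from the parsing below); return that number, or -1 for missing/unparsable values.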
if isinstance(value, str):
try:
r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
return r
except Exception as e:
return -1
else:
return int(-1)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument(
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
)
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
print(
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list) * 100)
)
)
# try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df["datetime"] = pd.to_datetime(
transmission_df["datetime"]
) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
transmission_df["goodput"] = (
transmission_df["payload_size"]
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
.transform("sum")
)
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
transmission_df["goodput_rolling"] = (
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
)
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
serial_df = pd.read_csv(
args.serial_file, converters={"Cell_ID": convert_cellid}
)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(
hours=1
)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df = serial_df.sort_index()
# print(serial_df["Cell_ID"])
# serial_df["Cell_ID"] = serial_df["Cell_ID"].apply(
# lambda x: int(x.split(" ")[-1].replace("(", "").replace(")", "")))
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
transmission_df.index = transmission_df["arrival_time"]
# replace 0 in RSRQ with Nan
transmission_df["NR5G_RSRQ_(dB)"] = transmission_df["NR5G_RSRQ_(dB)"].replace(
0, np.NaN
)
transmission_df["RSRQ_(dB)"] = transmission_df["RSRQ_(dB)"].replace(0, np.NaN)
# filter active state
for i in range(1, 5):
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_bw".format(i)
]
mask = transmission_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# zero out bandwidth of SCCs that are configured for uplink
for i in range(1, 5):
mask = transmission_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
transmission_df["LTE_SCC{}_effective_bw".format(i)] = transmission_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# sum all effective bandwidth for 5G and 4G
transmission_df["SCC1_NR5G_effective_bw"] = transmission_df[
"SCC1_NR5G_bw"
].fillna(0)
transmission_df["lte_effective_bw_sum"] = (
transmission_df["LTE_SCC1_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC2_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC3_effective_bw"].fillna(0)
+ transmission_df["LTE_SCC4_effective_bw"].fillna(0)
+ transmission_df["LTE_bw"].fillna(0))
transmission_df["nr_effective_bw_sum"] = transmission_df["SCC1_NR5G_effective_bw"]
transmission_df["effective_bw_sum"] = transmission_df["nr_effective_bw_sum"] + transmission_df[
"lte_effective_bw_sum"]
# transmission timeline
scaley = 1.5
scalex = 1.0
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
fig.subplots_adjust(right=0.75)
if not args.fancy:
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
ax0 = ax[0]
ax1 = ax0.twinx()
ax2 = ax0.twinx()
# ax2.spines.right.set_position(("axes", 1.22))
ax00 = ax[1]
ax01 = ax00.twinx()
ax02 = ax00.twinx()
# Plot vertical lines
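# A positive jump in the serving Cell_ID / NR5G_Cell_ID is treated as a handover and drawn as a vertical line; the "first" flag keeps each label from appearing more than once in the legend.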
first = True
lte_handovers = transmission_df["Cell_ID"].dropna().diff()
lte_hanover_plot = None
for index, value in lte_handovers.items():
if value > 0:
if first:
lte_hanover_plot = ax00.axvline(
index, ymin=0, ymax=1, color="skyblue", label="4G Handover"
)
first = False
else:
ax00.axvline(index, ymin=0, ymax=1, color="skyblue")
first = True
nr_handovers = (
transmission_df["NR5G_Cell_ID"].replace(0, np.NaN).dropna().diff()
)
nr_hanover_plot = None
for index, value in nr_handovers.items():
if value > 0:
if first:
nr_hanover_plot = ax00.axvline(
index, ymin=0, ymax=1, color="greenyellow", label="5G Handover"
)
first = False
else:
ax00.axvline(index, ymin=0, ymax=1, color="greenyellow")
snd_plot = ax0.plot(
transmission_df["snd_cwnd"].dropna(),
color="lime",
linestyle="dashed",
label="cwnd",
)
srtt_plot = ax1.plot(
transmission_df["srtt"].dropna(),
color="red",
linestyle="dashdot",
label="sRTT",
)
goodput_plot = ax2.plot(
transmission_df["goodput_rolling"],
color="blue",
linestyle="solid",
label="goodput",
)
# ax2.plot(transmission_df["goodput"], color="blue", linestyle="solid", label="goodput")
eff_bw_plot = ax01.plot(
transmission_df["effective_bw_sum"].dropna(),
color="peru",
linestyle="solid",
label="bandwidth",
)
lte_eff_bw_plot = ax01.plot(
transmission_df["lte_effective_bw_sum"].dropna(),
color="lightsteelblue",
linestyle="solid",
label="4G bandwidth",
alpha=0.5,
)
nr_eff_bw_plot = ax01.plot(
transmission_df["nr_effective_bw_sum"].dropna(),
color="cornflowerblue",
linestyle="solid",
label="5G bandwidth",
alpha=0.5,
)
# ax01.stackplot(transmission_df["arrival_time"].to_list(),
# [transmission_df["lte_bw_sum"].to_list(), transmission_df["nr_bw_sum"].to_list()],
# colors=["lightsteelblue", "cornflowerblue"],
# labels=["4G bandwidth", "5G bandwidth"]
# )
lte_rsrq_plot = ax02.plot(
transmission_df["RSRQ_(dB)"].dropna(),
color="purple",
linestyle="dotted",
label="LTE RSRQ",
)
nr_rsrq_plot = ax00.plot(
transmission_df["NR5G_RSRQ_(dB)"].dropna(),
color="magenta",
linestyle="dotted",
label="NR RSRQ",
)
ax2.spines.right.set_position(("axes", 1.1))
ax02.spines.right.set_position(("axes", 1.1))
ax0.set_ylim(0, 5000)
ax1.set_ylim(0, 0.3)
ax2.set_ylim(0, 600)
ax00.set_ylim(-25, 0)
ax01.set_ylim(0, 250)
# second dB axis
ax02.set_ylim(-25, 0)
ax02.set_axis_off()
ax00.set_xlabel("arrival time [s]")
ax2.set_ylabel("Goodput [mbps]")
ax00.set_ylabel("LTE/NR RSRQ [dB]")
# ax02.set_ylabel("LTE RSRQ [dB]")
ax1.set_ylabel("sRTT [s]")
ax0.set_ylabel("cwnd [MSS]")
ax01.set_ylabel("Bandwidth [MHz]")
if args.fancy:
legend_frame = False
ax0.set_xlim([0, transmission_df.index[-1]])
ax00.set_xlim([0, transmission_df.index[-1]])
# added these three lines
lns_ax0 = snd_plot + srtt_plot + goodput_plot
labs_ax0 = [l.get_label() for l in lns_ax0]
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
#ax0.set_zorder(100)
lns_ax00 = eff_bw_plot + lte_eff_bw_plot + nr_eff_bw_plot + lte_rsrq_plot + nr_rsrq_plot
if lte_hanover_plot:
lns_ax00.append(lte_hanover_plot)
if nr_hanover_plot:
lns_ax00.append(nr_hanover_plot)
labs_ax00 = [l.get_label() for l in lns_ax00]
ax02.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
#ax00.set_zorder(100)
plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
else:
fig.legend(loc="lower right")
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
# except Exception as e:
# print("Error processing file: {}".format(csv))
# print(str(e))
counter += 1
plt.close(fig)
plt.clf()

269
plot_single_transmission_paper.py Executable file
View File

@@ -0,0 +1,269 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
#sns.set(font_scale=1.5)
tex_fonts = {
"pgf.texsystem": "lualatex",
# "legend.fontsize": "x-large",
# "figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
# "axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
# plt.rcParams.update(tex_fonts)
def convert_cellid(value):
if isinstance(value, str):
try:
r = int(value.split(" ")[-1].replace("(", "").replace(")", ""))
return r
except Exception as e:
return -1
else:
return int(-1)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument(
"-p", "--pcap_csv_folder", required=True, help="PCAP csv folder."
)
parser.add_argument("--save", required=True, help="Location to save pdf file.")
parser.add_argument("--fancy", action="store_true", help="Create fancy plot.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
print(
"\rProcessing {} out of {} CSVs.\t({}%)\t".format(
counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list) * 100)
)
)
# try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
transmission_df["goodput"] = (
transmission_df["payload_size"]
.groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval)))
.transform("sum")
)
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
transmission_df["goodput_rolling"] = (
transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
)
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
# transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
serial_df = pd.read_csv(
args.serial_file, converters={"Cell_ID": convert_cellid}
)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df = serial_df.sort_index()
# Select DataFrame rows between two dates
mask = (serial_df.index >= transmission_df.index[0]) & (serial_df.index <= transmission_df.index[-1])
serial_df = serial_df.loc[mask]
serial_df["arrival_time"] = (serial_df["time"] - serial_df["time"].iloc[0]) * 60
serial_df.index = serial_df["arrival_time"]
transmission_df.index = transmission_df["arrival_time"]
# filter active state
for i in range(1, 5):
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
"LTE_SCC{}_bw".format(i)
]
mask = serial_df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# zero out bandwidth of SCCs that are configured for uplink
for i in range(1, 5):
mask = serial_df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
serial_df["LTE_SCC{}_effective_bw".format(i)] = serial_df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# sum all effective bandwidth for 5G and 4G
serial_df["SCC1_NR5G_effective_bw"] = serial_df["SCC1_NR5G_bw"].fillna(0)
serial_df["effective_bw_sum"] = (
serial_df["SCC1_NR5G_effective_bw"]
+ serial_df["LTE_SCC1_effective_bw"]
+ serial_df["LTE_SCC2_effective_bw"]
+ serial_df["LTE_SCC3_effective_bw"]
+ serial_df["LTE_SCC4_effective_bw"]
+ serial_df["LTE_bw"]
)
bw_cols = [
"SCC1_NR5G_effective_bw",
"LTE_bw",
"LTE_SCC1_effective_bw",
"LTE_SCC2_effective_bw",
"LTE_SCC3_effective_bw",
"LTE_SCC4_effective_bw",
]
# transmission timeline
scaley = 1.5
scalex = 1.0
fig, ax = plt.subplots(2, 1, figsize=[6.4 * scaley, 4.8 * scalex])
fig.subplots_adjust(right=0.75)
if not args.fancy:
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig.suptitle("{} with {}".format(transmission_direction, cc_algo))
ax0 = ax[0]
ax1 = ax0.twinx()
ax2 = ax0.twinx()
# ax2.spines.right.set_position(("axes", 1.22))
ax00 = ax[1]
snd_plot = ax0.plot(
transmission_df["snd_cwnd"].dropna(),
color="darkorange",
linestyle="dashed",
label="cwnd",
)
srtt_plot = ax1.plot(
transmission_df["srtt"].dropna(),
color="maroon",
linestyle="dotted",
label="sRTT",
)
goodput_plot = ax2.plot(
transmission_df["goodput_rolling"],
color="blue",
linestyle="solid",
label="goodput",
)
serial_df["time_rel"] = serial_df["time"] - serial_df["time"].iloc[0]
serial_df.index = serial_df["time_rel"]
ax_stacked = serial_df[bw_cols].plot.area(stacked=True, linewidth=0, ax=ax00)
ax00.set_ylabel("bandwidth [MHz]")
ax00.set_ylim(0, 200)
#ax.set_xlabel("time [minutes]")
#ax00.set_xlim([0, transmission_df.index[-1]])
ax00.xaxis.grid(True)
ax2.spines.right.set_position(("axes", 1.1))
ax0.set_ylim(0, 5000) #2500
ax1.set_ylim(0, 2) #0.3
ax2.set_ylim(0, 500)
#ax00.set_ylim(-25, 0)
ax00.set_xlabel("time [s]")
ax2.set_ylabel("goodput [mbps]")
#ax00.set_ylabel("LTE/NR RSRQ [dB]")
# ax02.set_ylabel("LTE RSRQ [dB]")
ax1.set_ylabel("sRTT [s]")
ax0.set_ylabel("cwnd [MSS]")
if args.fancy:
legend_frame = False
ax0.set_xlim([0, 60])
ax00.set_xlim([0, 60])
# added these three lines
lns_ax0 = snd_plot + srtt_plot + goodput_plot
labs_ax0 = [l.get_label() for l in lns_ax0]
ax2.legend(lns_ax0, labs_ax0, ncols=9, fontsize=9, loc="upper right", frameon=legend_frame)
#ax0.set_zorder(100)
#lns_ax00 = [ax_stacked]
#labs_ax00 = ["5G bandwidth", "4G bandwidth"]
#ax00.legend(lns_ax00, labs_ax00, ncols=3, fontsize=9, loc="upper center", frameon=legend_frame)
L = ax00.legend(ncols=3, fontsize=9, frameon=False)
L.get_texts()[0].set_text("5G main")
L.get_texts()[1].set_text("4G main")
L.get_texts()[2].set_text("4G SCC 1")
L.get_texts()[3].set_text("4G SCC 2")
L.get_texts()[4].set_text("4G SCC 3")
L.get_texts()[5].set_text("4G SCC 4")
#ax00.set_zorder(100)
plt.savefig("{}{}_plot.eps".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
#serial_df.to_csv("{}{}_plot.csv".format(args.save, csv.replace(".csv", "")))
else:
fig.legend(loc="lower right")
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")), bbox_inches="tight")
# except Exception as e:
# print("Error processing file: {}".format(csv))
# print(str(e))
counter += 1
plt.close(fig)
plt.clf()

View File

@@ -0,0 +1,179 @@
#!/usr/bin/env python3
import math
import multiprocessing
import os
from argparse import ArgumentParser
import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
manager = multiprocessing.Manager()
n = manager.Value("i", 0)
frame_list = manager.list()
jobs = []
# load all pcap csv into one dataframe
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
counter = 1
if len(pcap_csv_list) == 0:
print("No CSV files found.")
pcap_csv_list.sort(key=lambda x: int(x.split("_")[-1].replace(".csv", "")))
for csv in pcap_csv_list:
print("\rProcessing {} out of {} CSVs.\t({}%)\t".format(counter, len(pcap_csv_list), math.floor(counter / len(pcap_csv_list) * 100)))
try:
transmission_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
transmission_df["datetime"] = pd.to_datetime(transmission_df["datetime"]) - pd.Timedelta(hours=1)
transmission_df = transmission_df.set_index("datetime")
transmission_df.index = pd.to_datetime(transmission_df.index)
transmission_df = transmission_df.sort_index()
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10**6)
# key for columns and level for index
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10**6
)
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
#transmission_df = transmission_df.filter(["goodput", "datetime", "ack_rtt", "goodput_rolling", "snd_cwnd"])
# read serial csv
serial_df = pd.read_csv(args.serial_file)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df = serial_df.sort_index()
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
# transmission timeline
scaley = 1.5
scalex = 1.0
fig, ax = plt.subplots(figsize=[6.4 * scaley, 4.8 * scalex])
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig.subplots_adjust(right=0.75)
twin1 = ax.twinx()
twin2 = ax.twinx()
twin3 = ax.twinx()
twin4 = ax.twinx()
# Offset the right spine of twin2. The ticks and label have already been
# placed on the right by twinx above.
twin2.spines.right.set_position(("axes", 1.1))
twin3.spines.right.set_position(("axes", 1.2))
twin4.spines.right.set_position(("axes", 1.3))
# create list of color indices
transmission_df["index"] = transmission_df.index
color_dict = dict()
color_list = list()
i = 0
for cell_id in transmission_df["cellID"]:
if cell_id not in color_dict:
color_dict[cell_id] = i
i += 1
color_list.append(color_dict[cell_id])
transmission_df["cell_color"] = color_list
color_dict = None
color_list = None
cmap = matplotlib.cm.get_cmap("Set3")
unique_cells = transmission_df["cell_color"].unique()
color_list = cmap.colors * (round(len(unique_cells) / len(cmap.colors)) + 1)
for c in transmission_df["cell_color"].unique():
bounds = transmission_df[["index", "cell_color"]].groupby("cell_color").agg(["min", "max"]).loc[c]
ax.axvspan(bounds.min(), bounds.max(), alpha=0.3, color=color_list[c])
p4, = twin3.plot(transmission_df["snd_cwnd"].dropna(), color="lime", linestyle="dashed", label="cwnd")
p3, = twin2.plot(transmission_df["srtt"].dropna(), color="red", linestyle="dashdot", label="sRTT")
p1, = ax.plot(transmission_df["goodput_rolling"], color="blue", linestyle="solid", label="goodput")
p2, = twin1.plot(transmission_df["downlink_cqi"].dropna(), color="magenta", linestyle="dotted", label="CQI")
p5, = twin4.plot(transmission_df["DL_bandwidth"].dropna(), color="peru", linestyle="dotted", label="DL_bandwidth")
ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max())
ax.set_ylim(0, 500)
twin1.set_ylim(0, 15)
twin2.set_ylim(0, 0.2) #twin2.set_ylim(0, transmission_df["ack_rtt"].max())
twin3.set_ylim(0, transmission_df["snd_cwnd"].max() + 10)
twin4.set_ylim(0, 21)
ax.set_xlabel("arrival time")
ax.set_ylabel("Goodput [mbps]")
twin1.set_ylabel("CQI")
twin2.set_ylabel("sRTT [s]")
twin3.set_ylabel("cwnd")
twin4.set_ylabel("DL_bandwidth")
ax.yaxis.label.set_color(p1.get_color())
twin1.yaxis.label.set_color(p2.get_color())
twin2.yaxis.label.set_color(p3.get_color())
twin3.yaxis.label.set_color(p4.get_color())
twin4.yaxis.label.set_color(p5.get_color())
tkw = dict(size=4, width=1.5)
ax.tick_params(axis='y', colors=p1.get_color(), **tkw)
twin1.tick_params(axis='y', colors=p2.get_color(), **tkw)
twin2.tick_params(axis='y', colors=p3.get_color(), **tkw)
twin3.tick_params(axis='y', colors=p4.get_color(), **tkw)
twin4.tick_params(axis='y', colors=p5.get_color(), **tkw)
ax.tick_params(axis='x', **tkw)
#ax.legend(handles=[p1, p2, p3])
if args.save:
plt.savefig("{}{}_plot.pdf".format(args.save, csv.replace(".csv", "")))
except Exception as e:
print("Error processing file: {}".format(csv))
print(str(e))
counter += 1
plt.clf()

95
plot_stacked_bandwidth.py Executable file
View File

@@ -0,0 +1,95 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
import pandas as pd
import matplotlib.pyplot as plt
plt_params = {
"pgf.texsystem": "lualatex",
#"legend.fontsize": "x-large",
#"figure.figsize": (15, 5),
"axes.labelsize": 15, # "small",
"axes.titlesize": "x-large",
"xtick.labelsize": 15, # "small",
"ytick.labelsize": 15, # "small",
"legend.fontsize": 15,
"axes.formatter.use_mathtext": True,
"mathtext.fontset": "dejavusans",
}
#plt.rcParams.update(plt_params)
import seaborn as sns
sns.set()
sns.set(font_scale=1.5)
plt.rcParams["figure.figsize"] = (10, 3)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-f", "--file", required=True, help="Serial CSV")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
args = parser.parse_args()
df = pd.read_csv(args.file)
df["time_rel"] = df["time"] - df["time"].iloc[0]
df.index = df["time_rel"] / 60
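# Effective bandwidth: an LTE SCC only contributes while it is ACTIVE and not configured for uplink; the NR SCC and the LTE anchor carrier always count.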
# filter active state
for i in range(1, 5):
df["LTE_SCC{}_effective_bw".format(i)] = df["LTE_SCC{}_bw".format(i)]
mask = df["LTE_SCC{}_state".format(i)].isin(["ACTIVE"])
df["LTE_SCC{}_effective_bw".format(i)] = df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# zero out bandwidth of SCCs that are configured for uplink
for i in range(1, 5):
mask = df["LTE_SCC{}_UL_Configured".format(i)].isin([False])
df["LTE_SCC{}_effective_bw".format(i)] = df[
"LTE_SCC{}_effective_bw".format(i)
].where(mask, other=0)
# sum all effective bandwidth for 5G and 4G
df["SCC1_NR5G_effective_bw"] = df["SCC1_NR5G_bw"].fillna(0)
df["effective_bw_sum"] = (
df["SCC1_NR5G_effective_bw"]
+ df["LTE_SCC1_effective_bw"]
+ df["LTE_SCC2_effective_bw"]
+ df["LTE_SCC3_effective_bw"]
+ df["LTE_SCC4_effective_bw"]
+ df["LTE_bw"]
)
bw_cols = [
"SCC1_NR5G_effective_bw",
"LTE_bw",
"LTE_SCC1_effective_bw",
"LTE_SCC2_effective_bw",
"LTE_SCC3_effective_bw",
"LTE_SCC4_effective_bw",
]
ax = df[bw_cols].plot.area(stacked=True, linewidth=0)
ax.set_ylabel("bandwidth [MHz]")
ax.set_xlabel("time [minutes]")
ax.set_xlim([0, df.index[-1]])
ax.xaxis.grid(False)
L = plt.legend(ncols=2, fontsize=12, frameon=False)
L.get_texts()[0].set_text("5G main")
L.get_texts()[1].set_text("4G main")
L.get_texts()[2].set_text("4G SCC 1")
L.get_texts()[3].set_text("4G SCC 2")
L.get_texts()[4].set_text("4G SCC 3")
L.get_texts()[5].set_text("4G SCC 4")
if args.save:
plt.savefig("{}-used_bandwidth.eps".format(args.save), bbox_inches="tight")
else:
plt.show()

281
plot_transmission_timeline.py Executable file
View File

@@ -0,0 +1,281 @@
#!/usr/bin/env python3
import multiprocessing
import os
import pickle
from argparse import ArgumentParser
from math import ceil
from time import sleep
import matplotlib
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits import axisartist
from mpl_toolkits.axes_grid1 import host_subplot
def csv_to_dataframe(csv_list, dummy):
global n
global frame_list
transmission_df = None
for csv in csv_list:
tmp_df = pd.read_csv(
"{}{}".format(args.pcap_csv_folder, csv),
dtype=dict(is_retranmission=bool, is_dup_ack=bool),
)
tmp_df["datetime"] = pd.to_datetime(tmp_df["datetime"]) - pd.Timedelta(hours=1)
tmp_df = tmp_df.set_index("datetime")
tmp_df.index = pd.to_datetime(tmp_df.index)
if transmission_df is None:
transmission_df = tmp_df
else:
transmission_df = pd.concat([transmission_df, tmp_df])
n.value += 1
frame_list.append(transmission_df)
from itertools import islice
def chunk(it, size):
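# Split an iterable into consecutive tuples of at most size elements (the last chunk may be shorter).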
it = iter(it)
return iter(lambda: tuple(islice(it, size)), ())
def plot_cdf(dataframe, column_name):
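# Empirical CDF: count how often each value occurs, normalise the counts to a PDF, then take the cumulative sum.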
stats_df = dataframe \
.groupby(column_name) \
[column_name] \
.agg("count") \
.pipe(pd.DataFrame) \
.rename(columns={column_name: "frequency"})
# PDF
stats_df["PDF"] = stats_df["frequency"] / sum(stats_df["frequency"])
# CDF
stats_df["CDF"] = stats_df["PDF"].cumsum()
stats_df = stats_df.reset_index()
stats_df.plot(x=column_name, y=["CDF"], grid=True)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial_file", required=True, help="Serial csv file.")
parser.add_argument("-p", "--pcap_csv_folder", required=True, help="PCAP csv folder.")
parser.add_argument("--save", default=None, help="Location to save pdf file.")
parser.add_argument("--export", default=None, help="Export figure as an pickle file.")
parser.add_argument(
"-c",
"--cores",
default=1,
type=int,
help="Number of cores for multiprocessing.",
)
parser.add_argument(
"-i",
"--interval",
default=10,
type=int,
help="Time interval for rolling window.",
)
args = parser.parse_args()
manager = multiprocessing.Manager()
n = manager.Value("i", 0)
frame_list = manager.list()
jobs = []
# load all pcap csv into one dataframe
pcap_csv_list = list()
for filename in os.listdir(args.pcap_csv_folder):
if filename.endswith(".csv") and "tcp" in filename:
pcap_csv_list.append(filename)
parts = chunk(pcap_csv_list, ceil(len(pcap_csv_list) / args.cores))
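# Split the csv list into one chunk per worker; each worker concatenates its chunk into a partial dataframe collected in frame_list.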
print("Start processing with {} jobs.".format(args.cores))
for p in parts:
process = multiprocessing.Process(target=csv_to_dataframe, args=(p, "dummy"))
jobs.append(process)
for j in jobs:
j.start()
print("Started all jobs.")
# Ensure all the processes have finished
finished_job_counter = 0
working = ["|", "/", "-", "\\", "|", "/", "-", "\\"]
w = 0
while len(jobs) != finished_job_counter:
sleep(1)
print(
"\r\t{}{}{}\t Running {} jobs ({} finished). Processed {} out of {} pcap csv files. ({}%) ".format(
working[w],
working[w],
working[w],
len(jobs),
finished_job_counter,
n.value,
len(pcap_csv_list),
round((n.value / len(pcap_csv_list)) * 100, 2),
),
end="",
)
finished_job_counter = 0
for j in jobs:
if not j.is_alive():
finished_job_counter += 1
if (w + 1) % len(working) == 0:
w = 0
else:
w += 1
print("\r\nSorting table...")
transmission_df = pd.concat(frame_list)
frame_list = None
transmission_df = transmission_df.sort_index()
print("Calculate goodput...")
#print(transmission_df)
# srtt to [s]
transmission_df["srtt"] = transmission_df["srtt"].apply(lambda x: x / 10 ** 6)
# key for columns and level for index
transmission_df["goodput"] = transmission_df["payload_size"].groupby(pd.Grouper(level="datetime", freq="{}s".format(args.interval))).transform("sum")
transmission_df["goodput"] = transmission_df["goodput"].apply(
lambda x: ((x * 8) / args.interval) / 10**6
)
transmission_df["goodput_rolling"] = transmission_df["payload_size"].rolling("{}s".format(args.interval)).sum()
transmission_df["goodput_rolling"] = transmission_df["goodput_rolling"].apply(
lambda x: ((x * 8) / args.interval) / 10 ** 6
)
# set meta values and remove all not needed columns
cc_algo = transmission_df["congestion_control"].iloc[0]
cc_algo = cc_algo.upper()
transmission_direction = transmission_df["direction"].iloc[0]
transmission_df = transmission_df.filter(["goodput", "datetime", "srtt", "goodput_rolling"])
# read serial csv
serial_df = pd.read_csv(args.serial_file)
serial_df["datetime"] = pd.to_datetime(serial_df["datetime"]) - pd.Timedelta(hours=1)
serial_df = serial_df.set_index("datetime")
serial_df.index = pd.to_datetime(serial_df.index)
serial_df = serial_df.sort_index()
transmission_df = pd.merge_asof(
transmission_df,
serial_df,
tolerance=pd.Timedelta("1s"),
right_index=True,
left_index=True,
)
# transmission timeline
scaley = 1.5
scalex = 1.0
fig, ax = plt.subplots(figsize=[6.4 * scaley, 4.8 * scalex])
plt.title("{} with {}".format(transmission_direction, cc_algo))
fig.subplots_adjust(right=0.75)
twin1 = ax.twinx()
twin2 = ax.twinx()
# Offset the right spine of twin2. The ticks and label have already been
# placed on the right by twinx above.
twin2.spines.right.set_position(("axes", 1.2))
# create list of color indices
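# Each distinct cellID gets its own colour index; axvspan later shades the time range spent on that cell, so handovers show up as colour changes.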
transmission_df["index"] = transmission_df.index
color_dict = dict()
color_list = list()
i = 0
for cell_id in transmission_df["cellID"]:
if cell_id not in color_dict:
color_dict[cell_id] = i
i += 1
color_list.append(color_dict[cell_id])
transmission_df["cell_color"] = color_list
color_dict = None
color_list = None
cmap = matplotlib.cm.get_cmap("Set3")
unique_cells = transmission_df["cell_color"].unique()
color_list = cmap.colors * (round(len(unique_cells) / len(cmap.colors)) + 1)
for c in transmission_df["cell_color"].unique():
bounds = transmission_df[["index", "cell_color"]].groupby("cell_color").agg(["min", "max"]).loc[c]
ax.axvspan(bounds.min(), bounds.max(), alpha=0.3, color=color_list[c])
p1, = ax.plot(transmission_df["goodput_rolling"], "-", color="blue", label="goodput")
p2, = twin1.plot(transmission_df["downlink_cqi"], "--", color="green", label="CQI")
p3, = twin2.plot(transmission_df["srtt"], "-.", color="red", label="sRTT")
ax.set_xlim(transmission_df["index"].min(), transmission_df["index"].max())
ax.set_ylim(0, 500)
twin1.set_ylim(0, 15)
twin2.set_ylim(0, 1)
ax.set_xlabel("Time")
ax.set_ylabel("Goodput")
twin1.set_ylabel("CQI")
twin2.set_ylabel("sRTT")
ax.yaxis.label.set_color(p1.get_color())
twin1.yaxis.label.set_color(p2.get_color())
twin2.yaxis.label.set_color(p3.get_color())
tkw = dict(size=4, width=1.5)
ax.tick_params(axis='y', colors=p1.get_color(), **tkw)
twin1.tick_params(axis='y', colors=p2.get_color(), **tkw)
twin2.tick_params(axis='y', colors=p3.get_color(), **tkw)
ax.tick_params(axis='x', **tkw)
#ax.legend(handles=[p1, p2, p3])
if args.save:
plt.savefig("{}timeline_plot.pdf".format(args.save))
if args.export:
pickle.dump(fig, open("{}timeline_plot.pkl".format(args.export), "wb"))
#goodput cdf
plt.clf()
print("Calculate and plot goodput CDF...")
plot_cdf(transmission_df, "goodput")
plt.xlabel("goodput [mbps]")
plt.ylabel("CDF")
plt.legend([cc_algo])
plt.title("{} with {}".format(transmission_direction, cc_algo))
if args.save:
plt.savefig("{}{}_cdf_plot.pdf".format(args.save, "goodput"))
else:
plt.show()
# rtt cdf
plt.clf()
print("Calculate and plot sRTT CDF...")
plot_cdf(transmission_df, "srtt")
plt.xlabel("sRTT [s]")
plt.ylabel("CDF")
plt.xscale("log")
plt.legend([cc_algo])
plt.title("{} with {}".format(transmission_direction, cc_algo))
if args.save:
plt.savefig("{}{}_cdf_plot.pdf".format(args.save, "srtt"))
else:
plt.show()

34
reset_modem_gps.py Normal file
View File

@@ -0,0 +1,34 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
import serial
from time import sleep
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--serial", required=True, help="Serial Interface")
args = parser.parse_args()
command_order = [
b'AT!CUSTOM="GPSENABLE",1',
b'AT!CUSTOM="GPSSEL",0',
b'AT!CUSTOM="GPSLPM",0',
b'AT!GPSNMEACONFIG=1,1',
b'AT+WANT=1',
b'AT!GPSNMEASENTENCE=FF'
]
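# Vendor-specific Sierra Wireless GPS setup commands; meanings assumed from their names: enable the GNSS receiver, select the GNSS source, disable GPS low-power mode, output NMEA sentences once per second, and enable all NMEA sentence types.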
ser = serial.Serial(
port=args.serial,
baudrate=115200,
)
if ser.is_open:
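# AT commands are only executed once terminated with CR/LF, matching the write convention used in the measurement script.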
ser.write(b'At!Reset\r\n')
sleep(0.5)
ser.write(b'AT!ENTERCND="A710"\r\n')
sleep(0.5)
for cmd in command_order:
ser.write(cmd + b'\r\n')
sleep(0.5)
print(cmd)