Commit 563fd66b authored by hardy

bring ulsch and dlsch stats for gnb

parent 30c5668b
@@ -483,12 +483,13 @@ class RANManagement():
     #stats monitoring during runtime
     time.sleep(20)
-    monitor_file='stats_monitor.py'
+    monitor_file='../ci-scripts/stats_monitor.py'
+    conf_file='../ci-scripts/stats_monitor_conf.yaml'
     if self.eNB_Stats=='yes':
         if (self.air_interface[self.eNB_instance] == 'lte-softmodem') or (self.air_interface[self.eNB_instance] == 'ocp-enb'):
-            mySSH.command('echo $USER; nohup python3 ../ci-scripts/' + monitor_file + ' enb 2>&1 > enb_stats_monitor_execution.log &', '\$', 5)
+            mySSH.command('echo $USER; nohup python3 ' + monitor_file + ' ' + conf_file + ' enb 2>&1 > enb_stats_monitor_execution.log &', '\$', 5)
         else:
-            mySSH.command('echo $USER; nohup python3 ../ci-scripts/' + monitor_file + ' gnb 2>&1 > gnb_stats_monitor_execution.log &', '\$', 5)
+            mySSH.command('echo $USER; nohup python3 ' + monitor_file + ' ' + conf_file + ' gnb 2>&1 > gnb_stats_monitor_execution.log &', '\$', 5)
...
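For reference, a minimal sketch of the remote command string the updated code assembles (values taken from the hunk above; the node name is factored into a variable here for brevity, whereas the diff hard-codes 'enb' and 'gnb' in the two branches):

# Sketch: reproduce the string concatenation from the hunk above; the mySSH
# wrapper that actually runs it on the eNB/gNB host is not shown here.
monitor_file = '../ci-scripts/stats_monitor.py'
conf_file = '../ci-scripts/stats_monitor_conf.yaml'
node = 'gnb'
cmd = ('echo $USER; nohup python3 ' + monitor_file + ' ' + conf_file
       + ' ' + node + ' 2>&1 > ' + node + '_stats_monitor_execution.log &')
print(cmd)
# echo $USER; nohup python3 ../ci-scripts/stats_monitor.py ../ci-scripts/stats_monitor_conf.yaml gnb 2>&1 > gnb_stats_monitor_execution.log &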
"""
To create graphs and pickle from runtime statistics in L1,MAC,RRC,PDCP files
"""
import subprocess import subprocess
import time import time
import shlex import shlex
import re import re
import sys import sys
import matplotlib.pyplot as plt
import pickle import pickle
import matplotlib.pyplot as plt
import numpy as np import numpy as np
import os import yaml
def collect(d, node_type):
if node_type=='enb': class StatMonitor():
cmd='cat L1_stats.log MAC_stats.log PDCP_stats.log RRC_stats.log' def __init__(self,cfg_file):
else: #'gnb' with open(cfg_file,'r') as file:
cmd='cat nrL1_stats.log nrMAC_stats.log nrPDCP_stats.log nrRRC_stats.log' self.d = yaml.load(file)
process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) for node in self.d:
output = process.stdout.readlines() for metric in self.d[node]:
for l in output: self.d[node][metric]=[]
tmp=l.decode("utf-8")
result=re.match(rf'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
if result is not None: def process_gnb (self,node_type,output):
d['PHR'].append(int(result.group(1))) for line in output:
d['bler'].append(float(result.group(2))) tmp=line.decode("utf-8")
d['mcsoff'].append(int(result.group(3))) result=re.match(r'^.*\bdlsch_rounds\b ([0-9]+)\/([0-9]+).*\bdlsch_errors\b ([0-9]+)',tmp)
d['mcs'].append(int(result.group(4))) if result is not None:
self.d[node_type]['dlsch_err'].append(int(result.group(3)))
percentage=float(result.group(2))/float(result.group(1))
def graph(d, node_type): self.d[node_type]['dlsch_err_perc_round_1'].append(percentage)
result=re.match(r'^.*\bulsch_rounds\b ([0-9]+)\/([0-9]+).*\bulsch_errors\b ([0-9]+)',tmp)
if result is not None:
figure, axis = plt.subplots(4, 1,figsize=(10, 10)) self.d[node_type]['ulsch_err'].append(int(result.group(3)))
percentage=float(result.group(2))/float(result.group(1))
major_ticks = np.arange(0, len(d['PHR'])+1, 1) self.d[node_type]['ulsch_err_perc_round_1'].append(percentage)
axis[0].set_xticks(major_ticks)
axis[0].set_xticklabels([])
axis[0].plot(d['PHR'],marker='o') def process_enb (self,node_type,output):
axis[0].set_xlabel('time') for line in output:
axis[0].set_ylabel('PHR') tmp=line.decode("utf-8")
axis[0].set_title("PHR") result=re.match(r'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
if result is not None:
major_ticks = np.arange(0, len(d['bler'])+1, 1) self.d[node_type]['PHR'].append(int(result.group(1)))
axis[1].set_xticks(major_ticks) self.d[node_type]['bler'].append(float(result.group(2)))
axis[1].set_xticklabels([]) self.d[node_type]['mcsoff'].append(int(result.group(3)))
axis[1].plot(d['bler'],marker='o') self.d[node_type]['mcs'].append(int(result.group(4)))
axis[1].set_xlabel('time')
axis[1].set_ylabel('bler')
axis[1].set_title("bler") def collect(self,node_type):
if node_type=='enb':
major_ticks = np.arange(0, len(d['mcsoff'])+1, 1) cmd='cat L1_stats.log MAC_stats.log PDCP_stats.log RRC_stats.log'
axis[2].set_xticks(major_ticks) else: #'gnb'
axis[2].set_xticklabels([]) cmd='cat nrL1_stats.log nrMAC_stats.log nrPDCP_stats.log nrRRC_stats.log'
axis[2].plot(d['mcsoff'],marker='o') process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
axis[2].set_xlabel('time') output = process.stdout.readlines()
axis[2].set_ylabel('mcsoff') if node_type=='enb':
axis[2].set_title("mcsoff") self.process_enb(node_type,output)
else: #'gnb'
major_ticks = np.arange(0, len(d['mcs'])+1, 1) self.process_gnb(node_type,output)
axis[3].set_xticks(major_ticks)
axis[3].set_xticklabels([])
axis[3].plot(d['mcs'],marker='o') def graph(self,node_type):
axis[3].set_xlabel('time') col = 1
axis[3].set_ylabel('mcs') figure, axis = plt.subplots(len(self.d[node_type]), col ,figsize=(10, 10))
axis[3].set_title("mcs") i=0
for metric in self.d[node_type]:
plt.tight_layout() major_ticks = np.arange(0, len(self.d[node_type][metric])+1, 1)
# Combine all the operations and display axis[i].set_xticks(major_ticks)
plt.savefig(node_type+'_stats_monitor.png') axis[i].set_xticklabels([])
plt.show() axis[i].plot(self.d[node_type][metric],marker='o')
axis[i].set_xlabel('time')
if __name__ == "__main__": axis[i].set_ylabel(metric)
axis[i].set_title(metric)
i+=1
plt.tight_layout()
# Combine all the operations and display
plt.savefig(node_type+'_stats_monitor.png')
plt.show()
node_type = sys.argv[1]#enb or gnb
d={} if __name__ == "__main__":
d['PHR']=[]
d['bler']=[]
d['mcsoff']=[]
d['mcs']=[]
cfg_filename = sys.argv[1] #yaml file as metrics config
node = sys.argv[2]#enb or gnb
mon=StatMonitor(cfg_filename)
cmd='ps aux | grep modem | grep -v grep' #collecting stats when modem process is stopped
process=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) CMD='ps aux | grep mode | grep -v grep'
process=subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
output = process.stdout.readlines() output = process.stdout.readlines()
while len(output)!=0 : while len(output)!=0 :
collect(d, node_type) mon.collect(node)
process=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) process=subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
output = process.stdout.readlines() output = process.stdout.readlines()
time.sleep(1) time.sleep(1)
print('process stopped') print('Process stopped')
with open(node_type+'_stats_monitor.pickle', 'wb') as handle: with open(node+'_stats_monitor.pickle', 'wb') as handle:
pickle.dump(d, handle, protocol=pickle.HIGHEST_PROTOCOL) pickle.dump(mon.d, handle, protocol=pickle.HIGHEST_PROTOCOL)
graph(d, node_type) mon.graph(node)
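As a usage sketch for the StatMonitor script above (assuming it was started with the gnb argument and collected at least a few samples), the pickle it writes can be read back like this:

# Minimal sketch: load the pickle written by the StatMonitor variant above.
# 'gnb_stats_monitor.pickle' is the file name produced for a gNB run; the
# sample values depend entirely on what was parsed during the run.
import pickle

with open('gnb_stats_monitor.pickle', 'rb') as handle:
    stats = pickle.load(handle)

for metric, samples in stats['gnb'].items():
    print(metric, samples[-5:])  # last few samples of each metric series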
import subprocess
import time
import shlex
import re
import sys
import matplotlib.pyplot as plt
import pickle
import numpy as np
import os


def collect(d, node_type):
    if node_type=='enb':
        cmd='cat L1_stats.log MAC_stats.log PDCP_stats.log RRC_stats.log'
    else: #'gnb'
        cmd='cat nrL1_stats.log nrMAC_stats.log nrPDCP_stats.log nrRRC_stats.log'
    process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
    output = process.stdout.readlines()
    for l in output:
        tmp=l.decode("utf-8")
        result=re.match(rf'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
        if result is not None:
            d['PHR'].append(int(result.group(1)))
            d['bler'].append(float(result.group(2)))
            d['mcsoff'].append(int(result.group(3)))
            d['mcs'].append(int(result.group(4)))


def graph(d, node_type):
    figure, axis = plt.subplots(4, 1,figsize=(10, 10))

    major_ticks = np.arange(0, len(d['PHR'])+1, 1)
    axis[0].set_xticks(major_ticks)
    axis[0].set_xticklabels([])
    axis[0].plot(d['PHR'],marker='o')
    axis[0].set_xlabel('time')
    axis[0].set_ylabel('PHR')
    axis[0].set_title("PHR")

    major_ticks = np.arange(0, len(d['bler'])+1, 1)
    axis[1].set_xticks(major_ticks)
    axis[1].set_xticklabels([])
    axis[1].plot(d['bler'],marker='o')
    axis[1].set_xlabel('time')
    axis[1].set_ylabel('bler')
    axis[1].set_title("bler")

    major_ticks = np.arange(0, len(d['mcsoff'])+1, 1)
    axis[2].set_xticks(major_ticks)
    axis[2].set_xticklabels([])
    axis[2].plot(d['mcsoff'],marker='o')
    axis[2].set_xlabel('time')
    axis[2].set_ylabel('mcsoff')
    axis[2].set_title("mcsoff")

    major_ticks = np.arange(0, len(d['mcs'])+1, 1)
    axis[3].set_xticks(major_ticks)
    axis[3].set_xticklabels([])
    axis[3].plot(d['mcs'],marker='o')
    axis[3].set_xlabel('time')
    axis[3].set_ylabel('mcs')
    axis[3].set_title("mcs")

    plt.tight_layout()
    # Combine all the operations and display
    plt.savefig(node_type+'_stats_monitor.png')
    plt.show()


if __name__ == "__main__":
    node_type = sys.argv[1]#enb or gnb
    d={}
    d['PHR']=[]
    d['bler']=[]
    d['mcsoff']=[]
    d['mcs']=[]
    cmd='ps aux | grep modem | grep -v grep'
    process=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output = process.stdout.readlines()
    while len(output)!=0 :
        collect(d, node_type)
        process=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        output = process.stdout.readlines()
        time.sleep(1)
    print('process stopped')
    with open(node_type+'_stats_monitor.pickle', 'wb') as handle:
        pickle.dump(d, handle, protocol=pickle.HIGHEST_PROTOCOL)
    graph(d, node_type)
@@ -5,7 +5,7 @@ enb :
   mcs:
 gnb :
-  PHR:
-  bler:
-  mcsoff:
-  mcs:
+  dlsch_err:
+  dlsch_err_perc_round_1:
+  ulsch_err:
+  ulsch_err_perc_round_1:
\ No newline at end of file
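Tied to StatMonitor.__init__ above, the updated YAML is expected to parse into a two-level mapping of node to metric names. A sketch of the dictionary yaml.load() should return is below; the gnb metric names come from the hunk above, while the enb names are an assumption based on the eNB regex (PHR/bler/mcsoff/mcs), since that part of the file is not shown.

# Sketch: the structure stats_monitor_conf.yaml is expected to yield after the
# change (empty YAML values load as None).
expected_cfg = {
    'enb': {'PHR': None, 'bler': None, 'mcsoff': None, 'mcs': None},
    'gnb': {'dlsch_err': None, 'dlsch_err_perc_round_1': None,
            'ulsch_err': None, 'ulsch_err_perc_round_1': None},
}

# StatMonitor.__init__ then turns every metric entry into an empty list:
for node in expected_cfg:
    for metric in expected_cfg[node]:
        expected_cfg[node][metric] = []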
@@ -19,22 +19,25 @@ class Stat_Monitor():
                 self.d[node][metric]=[]

-    def process_enb (self,node_type,output):
-        for l in output:
-            tmp=l.decode("utf-8")
-            result=re.match(rf'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
-            if result is not None:
-                self.d[node_type]['PHR'].append(int(result.group(1)))
-                self.d[node_type]['bler'].append(float(result.group(2)))
-                self.d[node_type]['mcsoff'].append(int(result.group(3)))
-                self.d[node_type]['mcs'].append(int(result.group(4)))
+    def process_gnb (self,node_type,output):
+        for line in output:
+            tmp=line.decode("utf-8")
+            result=re.match(r'^.*\bdlsch_rounds\b ([0-9]+)\/([0-9]+).*\bdlsch_errors\b ([0-9]+)',tmp)
+            if result is not None:
+                self.d[node_type]['dlsch_err'].append(int(result.group(3)))
+                percentage=float(result.group(2))/float(result.group(1))
+                self.d[node_type]['dlsch_err_perc_round_1'].append(percentage)
+            result=re.match(r'^.*\bulsch_rounds\b ([0-9]+)\/([0-9]+).*\bulsch_errors\b ([0-9]+)',tmp)
+            if result is not None:
+                self.d[node_type]['ulsch_err'].append(int(result.group(3)))
+                percentage=float(result.group(2))/float(result.group(1))
+                self.d[node_type]['ulsch_err_perc_round_1'].append(percentage)

-    def process_gnb (self,node_type,output):
-        for l in output:
-            tmp=l.decode("utf-8")
-            result=re.match(rf'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
+    def process_enb (self,node_type,output):
+        for line in output:
+            tmp=line.decode("utf-8")
+            result=re.match(r'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
             if result is not None:
                 self.d[node_type]['PHR'].append(int(result.group(1)))
                 self.d[node_type]['bler'].append(float(result.group(2)))
...
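To make the new gNB parsing concrete, here is a small sketch of what process_gnb extracts from one stats line. The log line below is hypothetical; only the "dlsch_rounds x/y" and "dlsch_errors z" tokens matter to the regex.

# Sketch: apply the new DLSCH regex to a hypothetical stats line and derive the
# two metrics appended to the dictionary (error count and errors-per-first-round).
import re

line = 'UE 0: dlsch_rounds 800/24 dlsch_errors 3'  # hypothetical example line
result = re.match(r'^.*\bdlsch_rounds\b ([0-9]+)\/([0-9]+).*\bdlsch_errors\b ([0-9]+)', line)
if result is not None:
    dlsch_err = int(result.group(3))                                 # 3
    dlsch_err_perc_round_1 = float(result.group(2)) / float(result.group(1))  # 24/800 = 0.03
    print(dlsch_err, dlsch_err_perc_round_1)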
@@ -122,8 +122,8 @@
     <testCase id="070001">
         <class>Iperf</class>
-        <desc>iperf (UL/3Mbps/UDP)(60 sec)(single-ue profile)</desc>
-        <iperf_args>-u -b 3M -t 60</iperf_args>
+        <desc>iperf (UL/1Mbps/UDP)(20 sec)(single-ue profile)</desc>
+        <iperf_args>-u -b 1M -t 20</iperf_args>
         <direction>UL</direction>
         <id>nrmodule2_quectel</id>
         <iperf_packetloss_threshold>5</iperf_packetloss_threshold>
...
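For context, the <iperf_args> above map onto an iperf client invocation roughly like the sketch below; the server address and the exact command assembly done by the CI Iperf class are assumptions, not taken from the diff.

# Hypothetical assembly of the UDP uplink iperf command implied by the test case.
iperf_args = '-u -b 1M -t 20'   # UDP, 1 Mbps target bitrate, 20 seconds
server_ip = '192.168.x.x'       # placeholder, not a value from the diff
print('iperf -c ' + server_ip + ' ' + iperf_args)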