Commit 9fa556ff authored by gabrielC

Merge branch 'Enhancement-142-OAI_UE_autotest_framework' into develop_integration_w50

parents ca078ff5 af68b397
......@@ -75,7 +75,7 @@ class openair(core):
return (stdout, stderr)
def connect(self, username, password, prompt='PEXPECT_OAI'):
-max_retries=100
+max_retries=10
i=0
while i <= max_retries:
self.prompt1 = prompt
......@@ -97,9 +97,9 @@ class openair(core):
# need to look for twice the string of the prompt
self.oai.prompt()
self.oai.prompt()
-self.oai.sendline('uptime')
-self.oai.prompt()
-print self.oai.before
+# self.oai.sendline('uptime')
+# self.oai.prompt()
+# print self.oai.before
break
except Exception, e:
error=''
......@@ -171,7 +171,7 @@ class openair(core):
sys.exit(1)
def disconnect(self):
-print 'disconnecting the ssh connection to ' + self.address + '\n'
+# print 'disconnecting the ssh connection to ' + self.address + '\n'
self.oai.send('exit')
# self.cancel()
......
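Note: the connect() change above tightens the SSH retry loop (max_retries 100 -> 10) and drops the uptime echo that verified the session. A minimal sketch of that bounded-retry pattern, assuming a pexpect-style session; the ssh command line and prompt patterns here are illustrative, not the framework's actual values:

import pexpect

def connect_with_retries(host, username, password, max_retries=10):
    # bounded retry: give up after max_retries failed attempts
    last_error = None
    for attempt in range(max_retries):
        try:
            session = pexpect.spawn('ssh %s@%s' % (username, host), timeout=30)
            session.expect('assword:')      # matches 'password:' or 'Password:'
            session.sendline(password)
            session.expect(r'[$#] ')        # illustrative shell prompt pattern
            return session
        except Exception as e:              # pexpect.TIMEOUT, pexpect.EOF, ...
            last_error = e
    raise RuntimeError('connect failed after %d retries: %s'
                       % (max_retries, last_error))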
diff --git a/cmake_targets/CMakeLists.txt b/cmake_targets/CMakeLists.txt
index 07d92a1..b6a02d1 100644
--- a/cmake_targets/CMakeLists.txt
+++ b/cmake_targets/CMakeLists.txt
@@ -235,6 +235,7 @@ add_boolean_option(DEBUG_OMG False "???")
add_boolean_option(XFORMS False "This adds the possibility to see the signal oscilloscope")
add_boolean_option(PRINT_STATS False "This adds the possibility to see the status")
add_boolean_option(T_TRACER False "Activate the T tracer, a debugging/monitoring framework" )
+add_boolean_option(UE_AUTOTEST_TRACE False "Activate UE autotest specific logs")
add_boolean_option(DEBUG_CONSOLE False "makes debugging easier, disables stdout/stderr buffering")
diff --git a/openair1/PHY/LTE_TRANSPORT/initial_sync.c b/openair1/PHY/LTE_TRANSPORT/initial_sync.c
index 663978e..b77cb1b 100644
--- a/openair1/PHY/LTE_TRANSPORT/initial_sync.c
+++ b/openair1/PHY/LTE_TRANSPORT/initial_sync.c
@@ -476,6 +476,15 @@ int initial_sync(PHY_VARS_UE *ue, runmode_t mode)
//#endif
if (ue->UE_scan_carrier == 0) {
+
+ #if UE_AUTOTEST_TRACE
+ LOG_I(PHY,"[UE %d] AUTOTEST Cell Sync : frame = %d, rx_offset %d, freq_offset %d \n",
+ ue->Mod_id,
+ ue->proc.proc_rxtx[0].frame_rx,
+ ue->rx_offset,
+ ue->common_vars.freq_offset );
+ #endif
+
if (ue->mac_enabled==1) {
LOG_I(PHY,"[UE%d] Sending synch status to higher layers\n",ue->Mod_id);
//mac_resynch();
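The "AUTOTEST Cell Sync" log added here is the marker that the autotest analyser greps for when deciding whether the cell synchronized (see check_cell_synchro() in the analyser diff further down). A minimal sketch of that check, mirroring the re.search pattern shown there; the file handling is an assumption:

import re

def check_cell_synchro(fname):
    # return as soon as the autotest sync marker appears in the UE log
    with open(fname, 'r') as f:
        for line in f:
            if re.search(r'AUTOTEST Cell Sync \:', line):
                return 'CELL_SYNCH'
    return 'CELL_NOT_SYNCH'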
diff --git a/openair1/SCHED/phy_procedures_lte_ue.c b/openair1/SCHED/phy_procedures_lte_ue.c
index 5dc629b..353a049 100644
--- a/openair1/SCHED/phy_procedures_lte_ue.c
+++ b/openair1/SCHED/phy_procedures_lte_ue.c
@@ -3551,6 +3551,13 @@ int phy_procedures_UE_RX(PHY_VARS_UE *ue,UE_rxtx_proc_t *proc,uint8_t eNB_id,uin
LOG_D(PHY,"[UE %d] Calculating bitrate Frame %d: total_TBS = %d, total_TBS_last = %d, bitrate %f kbits\n",
ue->Mod_id,frame_rx,ue->total_TBS[eNB_id],
ue->total_TBS_last[eNB_id],(float) ue->bitrate[eNB_id]/1000.0);
+
+ #if UE_AUTOTEST_TRACE
+ if ((frame_rx % 100 == 0)) {
+ LOG_I(PHY,"[UE %d] AUTOTEST Metric : UE_DLSCH_BITRATE = %5.2f kbps (frame = %d) \n", ue->Mod_id, (float) ue->bitrate[eNB_id]/1000.0, frame_rx);
+ }
+ #endif
+
}
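This UE_DLSCH_BITRATE line is printed once every 100 frames, and it is what the UE_DLSCH_BITRATE regex used by the autotest scripts (visible in the runtest diff below) extracts. A small parsing sketch using that regex; the sample line is illustrative:

import re

BITRATE_RE = r'(UE_DLSCH_BITRATE) =\s+(\d+\.\d+) kbps.+frame = (\d+)\)'
line = '[PHY] [UE 0] AUTOTEST Metric : UE_DLSCH_BITRATE = 6553.60 kbps (frame = 500) '
m = re.search(BITRATE_RE, line)
if m:
    kbps = float(m.group(2))   # throughput sample in kbps
    frame = int(m.group(3))    # frame the sample was taken at
    print('%.2f kbps at frame %d' % (kbps, frame))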
diff --git a/openair1/SCHED/phy_procedures_lte_ue.c b/openair1/SCHED/phy_procedures_lte_ue.c
index 5dc629b..4d31ad3 100644
--- a/openair1/SCHED/phy_procedures_lte_ue.c
+++ b/openair1/SCHED/phy_procedures_lte_ue.c
@@ -3347,6 +3347,22 @@ int phy_procedures_UE_RX(PHY_VARS_UE *ue,UE_rxtx_proc_t *proc,uint8_t eNB_id,uin
// first slot has been processed (FFTs + Channel Estimation, PCFICH/PHICH/PDCCH)
+ #if UE_AUTOTEST_TRACE
+ if ( (frame_rx % 10 == 0) && (subframe_rx == 0)) {
+ printf("AUTOTEST Metric : UE_FREQ_OFFSET = %d Hz (frame = %d) \n", ue->common_vars.freq_offset, frame_rx);
+ printf("AUTOTEST Metric : UE_RX_OFFSET = %d (frame = %d) \n", ue->rx_offset, frame_rx);
+
+ printf("AUTOTEST Metric : RRC Measurments RSRP[0] %.2f dBm/RE, RSSI %.2f dBm, RSRQ[0] %.2f dB, N0 %d dBm/RE, NF %.1f dB (frame = %d)\n",
+ 10*log10(ue->measurements.rsrp[0])-ue->rx_total_gain_dB,
+ 10*log10(ue->measurements.rssi)-ue->rx_total_gain_dB,
+ 10*log10(ue->measurements.rsrq[0]),
+ ue->measurements.n0_power_tot_dBm,
+ (double)ue->measurements.n0_power_tot_dBm+132.24,
+ frame_rx);
+ }
+ #endif
+
+
// do procedures for C-RNTI
if (ue->dlsch[eNB_id][0]->active == 1) {
VCD_SIGNAL_DUMPER_DUMP_FUNCTION_BY_NAME(VCD_SIGNAL_DUMPER_FUNCTIONS_PDSCH_PROC, VCD_FUNCTION_IN);
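The NF value printed above is obtained by offsetting the measured per-RE noise power with the constant 132.24: assuming the usual -174 dBm/Hz thermal noise density, the noise floor of one 15 kHz LTE resource element is

    N0_thermal = -174 dBm/Hz + 10*log10(15000 Hz) = -132.24 dBm/RE

so NF = n0_power_tot_dBm - N0_thermal = n0_power_tot_dBm + 132.24 dB.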
diff --git a/cmake_targets/build_oai b/cmake_targets/build_oai
index 25e9a1c..0b04bb7 100755
--- a/cmake_targets/build_oai
+++ b/cmake_targets/build_oai
@@ -54,6 +54,7 @@ BUILD_DOXYGEN=0
T_TRACER="False"
DISABLE_HARDWARE_DEPENDENCY="False"
CMAKE_BUILD_TYPE=""
+UE_AUTOTEST_TRACE="False"
trap handle_ctrl_c INT
function print_help() {
@@ -130,6 +131,8 @@ Options
Enables the T tracer.
--disable-hardware-dependency
Disable HW dependency during installation
+--ue-autotest-trace
+ Enable specific traces for UE autotest framework
Usage (first build):
oaisim (eNB + UE): ./build_oai -I --oaisim -x --install-system-files
Eurecom EXMIMO + COTS UE : ./build_oai -I --eNB -x --install-system-files
@@ -285,6 +288,10 @@ function main() {
echo_info "Disabling hardware dependency for compiling software"
DISABLE_HARDWARE_DEPENDENCY="True"
shift 1;;
+ --ue-autotest-trace)
+ UE_AUTOTEST_TRACE="True"
+ echo_info "Enabling autotest specific trace for UE"
+ shift 1;;
-h | --help)
print_help
exit 1;;
@@ -457,6 +464,7 @@ function main() {
echo "set (DEADLINE_SCHEDULER \"${DEADLINE_SCHEDULER_FLAG_USER}\" )" >>$cmake_file
echo "set (CPU_AFFINITY \"${CPU_AFFINITY_FLAG_USER}\" )" >>$cmake_file
echo "set ( T_TRACER $T_TRACER )" >> $cmake_file
+ echo "set (UE_AUTOTEST_TRACE $UE_AUTOTEST_TRACE)" >> $cmake_file
echo 'include(${CMAKE_CURRENT_SOURCE_DIR}/../CMakeLists.txt)' >> $cmake_file
cd $DIR/$lte_build_dir/build
cmake ..
This source diff could not be displayed because it is too large.
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>OAI5G UE Autotest Report</title>
<script type="text/javascript">
function showhide(id) {
var e = document.getElementById(id);
e.style.display = (e.style.display == 'block') ? 'none' : 'block';
}
</script>
</head>
<style>
table, th, td {
border: 1px solid black;
border-collapse: collapse;
padding: 5px;
}
</style>
<body>
<center>
<h2>OAI5G UE Autotest Report</h2>
</center>
<p>
<table border>
<caption>Test session configuration</caption>
<tr><td>Start time</td><td>{{test_session_start_time}}</td></tr>
<tr><td>Stop time</td><td>{{test_session_stop_time}}</td></tr>
<tr><td>Duration</td><td>{{test_session_duration}}</td></tr>
<tr><td>MTC host</td><td>{{mtc_host}}</td></tr>
<tr><td>User</td><td>{{user}}</td></tr>
<tr><td>Password</td><td>{{password}}</td></tr>
</table>
</p>
<h3>Test Setup</h3>
To be completed.
<br></br>
<h3>UE phy-test performances tests results</h3>
<h4>Objectives</h4>
<p>Checks that the OAI UE achieves at least 75 percent of the theoretical throughput.</p>
<p>Tests are run for all MCS values (0 to 28) at 5 MHz and 10 MHz bandwidth.</p>
<h4>Results</h4>
<table>
<TR><TH>ID</TH><TH>TAG</TH><TH>VERDICT</TH><TH>NB RUNS</TH><TH>PASS</TH><TH>FAILED</TH><TH>INCON</TH><TH>SKIPPED</TH><TH>SEG FAULT</TH><TH>TC Timeout</TH><TH>Start</TH><TH>Stop</TH><TH>Duration</TH><TH>Details</TH></TR>
{% for result in test_results|sort(attribute='testcase_name') %}
<TR>
<TD >{{result.testcase_name}}</TD>
<TD align="right">{{result.tags}}</TD>
{% if result.testcase_verdict == "PASS" %}
<TD align="center" style="background-color:green">{{result.testcase_verdict}}</TD>
{% elif result.testcase_verdict == "FAIL" %}
<TD align="center" style="background-color:red">{{result.testcase_verdict}}</TD>
{% else %}
<TD align="center" style="background-color:orange">{{result.testcase_verdict}}</TD>
{% endif %}
<TD align='center'>{{result.nruns}}</TD>
<TD align='center'>{{result.nb_run_pass}}</TD>
<TD align='center'>{{result.nb_run_failed}}</TD>
<TD align='center'>{{result.nb_run_inc}}</TD>
<TD align='center'>{{result.nb_run_skip}}</TD>
<TD align='center'>{{result.nb_seg_fault}}</TD>
<TD >{{result.testcase_timeout}}</TD>
<TD >{{result.testcase_time_start.strftime('%Y-%m-%d %H:%M:%S')}}</TD>
<TD >{{result.testcase_time_stop.strftime('%Y-%m-%d %H:%M:%S')}}</TD>
<TD >{{result.testcase_duration}}</TD>
<TD ><a href="{{ result.testcase_name }}/{{ result.testcase_name }}_report.html">{{ result.testcase_name }}_report.html</a></TD>
</TR>
{% endfor %}
</table>
<br></br>
<h3>UE phy-test stability tests results</h3>
<h4>Objectives</h4>
<p>To be completed.</p>
<h4>Results</h4>
To be completed.
</body>
</html>
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>OAI5G UE test case report</title>
<script type="text/javascript">
function showhide(id) {
var e = document.getElementById(id);
e.style.display = (e.style.display == 'block') ? 'none' : 'block';
}
</script>
</head>
<style>
table, th, td {
border: 1px solid black;
border-collapse: collapse;
padding: 5px;
}
</style>
<body>
<center>
<h2>OAI5G UE test case report details</h2>
</center>
<h3>Test Case description</h3>
<p>
<table border>
<tr><td>ID</td><td>{{testcase_name}}</td></tr>
<tr><td>TAG</td><td>{{tags}}</td></tr>
<tr><td>class</td><td>{{testcaseclass}}</td></tr>
<tr><td>description</td><td></td></tr>
<tr><td>timeout</td><td>{{testcase_timeout}}</td></tr>
<tr><td>number of runs</td><td>{{nruns}}</td></tr>
<tr><td>eNB machine</td><td>{{testcase_eNBMachine}}</td></tr>
<tr><td>UE machine</td><td>{{testcase_UEMachine}}</td></tr>
</table>
</p>
<h3>Test Case execution</h3>
<p>
<table border>
<tr><td>testcase_time_start</td><td>{{testcase_time_start}}</td></tr>
<tr><td>testcase_time_stop</td><td>{{testcase_time_stop}}</td></tr>
<tr><td>testcase_duration</td><td>{{testcase_duration}}</td></tr>
<tr><td>Nb runs</td><td>{{nruns}}</td></tr>
<tr><td>Nb PASS</td><td>{{nb_run_pass}}</td></tr>
<tr><td>Nb FAILED</td><td>{{nb_run_failed}}</td></tr>
<tr><td>Nb INCONCLUSIVE</td><td>{{nb_run_inc}}</td></tr>
<tr>
<td>testcase_verdict</td>
{% if testcase_verdict == "PASS" %}
<TD align="center" style="background-color:green">{{testcase_verdict}}</TD>
{% elif testcase_verdict == "FAIL" %}
<TD align="center" style="background-color:red">{{testcase_verdict}}</TD>
{% else %}
<TD align="center" style="background-color:orange">{{testcase_verdict}}</TD>
{% endif %}
</tr>
<tr><td>Nb Seg Fault</td><td>{{nb_seg_fault}}</td></tr>
</table>
</p>
<h3>Test Case runs results</h3>
{% for run_results in runs_results|sort(attribute='run_id') %}
<h4>RUN {{run_results.run_id}} </h4>
<table border>
<tr><td>run_start_time </td><td>{{run_results.run_start_time}}</td></tr>
<tr><td>run_stop_time</td><td>{{run_results.run_stop_time}}</td></tr>
<tr><td>run_duration</td><td>{{run_results.run_duration}}</td></tr>
<tr>
<td>run_verdict</td>
{% if run_results.run_verdict == "PASS" %}
<TD align="center" style="background-color:green">{{run_results.run_verdict}}</TD>
{% elif run_results.run_verdict == "FAIL" %}
<TD align="center" style="background-color:red">{{run_results.run_verdict}}</TD>
{% else %}
<TD align="center" style="background-color:orange">{{run_results.run_verdict}}</TD>
{% endif %}
</tr>
<tr><td>Seg Fault Status</td>
{% if run_results.ue_seg_fault_status == "NO_SEG_FAULT" %}
<TD align="center" style="background-color:green">{{run_results.ue_seg_fault_status}}</TD>
{% elif run_results.ue_seg_fault_status == "SEG_FAULT" %}
<TD align="center" style="background-color:red">{{run_results.ue_seg_fault_status}}</TD>
{% else %}
<TD align="center" style="background-color:orange">unknown</TD>
{% endif %}
</tr>
</table>
{% for run_metrics in run_results.runs_metrics %}
<br></br>
<table border>
<tr><td>metric_id</td><td>{{run_metrics.metric_id}}</td></tr>
<tr><td>Description</td><td>{{run_metrics.metric_desc}}</td></tr>
<tr><td>Unit of measure</td><td>{{run_metrics.metric_uom}}</td></tr>
<tr><td>metric_min</td><td>{{run_metrics.metric_min}}</td></tr>
<tr><td>metric_max</td><td>{{run_metrics.metric_max}}</td></tr>
<tr><td>metric_mean</td><td>{{run_metrics.metric_mean}}</td></tr>
<tr><td>metric_median</td><td>{{run_metrics.metric_median}}</td></tr>
<tr><td colspan="2"></td></tr>
{% if run_metrics.pass_fail_stat is defined %}
<tr><td>Pass/fail stat</td><td>{{run_metrics.pass_fail_stat}}</td></tr>
{% endif %}
{% if run_metrics.pass_fail_min_limit is defined %}
<tr><td>Pass/fail min limit</td><td>{{run_metrics.pass_fail_min_limit}}</td></tr>
{% endif %}
{% if run_metrics.pass_fail_max_limit is defined %}
<tr><td>Pass/fail max limit</td><td>{{run_metrics.pass_fail_max_limit}}</td></tr>
{% endif %}
<tr><td colspan="2"></td></tr>
<tr><td>metric_fig</td><td><IMG src="{{run_metrics.metric_fig}}"></td></tr>
</table>
{% endfor %}
{% if run_results.run_traffic.traffic_count != 0 %}
<br></br>
<table border>
<TR><TH>Iperf metric</TH><TH>min</TH><TH>max</TH><TH>mean</TH><TH>median</TH></TR>
<TR><td>Bandwidth</td><td>{{run_results.run_traffic.bw_min}}</td><td>{{run_results.run_traffic.bw_max}}</td><td>{{run_results.run_traffic.bw_mean}}</td><td>{{run_results.run_traffic.bw_median}}</td></TR>
<TR><td>Jitter</td><td>{{run_results.run_traffic.jitter_min}}</td><td>{{run_results.run_traffic.jitter_max}}</td><td>{{run_results.run_traffic.jitter_mean}}</td><td>{{run_results.run_traffic.jitter_median}}</td></TR>
<TR><td>Loss rate</td><td>{{run_results.run_traffic.rl_min}}</td><td>{{run_results.run_traffic.rl_max}}</td><td>{{run_results.run_traffic.rl_mean}}</td><td>{{run_results.run_traffic.rl_median}}</td></TR>
<TR><td colspan="5"></td></TR>
<TR><td>Iperf duration</td><td>{{run_results.run_traffic.iperf_duration}}</td><td></td><td>Pass/Fail criteria (min duration)</td><td>{{run_results.run_traffic.dur_pass_fail_crit}}</td></TR>
<TR><td colspan="5"></td></TR>
<tr><td>traffic_fig</td><td colspan="4"><IMG src="{{run_results.run_traffic.traffic_fig}}"></td></tr>
</table>
{% endif %}
{% endfor %}
</body>
</html>
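Both report templates above are rendered through the Jinja2 environment configured in the analyser diff below (a FileSystemLoader pointing at ../templates). A minimal rendering sketch; the helper function itself is an assumption, while the loader settings and the context keys ({{test_results}}, {{testcase_name}}, ...) come from the files shown here:

import os
from jinja2 import Environment, FileSystemLoader

PATH = os.path.dirname(os.path.abspath(__file__))
env = Environment(autoescape=False,
                  loader=FileSystemLoader(os.path.join(PATH, '../templates')),
                  trim_blocks=False)

def render_report(template_name, context, out_file):
    # fill the {{...}} placeholders and write the resulting HTML report
    html = env.get_template(template_name).render(context)
    with open(out_file, 'w') as f:
        f.write(html)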
Two further source diffs could not be displayed because they are too large.
......@@ -35,12 +35,15 @@ import getopt
import sys
from subprocess import call
import encoder
sys.path.append(os.path.expandvars('$OPENAIR_DIR/cmake_targets/autotests/tools/'))
#test_cases = ('030001', '030901', '031001', '031601', '031701', '031801', '031901', '032001', '032101', '032201', '032301', '032501', '032601', '032801')
-test_cases = ('030030' , '030030' )
+test_cases = ('032800' , '032730' )
-nb_run = 3
+nb_run = 2
def error_opt(msg):
print("Option error: " + msg)
......@@ -58,14 +61,27 @@ def main(args):
# metric = {}
# metric['id'] = 'UE_DLSCH_BITRATE'
# metric['description'] = 'UE downlink physical throughput'
# metric['regex'] = '(UE_DLSCH_BITRATE) =\s+(\d+\.\d+) kbps.+frame = (\d+)\)'
# metric['unit_of_meas'] = 'kbps'
# metric['min_limit'] = 14668.8
#AUTOTEST Metric : RRC Measurments RSRP[0]=-97.60 dBm/RE, RSSI=-72.83 dBm, RSRQ[0] 9.03 dB, N0 -125 dBm/RE, NF 7.2 dB (frame = 4490)
metric = {}
metric['id'] = 'UE_DLSCH_BITRATE'
metric['description'] = 'UE downlink physical throughput'
metric['regex'] = '(UE_DLSCH_BITRATE) =\s+(\d+\.\d+) kbps.+frame = (\d+)\)'
metric['id'] = 'UE_DL_RRC_MEAS'
metric['description'] = 'UE downlink RRC Measurements'
metric['nb_metric'] = 5
# metric['regex'] = 'AUTOTEST Metric : RRC Measurments (RSRP\[0\])=(-?\d+\.?\d*)\s+(.+),\s+(RSRQ\[0\])=(-?\d+\.?\d*)\s+(.+),,\s+(N0)=(-?\d+\.?\d*)\s+(.+),,\s+(NF)=(-?\d+\.?\d*)\s+(.+)\s+\(frame = (\d+)\) '
metric['regex'] = 'AUTOTEST Metric : RRC Measurments (RSRP\[0\])=(-?\d+\.?\d*)\s+(.+)\,\s+(RSSI)=(-?\d+\.?\d*)\s+(.+)\,\s+(RSRQ\[0\])=(-?\d+\.?\d*)\s+(.+)\,\s+(N0)=(-?\d+\.?\d*)\s+(.+)\,\s+(NF)=(-?\d+\.?\d*)\s+(.+)\s+\(frame = (\d+)\)'
metric['unit_of_meas'] = 'kbps'
metric['min_limit'] = 14668.8
#report_path = log_path+'/report/'
#os.system(' mkdir -p ' + report_path)
......@@ -74,58 +90,44 @@ def main(args):
#return(0)
for test_case in test_cases:
# print test_case
if test_case == '030001':
metric['min_limit'] = 500.0
if test_case == '030901':
metric['min_limit'] = 640.0
if test_case == '031001':
metric['min_limit'] = 3200.0
if test_case == '031601':
metric['min_limit'] = 5920.0
if test_case == '031701':
metric['min_limit'] = 6000.0
if test_case == '031801':
metric['min_limit'] = 6200.0
if test_case == '031901':
metric['min_limit'] = 7000.0
if test_case == '032001':
metric['min_limit'] = 7800.0
if test_case == '032101':
metric['min_limit'] = 8000.0
if test_case == '032201':
metric['min_limit'] = 9000.0
if test_case == '032301':
metric['min_limit'] = 10000.0
if test_case == '032501':
metric['min_limit'] = 11000.0
if test_case == '032601':
metric['min_limit'] = 12000.0
if test_case == '032801':
metric['min_limit'] = 12500.0
if test_case == '035201':
metric['min_limit'] = 14668.8
if test_case == '036001':
metric['min_limit'] = 25363.2
test_results = []
for test_case in test_cases:
for i in range(0, nb_run):
-fname = 'log//'+test_case+'/run_'+str(i)+'/UE_exec_'+str(i)+'_.log'
+fname = '..//log//'+test_case+'/run_'+str(i)+'/UE_exec_'+str(i)+'_.log'
args = {'metric' : metric,
'file' : fname }
-cell_synch_status = analyser.check_cell_synchro(fname)
-if cell_synch_status == 'CELL_SYNCH':
-print '!!!!!!!!!!!!!! Cell synchronized !!!!!!!!!!!'
-metric_checks_flag = 0
-else :
-print '!!!!!!!!!!!!!! Cell NOT NOT synchronized !!!!!!!!!!!'
+# cell_synch_status = analyser.check_cell_synchro(fname)
+# if cell_synch_status == 'CELL_SYNCH':
+# print '!!!!!!!!!!!!!! Cell synchronized !!!!!!!!!!!'
+# metric_checks_flag = 0
+# else :
+# print '!!!!!!!!!!!!!! Cell NOT NOT synchronized !!!!!!!!!!!'
# metric_extracted = analyser.do_extract_metrics(args)
# metrics_extracted = analyser.do_extract_metrics_new(args)
# de-xmlfy test report
xml_file = '..//log//'+test_case+'/test.'+test_case+'_ng.xml'
print xml_file
# test_result =
# test_results.append(test_result)
# xmlFile = logdir_local_testcase + '/test.' + testcasename + '.xml'
# xml="\n<testcase classname=\'"+ testcaseclass + "\' name=\'" + testcasename + "."+tags + "\' Run_result=\'" + test_result_string + "\' time=\'" + str(duration) + " s \' RESULT=\'" + testcase_verdict + "\'></testcase> \n"
# write_file(xmlFile, xml, mode="w")
# xmlFile_ng = logdir_local_testcase + '/test.' + testcasename + '_ng.xml'
# xml_ng = xmlify(test_result, wrap=testcasename, indent=" ")
# write_file(xmlFile_ng, xml_ng, mode="w")
# print "min = "+ str( metric_extracted['metric_min'] )
# print "min_index = "+ str( metric_extracted['metric_min_index'] )
......@@ -143,16 +145,27 @@ def main(args):
# print fname
# analyser.do_img_metrics(metric, metric_extracted, fname)
# fname = 'log//'+test_case+'/run_'+str(i)+'/UE_traffic_'+str(i)+'_.log'
# fname = 'log//'+test_case+'/run_'+str(i)+'/UE_traffic_'+str(i)+'_.log'
# args = {'file' : fname }
# args = {'file' : fname }
# traffic_metrics = analyser.do_extract_traffic_metrics(args)
# traffic_metrics = analyser.do_extract_traffic_metrics(args)
# fname= 'report/iperf_'+test_case+'_'+str(i)+'.png'
# fname= 'report/iperf_'+test_case+'_'+str(i)+'.png'
# print fname
# analyser.do_img_traffic(traffic_metrics, fname)
# print fname
# analyser.do_img_traffic(traffic_metrics, fname)
for test_result in test_results:
cmd = 'mkdir -p ' + report_dir + '/'+ test_result['testcase_name']
result = os.system(cmd)
report_file = report_dir + '/'+ test_result['testcase_name'] + '/'+ test_result['testcase_name']+ '_report.html'
analyser.create_test_report_detailed_html(test_result, report_file )
print test_result
......
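The per-test-case if-chain above maps test case IDs to UE_DLSCH_BITRATE pass/fail limits. For reference, the same mapping in table-driven form; a sketch with values copied from the chain, not part of the commit:

MIN_LIMITS_KBPS = {
    '030001':   500.0, '030901':   640.0, '031001':  3200.0, '031601':  5920.0,
    '031701':  6000.0, '031801':  6200.0, '031901':  7000.0, '032001':  7800.0,
    '032101':  8000.0, '032201':  9000.0, '032301': 10000.0, '032501': 11000.0,
    '032601': 12000.0, '032801': 12500.0, '035201': 14668.8, '036001': 25363.2,
}
metric['min_limit'] = MIN_LIMITS_KBPS.get(test_case, metric['min_limit'])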
......@@ -41,7 +41,7 @@ from jinja2 import Environment, FileSystemLoader
PATH = os.path.dirname(os.path.abspath(__file__))
TEMPLATE_ENVIRONMENT = Environment(
autoescape=False,
-loader=FileSystemLoader(os.path.join(PATH, 'templates')),
+loader=FileSystemLoader(os.path.join(PATH, '../templates')),
trim_blocks=False)
......@@ -103,6 +103,129 @@ def do_extract_metrics(args):
}
return(ret)
def do_extract_metrics_new(args):
# print ""
# print "do_extract_metrics ... "
fname = args['file']
metric = args['metric']
print(fname)
print 'metric id = ' + metric['id']
print 'metric regex = ' + metric['regex']
count = 0
mmin = 0
mmin_index = 0
mmax = 0
mmax_index = 0
mean = 0
median = 0
toto = [('id', 'S20'), ('metric', np.float), ('frame', np.int)]
print toto
np_format = []
for x in range(0, metric['nb_metric']):
np_format.append( ('id'+str(x), 'S20') )
np_format.append( ('metric'+str(x), np.float) )
np_format.append( ('uom'+str(x), 'S20') )
np_format.append( ('frame', np.int))
print np_format
output = np.fromregex(fname,metric['regex'], np_format)
print output
count = output['frame'].size
print count
if count > 0:
fontP = FontProperties()
fontP.set_size('small')
fig = plt.figure(1)
plt.figure(figsize=(10,10))
plot_xmax = np.amax(output['frame'])+np.amin(output['frame'])
for x in range(0, metric['nb_metric']):
metric_name = output['id'+str(x)][0]
metric_uom = output['uom'+str(x)][0]
mmin = np.amin(output['metric'+str(x)])
mmax = np.amax(output['metric'+str(x)])
mmean = np.mean(output['metric'+str(x)])
mmedian = np.median(output['metric'+str(x)])
plot_loc = 100*metric['nb_metric']+10+x+1
sbplt = plt.subplot(plot_loc)
sbplt.plot(output['frame'], output['metric'+str(x)], color='b' )
sbplt.set_title( metric_name+' ('+metric_uom+')')
if mmin < 0:
sbplot_ymin=mmin+mmin/10
else:
sbplot_ymin=0
sbplt.set_ylim(ymin=sbplot_ymin)
if mmax > 0:
sbplot_ymax=mmax+mmax/10
else:
sbplot_ymax=0
sbplt.set_ylim(ymax=sbplot_ymax)
sbplt.set_xlim(xmax=plot_xmax)
sbplt.set_xlim(xmin=0)
text='min: '+str(mmin)+'\nmax: '+str(mmax)+'\nmean: '+str(mmean)+'\nmedian: '+str(mmedian)
sbplt.text( plot_xmax+10,sbplot_ymin,text)
sbplt.set_xlabel('frame')
sbplt.set_ylabel(metric_name)
plt.tight_layout()
fname = "toto.png"
# lgd = plt.legend(prop=fontP, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
mng = plt.get_current_fig_manager()
plt.savefig(fname, bbox_inches='tight')
plt.close()
# the structured array has no plain 'metric' field (fields are metric0..metricN),
# so take the summary statistics from the first metric field
mmin = np.amin(output['metric0'])
mmin_index = np.argmin(output['metric0'])
mmax = np.amax(output['metric0'])
mmax_index = np.argmax(output['metric0'])
mean = np.mean(output['metric0'])
median = np.median(output['metric0'])
# print ( ( (metric['min_limit'] > output['metric']).sum() / float(output['metric'].size) ) * 100 )
ret = { 'metric_count' : count,
'metric_buf' : output,
'metric_min' : mmin,
'metric_min_index' : mmin_index,
'metric_max' : mmax,
'metric_max_index' : mmax_index,
'metric_mean' : mean,
'metric_median' : median,
}
return(ret)
#
#
#
......@@ -182,24 +305,30 @@ def do_img_metrics(metric_def, metric_data, fname):
# print output['metric'].size
plt.scatter(output['frame'], output['metric'], color='b', alpha=0.33, s = 1 , label=metric_def['id'])
plt.plot([0, output['frame'][metric_data['metric_count']-1]],[ metric_def['min_limit'],metric_def['min_limit']], 'r-', lw=2, label='min limit') # Red straight line
if 'min_limit' in metric_def:
plt.plot([0, output['frame'][metric_data['metric_count']-1]],[ metric_def['min_limit'],metric_def['min_limit']], 'r-', lw=2, label='min limit') # Red straight line
plt.title('Physical throughput ('+metric_def['unit_of_meas']+')')
plt.title(metric_def['id'] +' ('+metric_def['unit_of_meas']+')')
plt.xlabel('frame')
plt.ylabel(metric_def['id'])
# Set graphic minimum Y axis
# -------------------------
if metric_data['metric_min'] == 0 :
plt.ylim(ymin=-metric_def['min_limit']/10)
if metric_data['metric_min'] < 0:
plt.ylim(ymin=metric_data['metric_min']+metric_data['metric_min']/10)
else :
plt.ylim(ymin=0)
y_axis_max = 0
if metric_data['metric_max'] > metric_def['min_limit']:
y_axis_max =metric_data['metric_max']+metric_data['metric_max']/10
if 'min_limit' in metric_def:
if metric_data['metric_max'] > metric_def['min_limit']:
y_axis_max =metric_data['metric_max']+metric_data['metric_max']/10
else:
y_axis_max =metric_def['min_limit']+metric_def['min_limit']/10
else:
y_axis_max =metric_def['min_limit']+metric_def['min_limit']/10
y_axis_max =metric_data['metric_max']+metric_data['metric_max']/10
plt.ylim(ymax=y_axis_max)
......@@ -219,9 +348,6 @@ def do_img_metrics(metric_def, metric_data, fname):
def do_extract_traffic_metrics(args):
print ""
print "do_extract_traffic_metrics ... "
fname = args['file']
# print(fname)
......@@ -325,7 +451,8 @@ def do_img_traffic(traffic_data, fname):
ax1.set_xlim(xmax=np.amax(output['interval_stop']))
text='min: '+str(traffic_data['bw_min'])+'\nmax: '+str(traffic_data['bw_max'])+'\nmean: '+str(traffic_data['bw_mean'])+'\nmedian: '+str(traffic_data['bw_median'])
ax1.text( np.amax(output['interval_stop'])+10,0,text)
ax1.set_xlabel('time (s)')
ax1.set_ylabel(' ')
ax2=plt.subplot(312)
plt.plot(output['interval_stop'], output['jitter'], color='b' )
......@@ -334,6 +461,8 @@ def do_img_traffic(traffic_data, fname):
ax2.set_ylim(ymin=-1)
text='min: '+str(traffic_data['jitter_min'])+'\nmax: '+str(traffic_data['jitter_max'])+'\nmean: '+str(traffic_data['jitter_mean'])+'\nmedian: '+str(traffic_data['jitter_median'])
ax2.text( np.amax(output['interval_stop'])+10,0,text)
ax2.set_xlabel('time (s)')
ax2.set_ylabel(' ')
ax3=plt.subplot(313)
plt.plot(output['interval_stop'], output['rate_lost'], color='b')
......@@ -342,10 +471,11 @@ def do_img_traffic(traffic_data, fname):
ax3.set_ylim(ymin=-1)
text='min: '+str(traffic_data['rl_min'])+'\nmax: '+str(traffic_data['rl_max'])+'\nmean: '+str(traffic_data['rl_mean'])+'\nmedian: '+str(traffic_data['rl_median'])
ax3.text( np.amax(output['interval_stop'])+10,0,text)
ax3.set_xlabel('time (s)')
ax3.set_ylabel(' ')
# plt.title('Physical throughput ('+metric_def['unit_of_meas']+')')
plt.xlabel('time (s)')
# plt.xlabel('time (s)')
# plt.ylabel(metric_def['id'])
# Set graphic minimum Y axis
......@@ -365,7 +495,7 @@ def do_img_traffic(traffic_data, fname):
plt.tight_layout()
-lgd = plt.legend(prop=fontP, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
+# lgd = plt.legend(prop=fontP, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
mng = plt.get_current_fig_manager()
plt.savefig(fname, bbox_inches='tight')
plt.close()
......@@ -395,7 +525,7 @@ def check_cell_synchro(fname):
m = re.search('AUTOTEST Cell Sync \:', line)
if m :
-print line
+#print line
return 'CELL_SYNCH'
return 'CELL_NOT_SYNCH'
......@@ -407,7 +537,7 @@ def check_exec_seg_fault(fname):
for line in f:
m = re.search('Segmentation fault', line)
if m :
-print line
+#print line
return 'SEG_FAULT'
return 'NO_SEG_FAULT'
......
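do_extract_metrics_new() above relies on np.fromregex to turn each matching log line into one row of a structured array whose fields follow the np_format list (id0/metric0/uom0, ..., frame). A self-contained sketch with a simplified pattern; the log content is illustrative:

import numpy as np
from io import StringIO

dtype = [('id0', 'S20'), ('metric0', float), ('frame', int)]
log = StringIO('AUTOTEST Metric : UE_RX_OFFSET = 12 (frame = 10) \n'
               'AUTOTEST Metric : UE_RX_OFFSET = 15 (frame = 20) \n')
out = np.fromregex(log, r'(UE_RX_OFFSET) = (\d+) \(frame = (\d+)\)', dtype)
print('mean = %s, frames = %s' % (out['metric0'].mean(), out['frame']))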
......@@ -972,6 +972,11 @@ unsigned char phy_threegpplte_turbo_decoder8(short *y,
} else
n2 = n;
+if(n2<256)
+{
+printf("phy_threegpplte_turbo_decoder8 : frame length < 256\n");
+return 255;
+}
for (iind=0; iind < 188 && f1f2mat[iind].nb_bits != n; iind++);
......