Commit 2c07f008 authored by Robert Schmidt

Merge branch 'ci-remove-unused-code' into 'develop' (!3058)

CI: remove unused code, clean up imports

* Remove the pyshark and matplotlib imports, as they are not used
* Remove stats_monitor.py, as the CI does not use it
* Clean up many pyflakes warnings about imports that we don't use (see the example check below)
parents ec06c78b 670e1fb8
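
The unused-import warnings mentioned above can be reproduced locally. Below is a minimal sketch (not part of this merge request) that runs pyflakes over the CI scripts and keeps only the unused-import findings; the "ci-scripts" path and the helper name are illustrative assumptions.

# Sketch: list pyflakes "imported but unused" warnings for the CI scripts.
# Assumes pyflakes is installed (pip install pyflakes); the "ci-scripts" path is an assumption.
import subprocess
import sys
from pathlib import Path

def unused_import_warnings(directory):
    files = [str(p) for p in Path(directory).glob("*.py")]
    # `python -m pyflakes <files...>` prints one diagnostic per line on stdout
    result = subprocess.run([sys.executable, "-m", "pyflakes", *files],
                            capture_output=True, text=True)
    return [line for line in result.stdout.splitlines() if "imported but unused" in line]

if __name__ == "__main__":
    for warning in unused_import_warnings("ci-scripts"):
        print(warning)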
@@ -50,9 +50,9 @@ CN_CONTAINERS = ["", "-c nrf", "-c amf", "-c smf", "-c upf", "-c ausf", "-c udm"
 def OC_login(cmd, ocUserName, ocPassword, ocProjectName):
     if ocUserName == '' or ocPassword == '' or ocProjectName == '':
         HELP.GenericHelp(CONST.Version)
-        sys.exit('Insufficient Parameter: no OC Credentials')
+        raise ValueError('Insufficient Parameter: no OC Credentials')
     if OCRegistry.startswith("http") or OCRegistry.endswith("/"):
-        sys.exit(f'ocRegistry {OCRegistry} should not start with http:// or https:// and not end on a slash /')
+        raise ValueError(f'ocRegistry {OCRegistry} should not start with http:// or https:// and not end on a slash /')
     ret = cmd.run(f'oc login -u {ocUserName} -p {ocPassword} --server {OCUrl}')
     if ret.returncode != 0:
         logging.error('\u001B[1m OC Cluster Login Failed\u001B[0m')
@@ -244,7 +244,7 @@ class Cluster:
         if self.testSvrId == None: self.testSvrId = self.eNBIPAddress
         if self.imageToPull == '':
             HELP.GenericHelp(CONST.Version)
-            sys.exit('Insufficient Parameter')
+            raise ValueError('Insufficient Parameter')
         logging.debug(f'Pull OC image {self.imageToPull} to server {self.testSvrId}')
         self.testCase_id = HTML.testCase_id
         cmd = cls_cmd.getConnection(self.testSvrId)
@@ -282,19 +282,19 @@ class Cluster:
     def BuildClusterImage(self, HTML):
         if self.ranRepository == '' or self.ranBranch == '' or self.ranCommitID == '':
             HELP.GenericHelp(CONST.Version)
-            sys.exit(f'Insufficient Parameter: ranRepository {self.ranRepository} ranBranch {ranBranch} ranCommitID {self.ranCommitID}')
+            raise ValueError(f'Insufficient Parameter: ranRepository {self.ranRepository} ranBranch {ranBranch} ranCommitID {self.ranCommitID}')
         lIpAddr = self.eNBIPAddress
         lSourcePath = self.eNBSourceCodePath
         if lIpAddr == '' or lSourcePath == '':
-            sys.exit('Insufficient Parameter: eNBSourceCodePath missing')
+            raise ValueError('Insufficient Parameter: eNBSourceCodePath missing')
         ocUserName = self.OCUserName
         ocPassword = self.OCPassword
         ocProjectName = self.OCProjectName
         if ocUserName == '' or ocPassword == '' or ocProjectName == '':
             HELP.GenericHelp(CONST.Version)
-            sys.exit('Insufficient Parameter: no OC Credentials')
+            raise ValueError('Insufficient Parameter: no OC Credentials')
         if self.OCRegistry.startswith("http") or self.OCRegistry.endswith("/"):
-            sys.exit(f'ocRegistry {self.OCRegistry} should not start with http:// or https:// and not end on a slash /')
+            raise ValueError(f'ocRegistry {self.OCRegistry} should not start with http:// or https:// and not end on a slash /')
         logging.debug(f'Building on cluster triggered from server: {lIpAddr}')
         self.cmd = cls_cmd.RemoteCmd(lIpAddr)
...
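
One motivation for preferring raise ValueError() over sys.exit() in the hunks above is that the caller can handle a bad-parameter error instead of the whole Python process terminating. A minimal, hypothetical caller sketch (not OAI code, not part of this merge request):

# Hypothetical caller sketch: with raise ValueError() the calling framework can
# catch a bad-parameter error and mark only that step as failed, whereas
# sys.exit() would terminate the whole process.
def run_step(step, *args):
    try:
        step(*args)
        return True
    except ValueError as exc:
        # e.g. "Insufficient Parameter: no OC Credentials" raised by OC_login()
        print(f"step {step.__name__} failed: {exc}")
        return False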
@@ -31,7 +31,6 @@ import subprocess as sp
 import os
 import paramiko
 import uuid
-import sys
 import time
 SSHTIMEOUT=7
...
@@ -36,18 +36,12 @@ import re # reg
 import logging
 import os
 import shutil
-import subprocess
 import time
-import pyshark
-import threading
-import cls_cmd
-from multiprocessing import Process, Lock, SimpleQueue
 from zipfile import ZipFile
 #-----------------------------------------------------------
 # OAI Testing modules
 #-----------------------------------------------------------
-import cls_cluster as OC
 import cls_cmd
 import sshconnection as SSH
 import helpreadme as HELP
...
@@ -25,14 +25,10 @@
 #---------------------------------------------------------------------
 #to use isfile
-import os
-import sys
 import logging
 #time.sleep
 import time
 import re
-import subprocess
-from datetime import datetime
 import yaml
 import cls_cmd
@@ -208,13 +204,6 @@ class Module_UE:
     def _enableTrace(self):
         raise Exception("not implemented")
-        mySSH = sshconnection.SSHConnection()
-        mySSH.open(self.HostIPAddress, self.HostUsername, self.HostPassword)
-        #delete old artifacts
-        mySSH.command('echo ' + ' ' + ' | sudo -S rm -rf ci_qlog','\$',5)
-        #start Trace, artifact is created in home dir
-        mySSH.command('echo $USER; nohup sudo -E QLog/QLog -s ci_qlog -f NR5G.cfg > /dev/null 2>&1 &','\$', 5)
-        mySSH.close()
     def _disableTrace(self):
         raise Exception("not implemented")
...
@@ -31,14 +31,12 @@
 #-----------------------------------------------------------
 # Import
 #-----------------------------------------------------------
-import sys # arg
 import re # reg
 import fileinput
 import logging
 import os
 import time
 import subprocess
-from multiprocessing import Process, Lock, SimpleQueue
 import constants as CONST
...
@@ -34,7 +34,6 @@
 #-----------------------------------------------------------
 import sys # arg
 import re # reg
-import pexpect # pexpect
 import time # sleep
 import os
 import logging
@@ -44,16 +43,11 @@ import json
 #import our libs
 import helpreadme as HELP
 import constants as CONST
-import cls_cluster as OC
 import sshconnection
 import cls_module
 import cls_cmd
-logging.getLogger("matplotlib").setLevel(logging.WARNING)
-import matplotlib.pyplot as plt
-import numpy as np
 #-----------------------------------------------------------
 # Helper functions used here and in other classes
 #-----------------------------------------------------------
...
@@ -34,7 +34,6 @@ import sshconnection
 #to update the HTML object
 import cls_oai_html
 import cls_cmd
-from multiprocessing import SimpleQueue
 #for log folder maintenance
 import os
 import re
...
@@ -36,8 +36,6 @@ import re # reg
 import logging
 import os
 from pathlib import Path
-import time
-from multiprocessing import Process, Lock, SimpleQueue
 #-----------------------------------------------------------
 # OAI Testing modules
...
@@ -38,8 +38,6 @@ import os
 import time
 import signal
-from multiprocessing import Process, Lock, SimpleQueue
 #-----------------------------------------------------------
 # OAI Testing modules
 #-----------------------------------------------------------
...
@@ -57,17 +57,13 @@ import cls_oai_html
 #-----------------------------------------------------------
 import sys # arg
 import re # reg
-import pexpect # pexpect
 import time # sleep
 import os
 import subprocess
 import xml.etree.ElementTree as ET
 import logging
-import datetime
 import signal
-import subprocess
 import traceback
-from multiprocessing import Process, Lock, SimpleQueue
 logging.basicConfig(
     level=logging.DEBUG,
     stream=sys.stdout,
@@ -161,8 +157,6 @@ def ExecuteActionWithParam(action):
         success = CONTAINERS.BuildRunTests(HTML)
     elif action == 'Initialize_eNB':
-        RAN.eNB_Trace=test.findtext('eNB_Trace')
-        RAN.eNB_Stats=test.findtext('eNB_Stats')
         datalog_rt_stats_file=test.findtext('rt_stats_cfg')
         if datalog_rt_stats_file is None:
             RAN.datalog_rt_stats_file='datalog_rt_stats.default.yaml'
...
 import argparse
-import os
 import re
 import subprocess
 import sys
...
@@ -36,7 +36,6 @@ import re # reg
 import logging
 import os
 import time
-from multiprocessing import Process, Lock, SimpleQueue
 import yaml
 import cls_cmd
@@ -90,8 +89,6 @@ class RANManagement():
         self.runtime_stats= ''
         self.datalog_rt_stats={}
         self.datalog_rt_stats_file='datalog_rt_stats.default.yaml'
-        self.eNB_Trace = '' #if 'yes', Tshark will be launched at initialization
-        self.eNB_Stats = '' #if 'yes', Statistics Monitor will be launched at initialization
         self.USRPIPAddress = ''
         #checkers from xml
         self.ran_checkers={}
@@ -130,23 +127,6 @@ class RANManagement():
         mySSH = SSH.SSHConnection()
         cwd = os.getcwd()
-        #Get pcap on enb and/or gnb if enabled in the xml
-        if self.eNB_Trace=='yes':
-            if self.air_interface[self.eNB_instance] == 'lte-softmodem':
-                pcapfile_prefix="enb_"
-            else:
-                pcapfile_prefix="gnb_"
-            mySSH.open(lIpAddr, lUserName, lPassWord)
-            eth_interface = 'any'
-            fltr = 'sctp'
-            logging.debug('\u001B[1m Launching tshark on xNB on interface ' + eth_interface + ' with filter "' + fltr + '"\u001B[0m')
-            pcapfile = pcapfile_prefix + self.testCase_id + '_log.pcap'
-            mySSH.command('echo ' + lPassWord + ' | sudo -S rm -f /tmp/' + pcapfile , '\$', 5)
-            mySSH.command('echo $USER; nohup sudo -E tshark -i ' + eth_interface + ' -f "' + fltr + '" -w /tmp/' + pcapfile + ' > /dev/null 2>&1 &','\$', 5)
-            mySSH.close()
         # If tracer options is on, running tshark on EPC side and capture traffic b/ EPC and eNB
         if EPC.IPAddress != "none":
             localEpcIpAddr = EPC.IPAddress
@@ -227,19 +207,6 @@ class RANManagement():
         mySSH.command('echo ' + lPassWord + ' | sudo -S rm -Rf enb_' + self.testCase_id + '.log', '\$', 5)
         mySSH.command('echo $USER; nohup sudo -E stdbuf -o0 ./my-lte-softmodem-run' + str(self.eNB_instance) + '.sh > ' + lSourcePath + '/cmake_targets/enb_' + self.testCase_id + '.log 2>&1 &', lUserName, 10)
-        #stats monitoring during runtime
-        time.sleep(20)
-        monitor_file='../ci-scripts/stats_monitor.py'
-        conf_file='../ci-scripts/stats_monitor_conf.yaml'
-        if self.eNB_Stats=='yes':
-            if self.air_interface[self.eNB_instance] == 'lte-softmodem':
-                mySSH.command('echo $USER; nohup python3 ' + monitor_file + ' ' + conf_file + ' ' + self.testCase_id + ' enb 2>&1 > enb_stats_monitor_execution.log &', '\$', 5)
-            else:
-                mySSH.command('echo $USER; nohup python3 ' + monitor_file + ' ' + conf_file + ' ' + self.testCase_id + ' gnb 2>&1 > gnb_stats_monitor_execution.log &', '\$', 5)
         self.eNBLogFiles[int(self.eNB_instance)] = 'enb_' + self.testCase_id + '.log'
         if extra_options != '':
             self.eNBOptions[int(self.eNB_instance)] = extra_options
@@ -453,9 +420,6 @@ class RANManagement():
         #debug / tentative
         mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './nrL1_stats.log', self.eNBSourceCodePath + '/cmake_targets/')
         mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './nrMAC_stats.log', self.eNBSourceCodePath + '/cmake_targets/')
-        mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './gnb_stats_monitor.pickle', self.eNBSourceCodePath + '/cmake_targets/')
-        mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './gnb_stats_monitor.png', self.eNBSourceCodePath + '/cmake_targets/')
-        #
         mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './' + fileToAnalyze, self.eNBSourceCodePath + '/cmake_targets/')
         logging.debug('\u001B[1m Analyzing ' + nodeB_prefix + 'NB logfile \u001B[0m ' + fileToAnalyze)
         logStatus = self.AnalyzeLogFile_eNB(fileToAnalyze, HTML, self.ran_checkers)
...
"""
To create graphs and pickle from runtime statistics in L1,MAC,RRC,PDCP files
"""
import subprocess
import time
import shlex
import re
import sys
import pickle
import matplotlib.pyplot as plt
import numpy as np
import yaml
import os
class StatMonitor():
def __init__(self,cfg_file):
with open(cfg_file,'r') as file:
self.d = yaml.load(file)
for node in self.d:#so far we have enb or gnb as nodes
for metric_l1 in self.d[node]: #first level of metric keys
if metric_l1!="graph": #graph is a reserved word to configure graph paging, so it is disregarded
if self.d[node][metric_l1] is None:#first level is None -> create array
self.d[node][metric_l1]=[]
else: #first level is not None -> there is a second level -> create array
for metric_l2 in self.d[node][metric_l1]:
self.d[node][metric_l1][metric_l2]=[]
def process_gnb (self,node_type,output):
for line in output:
tmp=line.decode("utf-8")
result=re.match(r'^.*\bdlsch_rounds\b ([0-9]+)\/([0-9]+).*\bdlsch_errors\b ([0-9]+)',tmp)
if result is not None:
self.d[node_type]['dlsch_err'].append(int(result.group(3)))
percentage=float(result.group(2))/float(result.group(1))
self.d[node_type]['dlsch_err_perc_round_1'].append(percentage)
result=re.match(r'^.*\bulsch_rounds\b ([0-9]+)\/([0-9]+).*\bulsch_errors\b ([0-9]+)',tmp)
if result is not None:
self.d[node_type]['ulsch_err'].append(int(result.group(3)))
percentage=float(result.group(2))/float(result.group(1))
self.d[node_type]['ulsch_err_perc_round_1'].append(percentage)
for k in self.d[node_type]['rt']:
result=re.match(rf'^.*\b{k}\b:\s+([0-9\.]+) us;\s+([0-9]+);\s+([0-9\.]+) us;',tmp)
if result is not None:
self.d[node_type]['rt'][k].append(float(result.group(3)))
def process_enb (self,node_type,output):
for line in output:
tmp=line.decode("utf-8")
result=re.match(r'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
if result is not None:
self.d[node_type]['PHR'].append(int(result.group(1)))
self.d[node_type]['bler'].append(float(result.group(2)))
self.d[node_type]['mcsoff'].append(int(result.group(3)))
self.d[node_type]['mcs'].append(int(result.group(4)))
def collect(self,testcase_id,node_type):
if node_type=='enb':
files = ["L1_stats.log", "MAC_stats.log", "PDCP_stats.log", "RRC_stats.log"]
else: #'gnb'
files = ["nrL1_stats.log", "nrMAC_stats.log", "nrPDCP_stats.log", "nrRRC_stats.log"]
#append each file's contents to another file (prepended with CI-) for debug
for f in files:
if os.path.isfile(f):
cmd = 'cat '+ f + ' >> CI-'+testcase_id+'-'+f
subprocess.Popen(cmd,shell=True)
#join the files for further processing
cmd='cat '
for f in files:
if os.path.isfile(f):
cmd += f+' '
process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
output = process.stdout.readlines()
if node_type=='enb':
self.process_enb(node_type,output)
else: #'gnb'
self.process_gnb(node_type,output)
def graph(self,testcase_id, node_type):
for page in self.d[node_type]['graph']:#work out a set a graphs per page
col = 1
figure, axis = plt.subplots(len(self.d[node_type]['graph'][page]), col ,figsize=(10, 10))
i=0
for m in self.d[node_type]['graph'][page]:#metric may refer to 1 level or 2 levels
metric_path=m.split('.')
if len(metric_path)==1:#1 level
metric_l1=metric_path[0]
major_ticks = np.arange(0, len(self.d[node_type][metric_l1])+1, 1)
axis[i].set_xticks(major_ticks)
axis[i].set_xticklabels([])
axis[i].plot(self.d[node_type][metric_l1],marker='o')
axis[i].set_xlabel('time')
axis[i].set_ylabel(metric_l1)
axis[i].set_title(metric_l1)
else:#2 levels
metric_l1=metric_path[0]
metric_l2=metric_path[1]
major_ticks = np.arange(0, len(self.d[node_type][metric_l1][metric_l2])+1, 1)
axis[i].set_xticks(major_ticks)
axis[i].set_xticklabels([])
axis[i].plot(self.d[node_type][metric_l1][metric_l2],marker='o')
axis[i].set_xlabel('time')
axis[i].set_ylabel(metric_l2)
axis[i].set_title(metric_l2)
i+=1
plt.tight_layout()
#save as png
plt.savefig(node_type+'_stats_monitor_'+testcase_id+'_'+page+'.png')
if __name__ == "__main__":
cfg_filename = sys.argv[1] #yaml file as metrics config
testcase_id = sys.argv[2] #test case id to name files accordingly, especially if we have several tests in a sequence
node = sys.argv[3]#enb or gnb
mon=StatMonitor(cfg_filename)
#collecting stats when modem process is stopped
CMD='ps aux | grep modem | grep -v grep'
process=subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
output = process.stdout.readlines()
while len(output)!=0 :
mon.collect(testcase_id,node)
process=subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
output = process.stdout.readlines()
time.sleep(1)
print('Process stopped')
with open(node+'_stats_monitor.pickle', 'wb') as handle:
pickle.dump(mon.d, handle, protocol=pickle.HIGHEST_PROTOCOL)
mon.graph(testcase_id, node)
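
For reference, the core of the removed parser is a handful of regular expressions over the softmodem stats files. A tiny standalone sketch below applies the dlsch regex from the removed process_gnb() to an invented sample line (illustrative only; real nrMAC_stats.log lines may differ):

# Sketch: the dlsch regex from the removed StatMonitor.process_gnb(), applied to
# a made-up sample line.
import re

sample = "UE 0: dlsch_rounds 1000/12, dlsch_errors 3"
result = re.match(r'^.*\bdlsch_rounds\b ([0-9]+)\/([0-9]+).*\bdlsch_errors\b ([0-9]+)', sample)
if result is not None:
    # the removed script appended group(3) to d['gnb']['dlsch_err'] and
    # group(2)/group(1) to d['gnb']['dlsch_err_perc_round_1']
    print("dlsch_err:", int(result.group(3)))
    print("dlsch_err_perc_round_1:", float(result.group(2)) / float(result.group(1)))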
@@ -107,8 +107,6 @@
         <eNB_instance>0</eNB_instance>
         <eNB_serverId>0</eNB_serverId>
         <air_interface>lte</air_interface>
-        <eNB_Trace>yes</eNB_Trace>
-        <eNB_Stats>yes</eNB_Stats>
         <USRP_IPAddress>172.21.19.13</USRP_IPAddress>
     </testCase>
@@ -120,7 +118,6 @@
         <eNB_instance>1</eNB_instance>
         <eNB_serverId>1</eNB_serverId>
         <air_interface>nr</air_interface>
-        <eNB_Stats>yes</eNB_Stats>
         <rt_stats_cfg>datalog_rt_stats.2x2.yaml</rt_stats_cfg>
         <USRP_IPAddress>172.21.19.14</USRP_IPAddress>
     </testCase>
...
@@ -111,8 +111,6 @@
         <eNB_instance>0</eNB_instance>
         <eNB_serverId>0</eNB_serverId>
         <air_interface>lte</air_interface>
-        <eNB_Trace>yes</eNB_Trace>
-        <eNB_Stats>yes</eNB_Stats>
         <USRP_IPAddress>172.21.19.13</USRP_IPAddress>
     </testCase>
@@ -124,7 +122,6 @@
         <eNB_instance>1</eNB_instance>
         <eNB_serverId>1</eNB_serverId>
         <air_interface>nr</air_interface>
-        <eNB_Stats>yes</eNB_Stats>
         <rt_stats_cfg>datalog_rt_stats.2x2.yaml</rt_stats_cfg>
         <USRP_IPAddress>172.21.19.14</USRP_IPAddress>
     </testCase>
...
@@ -114,7 +114,6 @@
         <eNB_instance>0</eNB_instance>
         <eNB_serverId>0</eNB_serverId>
         <air_interface>lte</air_interface>
-        <eNB_Trace>yes</eNB_Trace>
     </testCase>
     <testCase id="040000">
...
@@ -79,7 +79,6 @@
         <eNB_instance>0</eNB_instance>
         <eNB_serverId>0</eNB_serverId>
         <air_interface>lte</air_interface>
-        <eNB_Trace>yes</eNB_Trace>
     </testCase>
...
@@ -77,7 +77,6 @@
         <eNB_instance>0</eNB_instance>
         <eNB_serverId>0</eNB_serverId>
         <air_interface>lte</air_interface>
-        <eNB_Trace>yes</eNB_Trace>
     </testCase>
...