Commit 5c74bca5 authored by hardy

Merge remote-tracking branch 'origin/ci-improvements' into integration_2022_wk03_b

parents c9a8b466 425bb149
@@ -221,7 +221,7 @@ class Containerize():
         if (self.ranAllowMerge):
             imageTag = 'ci-temp'
             if self.ranTargetBranch == 'develop':
-                mySSH.command('git diff HEAD..origin/develop -- docker/Dockerfile.base' + self.dockerfileprefix + ' | grep --colour=never -i INDEX', '\$', 5)
+                mySSH.command('git diff HEAD..origin/develop -- cmake_targets/build_oai cmake_targets/tools/build_helper docker/Dockerfile.base' + self.dockerfileprefix + ' | grep --colour=never -i INDEX', '\$', 5)
                 result = re.search('index', mySSH.getBefore())
                 if result is not None:
                     forceBaseImageBuild = True
@@ -341,7 +341,7 @@ class Containerize():
         # Remove all intermediate build images
         if self.ranAllowMerge and forceBaseImageBuild:
             mySSH.command(self.cli + ' image rm ' + baseImage + ':' + baseTag + ' || true', '\$', 30)
-        mySSH.command(self.cli + ' image rm ran-build:' + imageTag + ' || true','\$', 5)
+        mySSH.command(self.cli + ' image rm ran-build:' + imageTag + ' || true','\$', 30)
         # Cleaning any created tmp volume
         mySSH.command(self.cli + ' volume prune --force || true','\$', 15)
         mySSH.close()
@@ -586,11 +586,20 @@ class Containerize():
         mySSH.command('cd ' + lSourcePath + '/' + self.yamlPath[self.eNB_instance], '\$', 5)
         # Currently support only one
         mySSH.command('docker-compose --file ci-docker-compose.yml config', '\$', 5)
+        containerName = ''
+        containerToKill = False
         result = re.search('container_name: (?P<container_name>[a-zA-Z0-9\-\_]+)', mySSH.getBefore())
         if self.eNB_logFile[self.eNB_instance] == '':
             self.eNB_logFile[self.eNB_instance] = 'enb_' + HTML.testCase_id + '.log'
         if result is not None:
             containerName = result.group('container_name')
+            containerToKill = True
+        if containerToKill:
+            mySSH.command('docker inspect ' + containerName, '\$', 30)
+            result = re.search('Error: No such object: ' + containerName, mySSH.getBefore())
+            if result is not None:
+                containerToKill = False
+        if containerToKill:
             mySSH.command('docker kill --signal INT ' + containerName, '\$', 30)
             time.sleep(5)
             mySSH.command('docker logs ' + containerName + ' > ' + lSourcePath + '/cmake_targets/' + self.eNB_logFile[self.eNB_instance], '\$', 30)
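The `containerToKill` guard above only sends `docker kill` when `docker inspect` confirms the container actually exists. A minimal, hedged sketch of the same check outside the CI framework (the container name is a placeholder, not taken from this commit):

```python
# Hedged sketch of the existence check, using plain subprocess instead of the SSH wrapper.
import subprocess

def container_exists(name: str) -> bool:
    # `docker inspect` exits non-zero and reports "No such object" when the container is absent
    res = subprocess.run(['docker', 'inspect', name],
                         stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    return res.returncode == 0

if container_exists('rfsim4g-oai-enb'):   # placeholder name
    subprocess.run(['docker', 'kill', '--signal', 'INT', 'rfsim4g-oai-enb'])
```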
@@ -603,20 +612,26 @@ class Containerize():
         mySSH.close()
         # Analyzing log file!
-        copyin_res = mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/' + self.eNB_logFile[self.eNB_instance], '.')
+        if containerToKill:
+            copyin_res = mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/' + self.eNB_logFile[self.eNB_instance], '.')
+        else:
+            copyin_res = 0
         nodeB_prefix = 'e'
         if (copyin_res == -1):
             HTML.htmleNBFailureMsg='Could not copy ' + nodeB_prefix + 'NB logfile to analyze it!'
             HTML.CreateHtmlTestRow('N/A', 'KO', CONST.ENB_PROCESS_NOLOGFILE_TO_ANALYZE)
         else:
-            logging.debug('\u001B[1m Analyzing ' + nodeB_prefix + 'NB logfile \u001B[0m ' + self.eNB_logFile[self.eNB_instance])
-            logStatus = RAN.AnalyzeLogFile_eNB(self.eNB_logFile[self.eNB_instance], HTML)
+            if containerToKill:
+                logging.debug('\u001B[1m Analyzing ' + nodeB_prefix + 'NB logfile \u001B[0m ' + self.eNB_logFile[self.eNB_instance])
+                logStatus = RAN.AnalyzeLogFile_eNB(self.eNB_logFile[self.eNB_instance], HTML)
+            else:
+                logStatus = 0
             if (logStatus < 0):
                 HTML.CreateHtmlTestRow(RAN.runtime_stats, 'KO', logStatus)
             else:
                 HTML.CreateHtmlTestRow(RAN.runtime_stats, 'OK', CONST.ALL_PROCESSES_OK)
         # all the xNB run logs shall be on the server 0 for logCollecting
-        if self.eNB_serverId[self.eNB_instance] != '0':
+        if containerToKill and self.eNB_serverId[self.eNB_instance] != '0':
             mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './' + self.eNB_logFile[self.eNB_instance], self.eNBSourceCodePath + '/cmake_targets/')
         logging.info('\u001B[1m Undeploying OAI Object Pass\u001B[0m')
@@ -689,7 +704,7 @@ class Containerize():
     def CaptureOnDockerNetworks(self):
         cmd = 'cd ' + self.yamlPath[0] + ' && docker-compose -f docker-compose-ci.yml config | grep com.docker.network.bridge.name | sed -e "s@^.*name: @@"'
         networkNames = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=10)
-        cmd = 'sudo nohup tshark -f "not tcp and not arp and not port 53 and not host archive.ubuntu.com and not host security.ubuntu.com"'
+        cmd = 'sudo nohup tshark -f "not tcp and not arp and not port 53 and not host archive.ubuntu.com and not host security.ubuntu.com and not port 2152"'
         for name in networkNames.split('\n'):
             res = re.search('rfsim', name)
             if res is not None:
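Port 2152 is the GTP-U user-plane port, so the extended filter keeps the rf-simulator captures down to signalling traffic. A hedged sketch of how such a capture-filter string can be assembled before being handed to tshark (the helper name is illustrative, not part of this commit):

```python
# Illustrative only: build a tshark capture filter that drops noisy traffic.
# 2152/UDP is GTP-U (user plane), 53 is DNS.
def build_capture_filter(extra_excludes=()):
    clauses = ['not tcp', 'not arp', 'not port 53',
               'not host archive.ubuntu.com', 'not host security.ubuntu.com',
               'not port 2152']
    clauses += list(extra_excludes)
    return ' and '.join(clauses)

print(build_capture_filter())   # paste into: tshark -f "<filter>" ...
```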
@@ -700,7 +715,7 @@ class Containerize():
         logging.debug(cmd)
         networkNames = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=10)

-    def UndeployGenObject(self, HTML):
+    def UndeployGenObject(self, HTML, RAN):
         self.exitStatus = 0
         ymlPath = self.yamlPath[0].split('/')
         logPath = '../cmake_targets/log/' + ymlPath[1]
@@ -730,8 +745,36 @@ class Containerize():
             cmd = 'cd ' + self.yamlPath[0] + ' && docker logs ' + cName + ' > ' + cName + '.log 2>&1'
             logging.debug(cmd)
             deployStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=30)
+        fullStatus = True
         if anyLogs:
-            cmd = 'mkdir -p '+ logPath + ' && mv ' + self.yamlPath[0] + '/*.log ' + logPath
+            cmd = 'mkdir -p '+ logPath + ' && cp ' + self.yamlPath[0] + '/*.log ' + logPath
+            logging.debug(cmd)
+            deployStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=10)
+            # Analyzing log file!
+            listOfPossibleRanContainers = ['enb', 'gnb', 'cu', 'du']
+            for container in listOfPossibleRanContainers:
+                filename = self.yamlPath[0] + '/rfsim?g-oai-' + container + '.log'
+                cmd = 'ls ' + filename
+                containerStatus = True
+                try:
+                    lsStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=10)
+                    filename = str(lsStatus).strip()
+                except:
+                    containerStatus = False
+                if not containerStatus:
+                    continue
+                logging.debug('\u001B[1m Analyzing xNB logfile ' + filename + ' \u001B[0m')
+                # For the moment just assume this exists
+                logStatus = RAN.AnalyzeLogFile_eNB(filename, HTML)
+                if (logStatus < 0):
+                    fullStatus = False
+                    HTML.CreateHtmlTestRow(RAN.runtime_stats, 'KO', logStatus)
+                else:
+                    HTML.CreateHtmlTestRow(RAN.runtime_stats, 'OK', CONST.ALL_PROCESSES_OK)
+            cmd = 'rm ' + self.yamlPath[0] + '/*.log'
             logging.debug(cmd)
             deployStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=10)
         if self.tsharkStarted:
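The new loop shells out to `ls` with an `rfsim?g-oai-<container>.log` pattern and simply skips containers whose log file is missing. An equivalent, hedged sketch using Python's glob module instead of `ls` (the directory path is a placeholder standing in for `self.yamlPath[0]`):

```python
# Hedged alternative to the ls-based lookup above; yaml_dir is a placeholder.
import glob
import os

yaml_dir = 'yaml_files/5g_rfsimulator'          # stands in for self.yamlPath[0]
for container in ['enb', 'gnb', 'cu', 'du']:
    matches = glob.glob(os.path.join(yaml_dir, f'rfsim?g-oai-{container}.log'))
    if not matches:
        continue                                 # no such container in this deployment
    for filename in matches:
        print(f'would analyze {filename}')       # stands in for RAN.AnalyzeLogFile_eNB(...)
```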
@@ -747,6 +790,7 @@ class Containerize():
             logging.debug(cmd)
             copyStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=10)
+        logging.debug('\u001B[1m Undeploying \u001B[0m')
         cmd = 'cd ' + self.yamlPath[0] + ' && docker-compose -f docker-compose-ci.yml down'
         logging.debug(cmd)
         try:
@@ -763,8 +807,12 @@ class Containerize():
             logging.debug(cmd)
             deployStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=100)
-        HTML.CreateHtmlTestRow('n/a', 'OK', CONST.ALL_PROCESSES_OK)
-        logging.info('\u001B[1m Undeploying OAI Object(s) PASS\u001B[0m')
+        if fullStatus:
+            HTML.CreateHtmlTestRow('n/a', 'OK', CONST.ALL_PROCESSES_OK)
+            logging.info('\u001B[1m Undeploying OAI Object(s) PASS\u001B[0m')
+        else:
+            HTML.CreateHtmlTestRow('n/a', 'KO', CONST.ALL_PROCESSES_OK)
+            logging.info('\u001B[1m Undeploying OAI Object(s) FAIL\u001B[0m')

     def PingFromContainer(self, HTML):
         self.exitStatus = 0
...
@@ -133,7 +133,7 @@ class EPCManagement():
         mySSH.open(self.IPAddress, self.UserName, self.Password)
         if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
             logging.debug('Using the OAI EPC Release 14 MME in Docker')
-            mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup tshark -i eth0 -i lo:s10 -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
+            mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup tshark -i eth0 -i lo:s10 -f "not port 2152" -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
             time.sleep(5)
             mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup ./bin/oai_mme -c ./etc/' + self.mmeConfFile + ' > mme_check_run.log 2>&1"', '\$', 5)
         elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
@@ -198,8 +198,8 @@ class EPCManagement():
         mySSH.open(self.IPAddress, self.UserName, self.Password)
         if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
             logging.debug('Using the OAI EPC Release 14 SPGW-CUPS in Docker')
-            mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup tshark -i eth0 -i lo:p5c -i lo:s5c -w /tmp/spgwc_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
-            mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "nohup tshark -i eth0 -w /tmp/spgwu_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
+            mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup tshark -i eth0 -i lo:p5c -i lo:s5c -f "not port 2152" -w /tmp/spgwc_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
+            mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "nohup tshark -i eth0 -f "not port 2152" -w /tmp/spgwu_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
             time.sleep(5)
             mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup ./bin/oai_spgwc -o -c ./etc/spgw_c.conf > spgwc_check_run.log 2>&1"', '\$', 5)
             time.sleep(5)
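One thing worth double-checking with the added `-f "not port 2152"` argument is that it sits inside a command string already wrapped in double quotes for `bash -c "..."`; when building such strings it is usually safer to single-quote the inner capture filter so the inner quotes do not close the outer ones. A hedged sketch of assembling the command in Python (the container name and pcap path are illustrative):

```python
# Illustrative sketch: single-quote the capture filter inside the bash -c "..." string.
container = 'prod-oai-mme'                         # placeholder
pcap = '/tmp/mme_check_run.pcap'
inner = f"nohup tshark -i eth0 -i lo:s10 -f 'not port 2152' -w {pcap} > /dev/null 2>&1"
cmd = f'docker exec -d {container} /bin/bash -c "{inner}"'
print(cmd)   # would then be passed to mySSH.command(cmd, '\$', 5)
```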
@@ -532,6 +532,7 @@ class EPCManagement():
         logging.debug('Terminating OAI CN5G')
         mySSH.command('cd /opt/oai-cn5g-fed/docker-compose', '\$', 5)
         mySSH.command('./core-network.sh stop nrf spgwu', '\$', 60)
+        mySSH.command('docker volume prune --force || true', '\$', 60)
         time.sleep(2)
         mySSH.command('tshark -r /tmp/oai-cn5g.pcap | egrep --colour=never "Tracking area update" ','\$', 30)
         result = re.search('Tracking area update request', mySSH.getBefore())
@@ -760,6 +761,7 @@ class EPCManagement():
             nbContainers += 1
         mySSH.command('docker-compose down', '\$', 60)
+        mySSH.command('docker volume prune --force || true', '\$', 60)
         mySSH.command('docker inspect --format=\'{{.State.Health.Status}}\' ' + listOfContainers, '\$', 10)
         noMoreContainerNb = mySSH.getBefore().count('No such object')
         mySSH.command('docker inspect --format=\'{{.Name}}\' prod-oai-public-net prod-oai-private-net', '\$', 10)
...
@@ -909,7 +909,7 @@ elif re.match('^TesteNB$', mode, re.IGNORECASE) or re.match('^TestUE$', mode, re
                     if CONTAINERS.exitStatus==1:
                         RAN.prematureExit = True
                 elif action == 'UndeployGenObject':
-                    CONTAINERS.UndeployGenObject(HTML)
+                    CONTAINERS.UndeployGenObject(HTML, RAN)
                     if CONTAINERS.exitStatus==1:
                         RAN.prematureExit = True
                 elif action == 'PingFromContainer':
...
@@ -159,6 +159,7 @@ class RANManagement():
         # Raphael: here add a check if git clone or git fetch went smoothly
         mySSH.command('git config user.email "jenkins@openairinterface.org"', '\$', 5)
         mySSH.command('git config user.name "OAI Jenkins"', '\$', 5)
+        mySSH.command('git advice.detachedHead false', '\$', 5)
         # Checking the BUILD INFO file
         if not self.backgroundBuild:
             mySSH.command('ls *.txt', '\$', 5)
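For reference, the detached-HEAD hint this line targets is normally silenced through `git config` in the stock git CLI. A minimal, hedged sketch of that call, not taken from this commit:

```python
# Hedged sketch: with the standard git CLI, advice settings are written via `git config`.
import subprocess
subprocess.run(['git', 'config', 'advice.detachedHead', 'false'], check=False)
```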
@@ -203,6 +204,7 @@ class RANManagement():
         else:
             logging.debug('Merging with the target branch: ' + self.ranTargetBranch)
             mySSH.command('git merge --ff origin/' + self.ranTargetBranch + ' -m "Temporary merge for CI"', '\$', 5)
+        logging.debug(mySSH.getBefore()) # print what git said when merging/checking out
         mySSH.command('source oaienv', '\$', 5)
         mySSH.command('cd cmake_targets', '\$', 5)
         mySSH.command('mkdir -p log', '\$', 5)
@@ -212,6 +214,7 @@ class RANManagement():
         mySSH.command('echo "./build_oai ' + self.Build_eNB_args + '" > ./my-lte-softmodem-build.sh', '\$', 5)
         mySSH.command('chmod 775 ./my-lte-softmodem-build.sh', '\$', 5)
         mySSH.command('echo ' + lPassWord + ' | sudo -S ls', '\$', 5)
+        logging.debug(mySSH.getBefore()) # print current directory contents for verification
         mySSH.command('echo $USER; nohup sudo -E ./my-lte-softmodem-build.sh' + ' > ' + lSourcePath + '/cmake_targets/compile_oai_enb.log ' + ' 2>&1 &', lUserName, 5)
         mySSH.close()
         HTML.CreateHtmlTestRow(self.Build_eNB_args, 'OK', CONST.ALL_PROCESSES_OK)
...@@ -371,10 +374,11 @@ class RANManagement(): ...@@ -371,10 +374,11 @@ class RANManagement():
result = re.search('interfaceToUse=(?P<eth_interface>[a-zA-Z0-9\-\_]+)done', mySSH.getBefore()) result = re.search('interfaceToUse=(?P<eth_interface>[a-zA-Z0-9\-\_]+)done', mySSH.getBefore())
if result is not None: if result is not None:
eth_interface = result.group('eth_interface') eth_interface = result.group('eth_interface')
logging.debug('\u001B[1m Launching tshark on interface ' + eth_interface + '\u001B[0m') fltr = 'port 38412 or port 36412 or port 36422' # NGAP, S1AP, X2AP
logging.debug('\u001B[1m Launching tshark on interface ' + eth_interface + ' with filter "' + fltr + '"\u001B[0m')
pcapfile = pcapfile_prefix + self.testCase_id + '_log.pcap' pcapfile = pcapfile_prefix + self.testCase_id + '_log.pcap'
mySSH.command('echo ' + lPassWord + ' | sudo -S rm -f /tmp/' + pcapfile , '\$', 5) mySSH.command('echo ' + lPassWord + ' | sudo -S rm -f /tmp/' + pcapfile , '\$', 5)
mySSH.command('echo $USER; nohup sudo -E tshark -i ' + eth_interface + ' -w /tmp/' + pcapfile + ' > /dev/null 2>&1 &','\$', 5) mySSH.command('echo $USER; nohup sudo -E tshark -i ' + eth_interface + ' -f "' + fltr + '" -w /tmp/' + pcapfile + ' > /dev/null 2>&1 &','\$', 5)
mySSH.close() mySSH.close()
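The SCTP ports in the new filter are the standard ones: 38412 for NGAP, 36412 for S1AP and 36422 for X2AP, so the resulting pcap is reduced to control-plane traffic. A hedged sketch of composing the tshark invocation string the same way the script does (the interface and file names are placeholders):

```python
# Illustrative composition of the capture command; eth0 and the pcap name are placeholders.
fltr = 'port 38412 or port 36412 or port 36422'   # NGAP, S1AP, X2AP over SCTP
eth_interface = 'eth0'
pcapfile = 'enb_000001_log.pcap'
cmd = f'nohup sudo -E tshark -i {eth_interface} -f "{fltr}" -w /tmp/{pcapfile} > /dev/null 2>&1 &'
print(cmd)
```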
@@ -390,10 +394,11 @@ class RANManagement():
             result = re.search('interfaceToUse=(?P<eth_interface>[a-zA-Z0-9\-\_]+)done', mySSH.getBefore())
             if result is not None:
                 eth_interface = result.group('eth_interface')
-                logging.debug('\u001B[1m Launching tshark on interface ' + eth_interface + '\u001B[0m')
+                fltr = 'port 38412 or port 36412 or port 36422' # NGAP, S1AP, X2AP
+                logging.debug('\u001B[1m Launching tshark on interface ' + eth_interface + ' with filter "' + fltr + '"\u001B[0m')
                 self.epcPcapFile = 'enb_' + self.testCase_id + '_s1log.pcap'
                 mySSH.command('echo ' + localEpcPassword + ' | sudo -S rm -f /tmp/' + self.epcPcapFile , '\$', 5)
-                mySSH.command('echo $USER; nohup sudo tshark -f "host ' + lIpAddr +'" -i ' + eth_interface + ' -w /tmp/' + self.epcPcapFile + ' > /tmp/tshark.log 2>&1 &', localEpcUserName, 5)
+                mySSH.command('echo $USER; nohup sudo tshark -f "host ' + lIpAddr +'" -i ' + eth_interface + ' -f "' + fltr + '" -w /tmp/' + self.epcPcapFile + ' > /tmp/tshark.log 2>&1 &', localEpcUserName, 5)
             mySSH.close()
         mySSH.open(lIpAddr, lUserName, lPassWord)
         mySSH.command('cd ' + lSourcePath, '\$', 5)
@@ -738,7 +743,10 @@ class RANManagement():
         mySSH.command('echo ' + self.eNBPassword + ' | sudo -S mv /tmp/enb_*.pcap .','\$',20)
         mySSH.command('echo ' + self.eNBPassword + ' | sudo -S mv /tmp/gnb_*.pcap .','\$',20)
         mySSH.command('echo ' + self.eNBPassword + ' | sudo -S rm -f enb.log.zip', '\$', 5)
-        mySSH.command('echo ' + self.eNBPassword + ' | sudo -S zip enb.log.zip enb*.log core* enb_*record.raw enb_*.pcap gnb_*.pcap enb_*txt physim_*.log *stats.log *monitor.pickle *monitor*.png ping*.log.png log/*/*.log log/*/*.pcap', '\$', 60)
+        mySSH.command('echo ' + self.eNBPassword + ' | sudo -S zip enb.log.zip enb*.log enb_*record.raw enb_*.pcap gnb_*.pcap enb_*txt physim_*.log *stats.log *monitor.pickle *monitor*.png ping*.log.png log/*/*.log log/*/*.pcap', '\$', 60)
+        result = re.search('core.\d+', mySSH.getBefore())
+        if result is not None:
+            mySSH.command('echo ' + self.eNBPassword + ' | sudo -S zip enb.log.zip core* ran_build/build/{lte,nr}-softmodem', '\$', 60) # add core and executable to zip
         mySSH.command('echo ' + self.eNBPassword + ' | sudo -S rm enb*.log core* enb_*record.raw enb_*.pcap gnb_*.pcap enb_*txt physim_*.log *stats.log *monitor.pickle *monitor*.png ping*.log.png log/*/*.log log/*/*.pcap', '\$', 15)
         mySSH.close()
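The zip step now looks for a `core.<pid>` file name in the previous command's output and, only in that case, archives the core together with the softmodem binaries. A hedged local sketch of the same detection with glob and a regular expression (paths are assumptions, not taken from the diff):

```python
# Hedged sketch: look for Linux core files named core.<pid> in the current directory
# and decide whether the (assumed) softmodem binaries should be archived with them.
import glob
import re

core_files = [f for f in glob.glob('core*') if re.fullmatch(r'core\.\d+', f)]
if core_files:
    to_archive = core_files + ['ran_build/build/lte-softmodem', 'ran_build/build/nr-softmodem']
    print('would zip:', to_archive)
```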
@@ -1016,12 +1024,9 @@ class RANManagement():
             datalog_rt_stats = yaml.load(f,Loader=yaml.FullLoader)
         rt_keys = datalog_rt_stats['Ref'] #we use the keys from the Ref field
-        if (os.path.isfile('./nrL1_stats.log')) and (os.path.isfile('./nrL1_stats.log')):
-            stat_files_present=True
-        else:
-            stat_files_present=False
-            logging.debug("NR Stats files for RT analysis not found")
-        if stat_files_present:
+        if os.path.isfile('./nrL1_stats.log') and os.path.isfile('./nrMAC_stats.log'):
+            # don't use CI-nrL1_stats.log, as this will increase the processing time for
+            # no reason, we just need the last occurence
             nrL1_stats = open('./nrL1_stats.log', 'r')
             nrMAC_stats = open('./nrMAC_stats.log', 'r')
             for line in nrL1_stats.readlines():
@@ -1042,6 +1047,8 @@ class RANManagement():
                     real_time_stats[k]=tmp.group(1)
             nrL1_stats.close()
             nrMAC_stats.close()
+        else:
+            logging.debug("NR Stats files for RT analysis not found")
         #stdout log file and stat log files analysis completed
         logging.debug(' File analysis (stdout, stats) completed')
...
@@ -157,22 +157,24 @@ class SSHConnection():
         if not silent:
             logging.debug(commandline)
         self.cmd2Results = ''
+        noHistoryCmd = 'unset HISTFILE; ' + commandline
         myHost = self.username + '@' + self.ipaddress
         # CAUTION: THIS METHOD IMPLIES THAT THERE ARE VALID SSH KEYS
         # BETWEEN THE PYTHON EXECUTOR NODE AND THE REMOTE HOST
         # OTHERWISE IT WON'T WORK
-        lSsh = subprocess.Popen(["ssh", "%s" % myHost, commandline],shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
+        lSsh = subprocess.Popen(["ssh", "%s" % myHost, noHistoryCmd],shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
         self.cmd2Results = str(lSsh.stdout.readlines())

     def command3(self, commandline, timeout, silent=False):
         if not silent:
             logging.debug(commandline)
         self.cmd2Results = ''
+        noHistoryCmd = 'unset HISTFILE; ' + commandline
         myHost = self.username + '@' + self.ipaddress
         # CAUTION: THIS METHOD IMPLIES THAT THERE ARE VALID SSH KEYS
         # BETWEEN THE PYTHON EXECUTOR NODE AND THE REMOTE HOST
         # OTHERWISE IT WON'T WORK
-        lSsh = subprocess.Popen(["ssh", "%s" % myHost, commandline],shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
+        lSsh = subprocess.Popen(["ssh", "%s" % myHost, noHistoryCmd],shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
         return lSsh.stdout.readlines()
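Prefixing each remote command with `unset HISTFILE` keeps the CI commands, which may embed passwords piped into sudo, out of the remote shell history. A hedged, self-contained sketch of the same wrapping (the host name is a placeholder):

```python
# Illustrative sketch of the wrapping done in command2/command3 above.
import subprocess

def ssh_no_history(host: str, commandline: str):
    # assumes passwordless SSH keys, as the surrounding comments already state
    no_history_cmd = 'unset HISTFILE; ' + commandline
    proc = subprocess.Popen(['ssh', host, no_history_cmd],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return proc.stdout.readlines()

# ssh_no_history('ci@example.org', 'hostname')   # placeholder host
```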
...
@@ -61,9 +61,16 @@ class StatMonitor():
     def collect(self,node_type):
         if node_type=='enb':
-            cmd='cat L1_stats.log MAC_stats.log PDCP_stats.log RRC_stats.log'
+            files = ["L1_stats.log", "MAC_stats.log", "PDCP_stats.log", "RRC_stats.log"]
         else: #'gnb'
-            cmd='cat nrL1_stats.log nrMAC_stats.log nrPDCP_stats.log nrRRC_stats.log'
+            files = ["nrL1_stats.log", "nrMAC_stats.log", "nrPDCP_stats.log", "nrRRC_stats.log"]
+        # append each file's contents to another file (prepended with CI-) for
+        # post-mortem/debugging analysis
+        for f in files:
+            cmd = rf'cat {f} >> CI-{f}'
+            subprocess.Popen(shlex.split(cmd))
+        # join the files for further processing
+        cmd = rf'cat {shlex.join(files)}'
         process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
         output = process.stdout.readlines()
         if node_type=='enb':
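One caveat with the appended `cat {f} >> CI-{f}` string: shell redirection only happens when a shell interprets the command, and `subprocess.Popen(shlex.split(...))` without `shell=True` passes `>>` to `cat` as a literal argument. A hedged sketch of the same CI-prefixed append done with plain Python file I/O (file names follow the list above):

```python
# Hedged sketch: append each stats file to a CI-prefixed copy using Python file I/O,
# which avoids depending on shell redirection semantics.
import shutil

files = ["nrL1_stats.log", "nrMAC_stats.log", "nrPDCP_stats.log", "nrRRC_stats.log"]
for f in files:
    try:
        with open(f, 'rb') as src, open('CI-' + f, 'ab') as dst:
            shutil.copyfileobj(src, dst)
    except FileNotFoundError:
        pass   # stats file not produced yet
```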
...
import subprocess
import time
import shlex
import re
import sys
import matplotlib.pyplot as plt
import pickle
import numpy as np
import os


def collect(d, node_type):
    if node_type=='enb':
        cmd='cat L1_stats.log MAC_stats.log PDCP_stats.log RRC_stats.log'
    else: #'gnb'
        cmd='cat nrL1_stats.log nrMAC_stats.log nrPDCP_stats.log nrRRC_stats.log'
    process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
    output = process.stdout.readlines()
    for l in output:
        tmp=l.decode("utf-8")
        result=re.match(rf'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
        if result is not None:
            d['PHR'].append(int(result.group(1)))
            d['bler'].append(float(result.group(2)))
            d['mcsoff'].append(int(result.group(3)))
            d['mcs'].append(int(result.group(4)))


def graph(d, node_type):
    figure, axis = plt.subplots(4, 1, figsize=(10, 10))

    major_ticks = np.arange(0, len(d['PHR'])+1, 1)
    axis[0].set_xticks(major_ticks)
    axis[0].set_xticklabels([])
    axis[0].plot(d['PHR'], marker='o')
    axis[0].set_xlabel('time')
    axis[0].set_ylabel('PHR')
    axis[0].set_title("PHR")

    major_ticks = np.arange(0, len(d['bler'])+1, 1)
    axis[1].set_xticks(major_ticks)
    axis[1].set_xticklabels([])
    axis[1].plot(d['bler'], marker='o')
    axis[1].set_xlabel('time')
    axis[1].set_ylabel('bler')
    axis[1].set_title("bler")

    major_ticks = np.arange(0, len(d['mcsoff'])+1, 1)
    axis[2].set_xticks(major_ticks)
    axis[2].set_xticklabels([])
    axis[2].plot(d['mcsoff'], marker='o')
    axis[2].set_xlabel('time')
    axis[2].set_ylabel('mcsoff')
    axis[2].set_title("mcsoff")

    major_ticks = np.arange(0, len(d['mcs'])+1, 1)
    axis[3].set_xticks(major_ticks)
    axis[3].set_xticklabels([])
    axis[3].plot(d['mcs'], marker='o')
    axis[3].set_xlabel('time')
    axis[3].set_ylabel('mcs')
    axis[3].set_title("mcs")

    plt.tight_layout()
    # Combine all the operations and display
    plt.savefig(node_type+'_stats_monitor.png')
    plt.show()


if __name__ == "__main__":
    node_type = sys.argv[1]  # enb or gnb
    d={}
    d['PHR']=[]
    d['bler']=[]
    d['mcsoff']=[]
    d['mcs']=[]
    cmd='ps aux | grep modem | grep -v grep'
    process=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output = process.stdout.readlines()
    while len(output)!=0 :
        collect(d, node_type)
        process=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        output = process.stdout.readlines()
        time.sleep(1)
    print('process stopped')
    with open(node_type+'_stats_monitor.pickle', 'wb') as handle:
        pickle.dump(d, handle, protocol=pickle.HIGHEST_PROTOCOL)
    graph(d, node_type)
"""
To create graphs and pickle from runtime statistics in L1,MAC,RRC,PDCP files
"""
import subprocess
import time
import shlex
import re
import sys
import pickle
import matplotlib.pyplot as plt
import numpy as np
import yaml
class StatMonitor():
def __init__(self,):
with open('stats_monitor_conf.yaml','r') as file:
self.d = yaml.load(file)
for node in self.d:
for metric in self.d[node]:
self.d[node][metric]=[]
def process_gnb (self,node_type,output):
for line in output:
tmp=line.decode("utf-8")
result=re.match(r'^.*\bdlsch_rounds\b ([0-9]+)\/([0-9]+).*\bdlsch_errors\b ([0-9]+)',tmp)
if result is not None:
self.d[node_type]['dlsch_err'].append(int(result.group(3)))
percentage=float(result.group(2))/float(result.group(1))
self.d[node_type]['dlsch_err_perc_round_1'].append(percentage)
result=re.match(r'^.*\bulsch_rounds\b ([0-9]+)\/([0-9]+).*\bulsch_errors\b ([0-9]+)',tmp)
if result is not None:
self.d[node_type]['ulsch_err'].append(int(result.group(3)))
percentage=float(result.group(2))/float(result.group(1))
self.d[node_type]['ulsch_err_perc_round_1'].append(percentage)
def process_enb (self,node_type,output):
for line in output:
tmp=line.decode("utf-8")
result=re.match(r'^.*\bPHR\b ([0-9]+).+\bbler\b ([0-9]+\.[0-9]+).+\bmcsoff\b ([0-9]+).+\bmcs\b ([0-9]+)',tmp)
if result is not None:
self.d[node_type]['PHR'].append(int(result.group(1)))
self.d[node_type]['bler'].append(float(result.group(2)))
self.d[node_type]['mcsoff'].append(int(result.group(3)))
self.d[node_type]['mcs'].append(int(result.group(4)))
def collect(self,node_type):
if node_type=='enb':
cmd='cat L1_stats.log MAC_stats.log PDCP_stats.log RRC_stats.log'
else: #'gnb'
cmd='cat nrL1_stats.log nrMAC_stats.log nrPDCP_stats.log nrRRC_stats.log'
process=subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
output = process.stdout.readlines()
if node_type=='enb':
self.process_enb(node_type,output)
else: #'gnb'
self.process_gnb(node_type,output)
def graph(self,node_type):
col = 1
figure, axis = plt.subplots(len(self.d[node_type]), col ,figsize=(10, 10))
i=0
for metric in self.d[node_type]:
major_ticks = np.arange(0, len(self.d[node_type][metric])+1, 1)
axis[i].set_xticks(major_ticks)
axis[i].set_xticklabels([])
axis[i].plot(self.d[node_type][metric],marker='o')
axis[i].set_xlabel('time')
axis[i].set_ylabel(metric)
axis[i].set_title(metric)
i+=1
plt.tight_layout()
# Combine all the operations and display
plt.savefig(node_type+'_stats_monitor.png')
plt.show()
if __name__ == "__main__":
node = sys.argv[1]#enb or gnb
mon=StatMonitor()
#collecting stats when modem process is stopped
CMD='ps aux | grep mode | grep -v grep'
process=subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
output = process.stdout.readlines()
while len(output)!=0 :
mon.collect(node)
process=subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
output = process.stdout.readlines()
time.sleep(1)
print('Process stopped')
with open(node+'_stats_monitor.pickle', 'wb') as handle:
pickle.dump(mon.d, handle, protocol=pickle.HIGHEST_PROTOCOL)
mon.graph(node)
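For context, the monitor is driven from the command line with the node type as its only argument; it keeps sampling until no softmodem process is left, then writes `<node>_stats_monitor.pickle` and `<node>_stats_monitor.png`. A hedged example of launching it alongside a running gNB (the script and YAML file names are taken from this listing, the calling pattern is an assumption):

```python
# Illustrative launch from another Python script; assumes stats_monitor.py and
# stats_monitor_conf.yaml sit in the working directory of the running softmodem.
import subprocess
monitor = subprocess.Popen(['python3', 'stats_monitor.py', 'gnb'])
# ... run the test scenario ...
monitor.wait()   # returns once the softmodem process has exited
```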
@@ -93,7 +93,7 @@
         <server_container_name>rfsim5g-oai-nr-ue</server_container_name>
         <client_container_name>rfsim5g-oai-ext-dn</client_container_name>
         <server_options>-B 12.1.1.2 -u -i 1 -s</server_options>
-        <client_options>-c 12.1.1.2 -u -i 1 -t 30 -b 400K</client_options>
+        <client_options>-c 12.1.1.2 -u -i 1 -t 30 -b 3M</client_options>
     </testCase>

     <testCase id="030002">
@@ -102,7 +102,7 @@
         <server_container_name>rfsim5g-oai-ext-dn</server_container_name>
         <client_container_name>rfsim5g-oai-nr-ue</client_container_name>
         <server_options>-u -i 1 -s</server_options>
-        <client_options>-B 12.1.1.2 -c 192.168.72.135 -u -i 1 -t 30 -b 20K</client_options>
+        <client_options>-B 12.1.1.2 -c 192.168.72.135 -u -i 1 -t 30 -b 1M</client_options>
     </testCase>
<testCase id="100001"> <testCase id="100001">
......
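The bandwidth bumps above (400K to 3M downlink, 20K to 1M uplink) raise the UDP target rates used by the rf-simulator iperf test cases. For reference, the downlink client options translate into roughly the command sketched below (options come from the test case itself; the exact iperf wrapper used by the CI is not shown in this diff):

```python
# Illustrative only: the client_options above correspond to an iperf UDP client run.
client_options = '-c 12.1.1.2 -u -i 1 -t 30 -b 3M'
cmd = f'iperf {client_options}'
print(cmd)   # iperf -c 12.1.1.2 -u -i 1 -t 30 -b 3M
```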
@@ -26,19 +26,19 @@
     <htmlTabIcon>tasks</htmlTabIcon>
     <repeatCount>1</repeatCount>
     <TestCaseRequestedList>
-        040000
+        041000
         000002
-        010000
+        011000
         000002
-        050000
-        050001
+        051000
+        051001
         000001
         010002
         080000
     </TestCaseRequestedList>
     <TestCaseExclusionList></TestCaseExclusionList>

-    <testCase id="010000">
+    <testCase id="011000">
         <class>Initialize_OAI_UE</class>
         <desc>Initialize OAI UE (X300)</desc>
         <air_interface>nr</air_interface>
@@ -51,7 +51,7 @@
     </testCase>

-    <testCase id="040000">
+    <testCase id="041000">
         <class>Initialize_eNB</class>
         <desc>Initialize gNB</desc>
         <Initialize_eNB_args>-O ci-scripts/conf_files/gnb.band78.sa.fr1.106PRB.usrpn310.conf --sa --usrp-tx-thread-config 1 --log_config.global_log_options level,nocolor,time</Initialize_eNB_args>
@@ -76,14 +76,14 @@
     </testCase>

-    <testCase id="050000">
+    <testCase id="051000">
         <class>Ping</class>
         <desc>Ping from CN to UE: 20pings in 20sec</desc>
         <ping_args>-c 20</ping_args>
         <ping_packetloss_threshold>5</ping_packetloss_threshold>
     </testCase>

-    <testCase id="050001">
+    <testCase id="051001">
         <class>Ping</class>
         <desc>Ping from CN to UE: 100pings in 20sec</desc>
         <ping_args>-c 100 -i 0.2</ping_args>
...
@@ -99,25 +99,23 @@ Options
        Makes the LTE softmodem
    --eNBocp
        Makes the OCP LTE softmodem
-   -gNB
+   --gNB
        Makes the NR softmodem
-   --nrUE
-       Makes the NR UE softmodem
    --RU
        Makes the OAI RRU
    --UE
        Makes the UE specific parts (ue_ip, usim, nvram) from the given configuration file
+   --nrUE
+       Makes the NR UE softmodem
    --UE-conf-nvram [configuration file]
        Specify conf_nvram_path (default \"$conf_nvram_path\")
    --UE-gen-nvram [output path]
        Specify gen_nvram_path (default \"$gen_nvram_path\")
-   --HWLAT
-       Makes test program for haw latency tests
-   -a | --agent
-       Enables agent for software-defined control of the eNB
    -w | --hardware
        EXMIMO, USRP, BLADERF, LMSSDR, IRIS, ADRV9371_ZC706, SIMU, AW2SORI, None (Default)
        Adds this RF board support (in external packages installation and in compilation)
+   -t | --transport
+       Selects the transport protocol type, options: None, Ethernet, benetel4g, benetel5g
    -P | --phy_simulators
        Makes the unitary tests Layer 1 simulators
    -S | --core_simulators
@@ -128,6 +126,14 @@ Options
        runs only specified test cases specified here. This flag is only valid with -s
    -V | --vcd
        Adds a debgging facility to the binary files: GUI with major internal synchronization events
+   -x | --xforms
+       Will compile with software oscilloscope features
+   --HWLAT
+       Makes test program for haw latency tests
+   --HWLAT_TEST
+       Compiles a hw latency test program
+   --verbose-ci
+       Compile with verbose instructions in CI Docker env
    --verbose-compile
        Shows detailed compilation instructions in makefile
    --cflags_processor
@@ -163,6 +169,10 @@ Options
        Build for I/Q record-playback modes
    -k | --skip-shared-libraries
        Skip build for shared libraries to reduce compilation time when building frequently for debugging purposes
+   --ninja
+       Tell cmake to use the Ninja build system. Without, will generate make files
+   --sanitize-address | -fsanitize=address
+       Enable the address sanitizer on all targets
    --ittiSIM
        Makes the itti simulator
    -h | --help
...
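The help text now documents `--ninja` and `--sanitize-address`, among others; combined with the targets already listed above, that allows invocations such as the hedged example below (the flag combination is illustrative, not prescribed by this commit):

```python
# Illustrative build invocation using options documented in the help text above.
import subprocess
cmd = './build_oai --gNB --nrUE --ninja --sanitize-address -w SIMU'
subprocess.run(cmd, shell=True, cwd='cmake_targets', check=False)
```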
@@ -756,6 +756,7 @@ check_install_oai_software() {
       libidn11-dev \
       libmysqlclient-dev \
       libpython2.7-dev \
+      libreadline-dev \
       libsctp1 \
       libsctp-dev \
       libssl-dev \
@@ -842,6 +843,7 @@ check_install_oai_software() {
       patch \
       psmisc \
       python \
+      readline-devel \
       subversion \
       xmlstarlet \
       python-pip \
...
@@ -35,4 +35,4 @@ COPY . .
 RUN /bin/sh oaienv && \
     cd cmake_targets && \
     mkdir -p log && \
-    ./build_oai --eNB --gNB --RU --UE --nrUE --ninja -w USRP --verbose-ci
+    ./build_oai --eNB --gNB --RU --UE --nrUE --ninja --build-lib all -w USRP --verbose-ci
@@ -35,4 +35,4 @@ COPY . .
 RUN /bin/sh oaienv && \
     cd cmake_targets && \
     mkdir -p log && \
-    ./build_oai --eNB --gNB --RU --UE --nrUE --ninja -w USRP --verbose-ci
+    ./build_oai --eNB --gNB --RU --UE --nrUE --ninja --build-lib all -w USRP --verbose-ci
@@ -69,6 +69,8 @@ COPY --from=enb-build \
     /oai-ran/cmake_targets/ran_build/build/libparams_libconfig.so \
     /oai-ran/cmake_targets/ran_build/build/libdfts.so \
     /oai-ran/cmake_targets/ran_build/build/liboai_iqplayer.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv_enb.so \
     /usr/local/lib/

 COPY --from=enb-base \
...
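The `libtelnetsrv*.so` files are the embedded telnet-server shared objects; copying them into the target images lets the containerized softmodem load that facility at run time (in OAI this is typically enabled with the `--telnetsrv` option, which is an assumption here, not part of this diff). A hedged sketch of checking that the library actually ships in a built image (the image tag is a placeholder):

```python
# Hedged sketch: verify the telnet-server library is present in a built image.
import subprocess
res = subprocess.run(['docker', 'run', '--rm', '--entrypoint', 'ls',
                      'oai-enb:latest', '/usr/local/lib/libtelnetsrv.so'],
                     capture_output=True, text=True)
print('present' if res.returncode == 0 else 'missing')
```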
@@ -76,6 +76,8 @@ COPY --from=enb-build \
     /oai-ran/cmake_targets/ran_build/build/libparams_libconfig.so \
     /oai-ran/cmake_targets/ran_build/build/libdfts.so \
     /oai-ran/cmake_targets/ran_build/build/liboai_iqplayer.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv_enb.so \
     /usr/local/lib/

 # Now we are copying from builder-image the UHD files.
...
@@ -74,6 +74,7 @@ COPY --from=gnb-build \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim8seg.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_orig.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
     /usr/local/lib/

 COPY --from=gnb-base \
...
@@ -78,6 +78,7 @@ COPY --from=gnb-build \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim8seg.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_orig.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
     /usr/local/lib/

 # Now we are copying from builder-image the UHD files.
...
@@ -73,6 +73,7 @@ COPY --from=lte-ue-build \
     /oai-ran/cmake_targets/ran_build/build/libparams_libconfig.so \
     /oai-ran/cmake_targets/ran_build/build/libSIMU.so \
     /oai-ran/cmake_targets/ran_build/build/libdfts.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
     /usr/local/lib/

 COPY --from=lte-ue-base \
...
@@ -80,6 +80,7 @@ COPY --from=lte-ue-build \
     /oai-ran/cmake_targets/ran_build/build/libparams_libconfig.so \
     /oai-ran/cmake_targets/ran_build/build/libSIMU.so \
     /oai-ran/cmake_targets/ran_build/build/libdfts.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
     /usr/local/lib/

 # Now we are copying from builder-image the UHD files.
...
@@ -76,6 +76,8 @@ COPY --from=nr-ue-build \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim8seg.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_orig.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv_5Gue.so \
     /usr/local/lib/

 COPY --from=nr-ue-base \
...
@@ -81,6 +81,8 @@ COPY --from=nr-ue-build \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_optim8seg.so \
     /oai-ran/cmake_targets/ran_build/build/libldpc_orig.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv.so \
+    /oai-ran/cmake_targets/ran_build/build/libtelnetsrv_5Gue.so \
     /usr/local/lib/

 # Now we are copying from builder-image the UHD files.
...