OpenXG-RAN · Commits · 7b76f3f2

Commit 7b76f3f2, authored Mar 25, 2021 by Thomas Schlichter

Merge remote-tracking branch 'origin/develop' into nr_power_measurement_fixes

Parents: d76c50e3, b28ac6d9

Showing 25 changed files with 1672 additions and 103 deletions (+1672 / -103)
Changed files:

  ci-scripts/Jenkinsfile-GitLab-Container             +261    -0
  ci-scripts/Jenkinsfile-tmp-multi-enb-benetel        +293    -0
  ci-scripts/Jenkinsfile-tmp-multi-enb-nsa              +9    -5
  ci-scripts/Jenkinsfile-tmp-ran                       +40    -4
  ci-scripts/Jenkinsfile-trig-nsa                      +14    -4
  ci-scripts/cls_containerize.py                       +68   -57
  ci-scripts/conf_files/benetel-4g.conf               +285    -0
  ci-scripts/conf_files/benetel-5g.conf               +317    -0
  ci-scripts/docker_log_split.py                       +92    -0
  ci-scripts/ran.py                                     +2    -1
  ci-scripts/xml_files/benetel_multi_node_build.xml    +54    -0
  ci-scripts/xml_files/benetel_nsa_base.xml           +162    -0
  ci-scripts/xml_files/container_image_build.xml        +2    -2
  ci-scripts/xml_files/fr1_multi_node_build.xml         +2    -0
  cmake_targets/CMakeLists.txt                          +3    -0
  cmake_targets/build_oai                              +33    -7
  cmake_targets/tools/build_helper                     +11    -6
  docker/Dockerfile.eNB.rhel8.2                         +3    -2
  docker/Dockerfile.eNB.ubuntu18                        +3    -2
  docker/Dockerfile.gNB.rhel8.2                         +3    -2
  docker/Dockerfile.gNB.ubuntu18                        +3    -2
  docker/Dockerfile.lteUE.rhel8.2                       +3    -3
  docker/Dockerfile.lteUE.ubuntu18                      +3    -2
  docker/Dockerfile.nrUE.rhel8.2                        +3    -2
  docker/Dockerfile.nrUE.ubuntu18                       +3    -2
ci-scripts/Jenkinsfile-GitLab-Container (new file, mode 100644)

```groovy
#!/bin/groovy
/*
 * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The OpenAirInterface Software Alliance licenses this file to You under
 * the OAI Public License, Version 1.1  (the "License"); you may not use this file
 * except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.openairinterface.org/?page_id=698
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *-------------------------------------------------------------------------------
 * For more information about the OpenAirInterface (OAI) Software Alliance:
 *      contact@openairinterface.org
 */

// Location of the executor node
def nodeExecutor = params.nodeExecutor

// Tags to shorten pipeline duration
def doMandatoryTests = false
def doFullTestsuite = false

pipeline {
  agent {
    label nodeExecutor
  }
  options {
    disableConcurrentBuilds()
    timestamps()
    gitLabConnection('OAI GitLab')
    ansiColor('xterm')
  }
  stages {
    stage ("Verify Parameters") {
      steps {
        script {
          JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
          JOB_TIMESTAMP = JOB_TIMESTAMP.trim()

          echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
          def allParametersPresent = true

          echo '\u2705 \u001B[32mVerify Labels\u001B[0m'
          if ("MERGE".equals(env.gitlabActionType)) {
            LABEL_CHECK = sh returnStdout: true, script: 'ci-scripts/checkGitLabMergeRequestLabels.sh --mr-id ' + env.gitlabMergeRequestIid
            LABEL_CHECK = LABEL_CHECK.trim()
            if (LABEL_CHECK == 'NONE') {
              def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Your merge request has none of the mandatory labels:\n\n"
              message += " - BUILD-ONLY\n"
              message += " - 4G-LTE\n"
              message += " - 5G-NR\n"
              message += " - CI\n\n"
              message += "Not performing CI due to lack of labels"
              addGitLabMRComment comment: message
              error('Not performing CI due to lack of labels')
            } else if (LABEL_CHECK == 'FULL') {
              doMandatoryTests = true
              doFullTestsuite = true
            } else if (LABEL_CHECK == 'SHORTEN-5G') {
              doMandatoryTests = true
            } else {
              def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): We will perform only build stages on your Merge Request"
              addGitLabMRComment comment: message
            }
          } else {
            doMandatoryTests = true
            doFullTestsuite = true
          }
        }
      }
    }
    stage ("Verify Guidelines") {
      steps {
        echo "Git URL is ${GIT_URL}"
        echo "GitLab Act is ${env.gitlabActionType}"
        script {
          if ("MERGE".equals(env.gitlabActionType)) {
            // since a bit, in push events, gitlabUserEmail is not populated
            gitCommitAuthorEmailAddr = env.gitlabUserEmail
            echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
            // GitLab-Jenkins plugin integration is lacking to perform the merge by itself
            // Doing it manually --> it may have merge conflicts
            sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
          } else {
            echo "Git Branch is ${GIT_BRANCH}"
            echo "Git Commit is ${GIT_COMMIT}"
            // since a bit, in push events, gitlabUserEmail is not populated
            gitCommitAuthorEmailAddr = sh returnStdout: true, script: 'git log -n1 --pretty=format:%ae ${GIT_COMMIT}'
            gitCommitAuthorEmailAddr = gitCommitAuthorEmailAddr.trim()
            echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
            sh "git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG"
          }
        }
      }
      post {
        failure {
          script {
            def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI"
            addGitLabMRComment comment: message
            currentBuild.result = 'FAILURE'
          }
        }
      }
    }
    // Build Stages are Mandatory
    // Later we will add a Ubuntu20 build
    stage ("Image Building Processes") {
      parallel {
        stage ("Ubuntu18 Build") {
          steps {
            script {
              triggerSlaveJob ('RAN-Ubuntu18-Image-Builder', 'Ubuntu18-Images-Build')
            }
          }
          post {
            always {
              script {
                finalizeSlaveJob('RAN-Ubuntu18-Image-Builder')
              }
            }
            failure {
              script {
                currentBuild.result = 'FAILURE'
              }
            }
          }
        }
        stage ("RHEL8 Build") {
          steps {
            script {
              triggerSlaveJob ('RAN-RHEL8-Image-Builder', 'RHEL8-Images-Build')
            }
          }
          post {
            always {
              script {
                finalizeSlaveJob('RAN-RHEL8-Image-Builder')
              }
            }
            failure {
              script {
                currentBuild.result = 'FAILURE'
              }
            }
          }
        }
      }
    }
  }
  post {
    always {
      script {
        emailext attachmentsPattern: '*results*.html',
                 body: '''Hi,
Here are attached HTML report files for $PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!
Regards,
OAI CI Team''',
                 replyTo: 'no-reply@openairinterface.org',
                 subject: '$PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!',
                 to: gitCommitAuthorEmailAddr
        if (fileExists('.git/CI_COMMIT_MSG')) {
          sh "rm -f .git/CI_COMMIT_MSG"
        }
      }
    }
    success {
      script {
        def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")"
        if ("MERGE".equals(env.gitlabActionType)) {
          echo "This is a MERGE event"
          addGitLabMRComment comment: message
        }
      }
    }
    failure {
      script {
        def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")"
        if ("MERGE".equals(env.gitlabActionType)) {
          echo "This is a MERGE event"
          addGitLabMRComment comment: message
        }
      }
    }
  }
}

// ----  Slave Job functions

def triggerSlaveJob (jobName, gitlabStatusName) {
  // Workaround for the "cancelled" GitLab pipeline notification
  // The slave job is triggered with the propagate false so the following commands are executed
  // Its status is now PASS/SUCCESS from a stage pipeline point of view
  // localStatus variable MUST be analyzed to properly assess the status
  localStatus = build job: jobName,
    parameters: [
      string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
      string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
      string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
      booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
      string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
    ], propagate: false
  localResult = localStatus.getResult()
  echo "${jobName} Slave Job status is ${localResult}"
  gitlabCommitStatus(name: gitlabStatusName) {
    if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
      echo "${jobName} Slave Job is OK"
    } else {
      echo "${jobName} Slave Job is KO"
      sh "ci-scripts/fail.sh"
    }
  }
}

def triggerSlaveJobNoGitLab (jobName) {
  // Workaround for the "cancelled" GitLab pipeline notification
  // The slave job is triggered with the propagate false so the following commands are executed
  // Its status is now PASS/SUCCESS from a stage pipeline point of view
  // localStatus variable MUST be analyzed to properly assess the status
  localStatus = build job: jobName,
    parameters: [
      string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
      string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
      string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
      booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
      string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
    ], propagate: false
  localResult = localStatus.getResult()
  echo "${jobName} Slave Job status is ${localResult}"
  if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
    echo "${jobName} Slave Job is OK"
  } else {
    echo "${jobName} Slave Job is KO"
    sh "ci-scripts/fail.sh"
  }
}

def finalizeSlaveJob (jobName) {
  // In case of any non-success, we are retrieving the HTML report of the last completed
  // slave job. The only drop-back is that we may retrieve the HTML report of a previous build
  fileName = "test_results-${jobName}.html"
  if (!fileExists(fileName)) {
    copyArtifacts(projectName: jobName,
      filter: 'test_results*.html',
      selector: lastCompleted())
    if (fileExists(fileName)) {
      sh "sed -i -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' ${fileName}"
      archiveArtifacts artifacts: fileName
    }
  }
}
```
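The "Verify Parameters" stage above gates the test scope on the merge-request labels. As an illustration only (this Python sketch is not part of the repository), the decision can be summarised as follows, assuming checkGitLabMergeRequestLabels.sh reduces the MR labels (BUILD-ONLY / 4G-LTE / 5G-NR / CI) to one of NONE, FULL or SHORTEN-5G:

```python
# Hypothetical sketch of the label gating in Jenkinsfile-GitLab-Container (illustrative only).
def test_scope(label_check: str) -> dict:
    if label_check == 'NONE':
        # Pipeline comments on the MR and aborts in this case.
        raise RuntimeError('Not performing CI due to lack of labels')
    scope = {'doMandatoryTests': False, 'doFullTestsuite': False}
    if label_check == 'FULL':
        scope['doMandatoryTests'] = True
        scope['doFullTestsuite'] = True
    elif label_check == 'SHORTEN-5G':
        scope['doMandatoryTests'] = True
    # Any other value: only the build stages run on the MR.
    return scope

print(test_scope('SHORTEN-5G'))  # {'doMandatoryTests': True, 'doFullTestsuite': False}
```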
ci-scripts/Jenkinsfile-tmp-multi-enb-benetel (new file, mode 100644)

```groovy
#!/bin/groovy
/*
 * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The OpenAirInterface Software Alliance licenses this file to You under
 * the OAI Public License, Version 1.1  (the "License"); you may not use this file
 * except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.openairinterface.org/?page_id=698
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *-------------------------------------------------------------------------------
 * For more information about the OpenAirInterface (OAI) Software Alliance:
 *      contact@openairinterface.org
 */

// Template Jenkins Declarative Pipeline script to run Test w/ RF HW
// Location of the python executor node shall be in the same subnet as the others servers
def pythonExecutor = params.pythonExecutor
// Location of the test XML file to be run
def testXMLFile = params.pythonTestXmlFile
def mainPythonAllXmlFiles = ""
def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
// Name of the phone resource
def ciSmartPhonesResource1 = params.SmartPhonesResource1
def ciSmartPhonesResource2 = params.SmartPhonesResource2
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
def eNB_Repository
def eNB_Branch
def eNB_CommitID
def eNB_AllowMergeRequestProcess
def eNB_TargetBranch

pipeline {
  agent {
    label pythonExecutor
  }
  options {
    disableConcurrentBuilds()
    ansiColor('xterm')
    lock(extra: [[resource: ciSmartPhonesResource2]], resource: ciSmartPhonesResource1)
  }
  stages {
    stage ("Build Init") {
      steps {
        // update the build name and description
        buildName "${params.eNB_MR}"
        buildDescription "Branch : ${params.eNB_Branch}"
      }
    }
    stage ("Verify Parameters") {
      steps {
        script {
          echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
          def allParametersPresent = true

          // It is already to late to check it
          if (params.pythonExecutor != null) {
            echo "eNB CI executor node  :   ${pythonExecutor}"
          }
          // If not present picking a default Stage Name
          if (params.pipelineTestStageName == null) {
            // picking default
            testStageName = 'Template Test Stage'
          }
          if (params.SmartPhonesResource1 == null) {
            allParametersPresent = false
          }
          if (params.SmartPhonesResource2 == null) {
            allParametersPresent = false
          }
          // 1st eNB parameters
          if (params.eNB_IPAddress == null) {
            allParametersPresent = false
          }
          if (params.eNB_SourceCodePath == null) {
            allParametersPresent = false
          }
          if (params.eNB_Credentials == null) {
            allParametersPresent = false
          }
          // 2nd eNB parameters
          if (params.eNB1_IPAddress == null) {
            allParametersPresent = false
          }
          if (params.eNB1_SourceCodePath == null) {
            allParametersPresent = false
          }
          if (params.eNB1_Credentials == null) {
            allParametersPresent = false
          }
          // 3rd eNB parameters
          if (params.eNB2_IPAddress == null) {
            allParametersPresent = false
          }
          if (params.eNB2_SourceCodePath == null) {
            allParametersPresent = false
          }
          if (params.eNB2_Credentials == null) {
            allParametersPresent = false
          }
          // the following 4 parameters should be pushed by the master trigger
          // if not present, take the job GIT variables (used for developing)
          if (params.eNB_Repository == null) {
            eNB_Repository = env.GIT_URL
          } else {
            eNB_Repository = params.eNB_Repository
          }
          echo "eNB_Repository          :   ${eNB_Repository}"
          if (params.eNB_Branch == null) {
            eNB_Branch = env.GIT_BRANCH
          } else {
            eNB_Branch = params.eNB_Branch
          }
          echo "eNB_Branch              :   ${eNB_Branch}"
          if (params.eNB_CommitID == null) {
            eNB_CommitID = env.GIT_COMMIT
          } else {
            eNB_CommitID = params.eNB_CommitID
          }
          echo "eNB_CommitID            :   ${eNB_CommitID}"
          if (params.eNB_AllowMergeRequestProcess != null) {
            eNB_AllowMergeRequestProcess = params.eNB_AllowMergeRequestProcess
            if (eNB_AllowMergeRequestProcess) {
              if (params.eNB_TargetBranch != null) {
                eNB_TargetBranch = params.eNB_TargetBranch
              } else {
                eNB_TargetBranch = 'develop'
              }
              echo "eNB_TargetBranch        :   ${eNB_TargetBranch}"
            }
          }
          if (params.EPC_IPAddress == null) {
            allParametersPresent = false
          }
          if (params.EPC_Type == null) {
            allParametersPresent = false
          }
          if (params.EPC_SourceCodePath == null) {
            allParametersPresent = false
          }
          if (params.EPC_Credentials == null) {
            allParametersPresent = false
          }
          if (params.ADB_IPAddress == null) {
            allParametersPresent = false
          }
          if (params.ADB_Credentials == null) {
            allParametersPresent = false
          }

          if (allParametersPresent) {
            echo "All parameters are present"
            if (eNB_AllowMergeRequestProcess) {
              sh "git fetch"
              sh "./ci-scripts/doGitLabMerge.sh --src-branch ${eNB_Branch} --src-commit ${eNB_CommitID} --target-branch ${eNB_TargetBranch} --target-commit latest"
            } else {
              sh "git fetch"
              sh "git checkout -f ${eNB_CommitID}"
            }
          } else {
            echo "Some parameters are missing"
            sh "./ci-scripts/fail.sh"
          }
        }
      }
    }
    stage ("Build and Test") {
      steps {
        script {
          dir ('ci-scripts') {
            echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
            // If not present picking a default XML file
            if (params.pythonTestXmlFile == null) {
              // picking default
              testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
              echo "Test XML file(default):   ${testXMLFile}"
              mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
            } else {
              String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
              for (xmlFile in myXmlTestSuite) {
                if (fileExists(xmlFile)) {
                  mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
                  echo "Test XML file         :   ${xmlFile}"
                }
              }
            }
            withCredentials ([
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB1_Credentials}", usernameVariable: 'eNB1_Username', passwordVariable: 'eNB1_Password'],
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB2_Credentials}", usernameVariable: 'eNB2_Username', passwordVariable: 'eNB2_Password'],
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ADB_Credentials}", usernameVariable: 'ADB_Username', passwordVariable: 'ADB_Password']
            ]) {
              sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --ADBIPAddress=${params.ADB_IPAddress} --ADBUserName=${ADB_Username} --ADBPassword=${ADB_Password} ${mainPythonAllXmlFiles}"
              String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
              for (xmlFile in myXmlTestSuite) {
                if (fileExists(xmlFile)) {
                  try {
                    sh "python3 main.py --mode=TesteNB --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --eNB1IPAddress=${params.eNB1_IPAddress} --eNB1UserName=${eNB1_Username} --eNB1Password=${eNB1_Password} --eNB1SourceCodePath=${params.eNB1_SourceCodePath} --eNB2IPAddress=${params.eNB2_IPAddress} --eNB2UserName=${eNB2_Username} --eNB2Password=${eNB2_Password} --eNB2SourceCodePath=${params.eNB2_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --ADBIPAddress=${params.ADB_IPAddress} --ADBUserName=${ADB_Username} --ADBPassword=${ADB_Password} --XMLTestFile=${xmlFile}"
                  } catch (Exception e) {
                    currentBuild.result = 'FAILURE'
                    buildStageStatus = false
                  }
                }
              }
              sh "python3 main.py --mode=FinalizeHtml --finalStatus=${buildStageStatus} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
            }
          }
        }
      }
    }
    stage ('Log Collection') {
      parallel {
        stage ('Log Collection (eNB - Build)') {
          steps {
            withCredentials ([
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
            ]) {
              echo '\u2705 \u001B[32mLog Collection (eNB - Build)\u001B[0m'
              sh "python3 ci-scripts/main.py --mode=LogCollectBuild --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath}"
              echo '\u2705 \u001B[32mLog Transfer (eNB - Build)\u001B[0m'
              sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/build.log.zip ./build.log.${env.BUILD_ID}.zip || true"
            }
            script {
              if (fileExists("build.log.${env.BUILD_ID}.zip")) {
                archiveArtifacts "build.log.${env.BUILD_ID}.zip"
              }
            }
          }
        }
        stage ('Log Collection (eNB - Run)') {
          steps {
            withCredentials ([
              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
            ]) {
              echo '\u2705 \u001B[32mLog Collection (eNB - Run)\u001B[0m'
              sh "python3 ci-scripts/main.py --mode=LogCollecteNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath}"
              echo '\u2705 \u001B[32mLog Transfer (eNB - Run)\u001B[0m'
              sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/enb.log.zip ./enb.log.${env.BUILD_ID}.zip || true"
            }
            script {
              if (fileExists("enb.log.${env.BUILD_ID}.zip")) {
                archiveArtifacts "enb.log.${env.BUILD_ID}.zip"
              }
              if (fileExists("ci-scripts/test_results.html")) {
                sh "mv ci-scripts/test_results.html test_results-${JOB_NAME}.html"
                sh "sed -i -e 's#TEMPLATE_JOB_NAME#${JOB_NAME}#' -e 's@build #TEMPLATE_BUILD_ID@build #${BUILD_ID}@' -e 's#Build-ID: TEMPLATE_BUILD_ID#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_STAGE_NAME#${testStageName}#' test_results-${JOB_NAME}.html"
                archiveArtifacts "test_results-${JOB_NAME}.html"
              }
            }
          }
        }
      }
    }
  }
  post {
    always {
      script {
        if (params.pipelineZipsConsoleLog != null) {
          if (params.pipelineZipsConsoleLog) {
            echo "Archiving Jenkins console log"
            sh "wget --no-check-certificate --no-proxy ${env.JENKINS_URL}/job/${env.JOB_NAME}/${env.BUILD_ID}/consoleText -O consoleText.log || true"
            sh "zip -m consoleText.log.${env.BUILD_ID}.zip consoleText.log || true"
            if (fileExists("consoleText.log.${env.BUILD_ID}.zip")) {
              archiveArtifacts "consoleText.log.${env.BUILD_ID}.zip"
            }
          }
        }
      }
    }
  }
}
```
ci-scripts/Jenkinsfile-tmp-multi-enb-nsa

```diff
@@ -34,8 +34,9 @@ def buildStageStatus = true
 def testStageName = params.pipelineTestStageName
 // Name of the phone resource
-def ciSmartPhoneResource1 = params.smartphonesResource1
-def ciSmartPhoneResource2 = params.smartphonesResource2
+def ciSmartPhonesResource1 = params.SmartPhonesResource1
+def ciSmartPhonesResource2 = params.SmartPhonesResource2
+def ciSmartPhonesResource3 = params.SmartPhonesResource3
 // Global Parameters. Normally they should be populated when the master job
 // triggers the slave job with parameters
@@ -52,7 +53,7 @@ pipeline {
   options {
     disableConcurrentBuilds()
     ansiColor('xterm')
-    lock(extra: [[resource: ciSmartPhoneResource2]], resource: ciSmartPhoneResource1)
+    lock(extra: [[resource: ciSmartPhonesResource2],[resource: ciSmartPhonesResource3]], resource: ciSmartPhonesResource1)
   }
   stages {
     stage ("Build Init") {
@@ -78,10 +79,13 @@ pipeline {
             testStageName = 'Template Test Stage'
           }
-          if (params.smartphonesResource1 == null) {
+          if (params.SmartPhonesResource1 == null) {
             allParametersPresent = false
           }
-          if (params.smartphonesResource2 == null) {
+          if (params.SmartPhonesResource2 == null) {
+            allParametersPresent = false
+          }
+          if (params.SmartPhonesResource3 == null) {
             allParametersPresent = false
           }
           // 1st eNB parameters
```
ci-scripts/Jenkinsfile-tmp-ran

```diff
@@ -241,6 +241,10 @@ pipeline {
     stage ("Terminate") {
       parallel {
         stage ('Terminate UE') {
+          // Bypassing this stage if there are no abd server defined
+          when {
+            expression {params.ADB_IPAddress != "none"}
+          }
           steps {
             echo '\u2705 \u001B[32mTerminate UE\u001B[0m'
             withCredentials ([
@@ -275,6 +279,10 @@ pipeline {
           }
         }
         stage ('Terminate SPGW') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             echo '\u2705 \u001B[32mTerminate SPGW\u001B[0m'
             withCredentials ([
@@ -292,6 +300,10 @@ pipeline {
           }
         }
         stage ('Terminate MME') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             echo '\u2705 \u001B[32mTerminate MME\u001B[0m'
             withCredentials ([
@@ -309,6 +321,10 @@ pipeline {
           }
         }
         stage ('Terminate HSS') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             echo '\u2705 \u001B[32mTerminate HSS\u001B[0m'
             withCredentials ([
@@ -371,6 +387,10 @@ pipeline {
           }
         }
         stage ('Log Collection (SPGW)') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -389,6 +409,10 @@ pipeline {
           }
         }
         stage ('Log Collection (MME)') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -407,6 +431,10 @@ pipeline {
           }
         }
         stage ('Log Collection (HSS)') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -425,6 +453,10 @@ pipeline {
           }
         }
         stage ('Log Collection (Ping)') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -443,6 +475,10 @@ pipeline {
           }
         }
         stage ('Log Collection (Iperf)') {
+          // Bypassing this stage if EPC server is not defined
+          when {
+            expression {params.EPC_IPAddress != "none"}
+          }
           steps {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -482,7 +518,7 @@ pipeline {
       // Making sure that we really shutdown every thing before leaving
       failure {
         script {
-          if (!termStatusArray[termUE]) {
+          if ((!termStatusArray[termUE]) && (params.ADB_IPAddress != "none")) {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ADB_Credentials}", usernameVariable: 'ADB_Username', passwordVariable: 'ADB_Password']
             ]) {
@@ -496,21 +532,21 @@ pipeline {
               sh "python3 ci-scripts/main.py --mode=TerminateeNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
             }
           }
-          if (!termStatusArray[termSPGW]) {
+          if ((!termStatusArray[termSPGW]) && (params.EPC_IPAddress != "none")) {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
             ]) {
               sh "python3 ci-scripts/main.py --mode=TerminateSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
             }
           }
-          if (!termStatusArray[termMME]) {
+          if ((!termStatusArray[termMME]) && (params.EPC_IPAddress != "none")) {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
             ]) {
               sh "python3 ci-scripts/main.py --mode=TerminateMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
             }
           }
-          if (!termStatusArray[termHSS]) {
+          if ((!termStatusArray[termHSS]) && (params.EPC_IPAddress != "none")) {
             withCredentials ([
               [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
             ]) {
```
ci-scripts/Jenkinsfile-trig-nsa

```diff
@@ -33,19 +33,21 @@ pipeline {
     label pythonExecutor
   }
   stages {
-    stage ("NSA Test Loop") {
+    stage ("Launcher") {
       steps {
         script {
           //retrieve MR that are opened nd with tag READY_TO_BE_MERGED
           MR_LIST = sh returnStdout: true, script: 'curl --silent "https://gitlab.eurecom.fr/api/v4/projects/oai%2Fopenairinterface5g/merge_requests?state=opened&per_page=100&labels=READY_TO_BE_MERGED" | jq ".[].iid" || true '
           echo "List of selected MR:\n${MR_LIST}"
           def MR_ARRAY = MR_LIST.split('\n')
           //for every selected MR, retrieve the branch name and the latest commit
           for (MR in MR_ARRAY) {
             SRC_BRANCH = sh returnStdout: true, script: """curl --silent "https://gitlab.eurecom.fr/api/v4/projects/oai%2Fopenairinterface5g/merge_requests/${MR}" | jq ".source_branch" || true """
             SRC_BRANCH = SRC_BRANCH.trim()
             COMMIT_ID = sh returnStdout: true, script: """curl --silent "https://gitlab.eurecom.fr/api/v4/projects/oai%2Fopenairinterface5g/merge_requests/${MR}" | jq ".sha" || true """
             COMMIT_ID = COMMIT_ID.trim()
             echo "Testing NSA on : ${MR} ${SRC_BRANCH} ${COMMIT_ID}"
-            //calling sub job
+            //calling NSA sub job
             build job: "RAN-CI-NSA-B210", wait : false, propagate : false, parameters: [
               string(name: 'eNB_MR', value: String.valueOf(MR)),
               string(name: 'eNB_Branch', value: String.valueOf(SRC_BRANCH)),
@@ -53,6 +55,14 @@ pipeline {
               string(name: 'eNB_TargetBranch', value: String.valueOf(TARGET_BRANCH)),
               booleanParam(name: 'eNB_AllowMergeRequestProcess', value: Boolean.valueOf(ALLOW_MERGE))
             ]
+            //calling Benetel sub job
+            build job: "RAN-CI-BENETEL", wait : false, propagate : false, parameters: [
+              string(name: 'eNB_MR', value: String.valueOf(MR)),
+              string(name: 'eNB_Branch', value: String.valueOf(SRC_BRANCH)),
+              string(name: 'eNB_CommitID', value: String.valueOf(COMMIT_ID)),
+              string(name: 'eNB_TargetBranch', value: String.valueOf(TARGET_BRANCH)),
+              booleanParam(name: 'eNB_AllowMergeRequestProcess', value: Boolean.valueOf(ALLOW_MERGE))
+            ]
           }
         }
       }
     }
```
ci-scripts/cls_containerize.py

```diff
@@ -35,6 +35,7 @@ import sys              # arg
 import re               # reg
 import logging
 import os
+import shutil
 import time
 from multiprocessing import Process, Lock, SimpleQueue
 from zipfile import ZipFile
@@ -124,7 +125,7 @@ class Containerize():
 			self.cli = 'docker'
 			self.dockerfileprefix = '.ubuntu18'
 		elif self.host == 'Red Hat':
-			self.cli = 'podman'
+			self.cli = 'sudo podman'
 			self.dockerfileprefix = '.rhel8.2'
 		imageNames = []
@@ -173,6 +174,7 @@ class Containerize():
 		# if the branch is not develop, then it is a merge request and we need to do
 		# the potential merge. Note that merge conflicts should already been checked earlier
 		imageTag = 'develop'
+		sharedTag = 'develop'
 		if (self.ranAllowMerge):
 			imageTag = 'ci-temp'
 			if self.ranTargetBranch == '':
@@ -189,37 +191,22 @@ class Containerize():
 			mySSH.command('sudo cp /etc/rhsm/ca/redhat-uep.pem tmp/ca/', '\$', 5)
 			mySSH.command('sudo cp /etc/pki/entitlement/*.pem tmp/entitlement/', '\$', 5)
+		#mySSH.close()
+		#return 0
 		sharedimage = 'ran-build'
 		# Let's remove any previous run artifacts if still there
-		mySSH.command(self.cli + ' image prune --force', '\$', 5)
-		mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + imageTag, '\$', 5)
+		mySSH.command(self.cli + ' image prune --force', '\$', 30)
+		if (not self.ranAllowMerge):
+			mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + sharedTag, '\$', 30)
 		for image,pattern in imageNames:
-			mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 5)
-		# Build the shared image
-		mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + imageTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
-		# Build the target image(s)
-		previousImage = sharedimage + ':' + imageTag
-		danglingShaOnes = []
-		for image,pattern in imageNames:
-			# the archived Dockerfiles have "ran-build:latest" as base image
-			# we need to update them with proper tag
-			mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + imageTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
-			mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
-			# Retrieving the dangling image(s) for the log collection
-			mySSH.command(self.cli + ' images --filter "dangling=true" --filter "since=' + previousImage + '" -q | sed -e "s#^#sha=#"', '\$', 5)
-			result = re.search('sha=(?P<imageShaOne>[a-zA-Z0-9\-\_]+)', mySSH.getBefore())
-			if result is not None:
-				danglingShaOnes.append((image, result.group('imageShaOne')))
-			previousImage = image + ':' + imageTag
-		imageTag = 'ci-temp'
-		# First verify if images were properly created.
+			mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 30)
+		# Build the shared image only on Push Events (not on Merge Requests)
+		if (not self.ranAllowMerge):
+			mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + sharedTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
+		# First verify if the shared image was properly created.
 		status = True
-		mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + imageTag, '\$', 5)
-		if mySSH.getBefore().count('No such object') != 0:
-			logging.error('Could not build properly ran-build')
+		mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + sharedTag, '\$', 5)
+		if mySSH.getBefore().count('o such image') != 0:
+			logging.error('\u001B[1m Could not build properly ran-build\u001B[0m')
 			status = False
 		else:
 			result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
@@ -240,11 +227,40 @@ class Containerize():
 				self.allImagesSize['ran-build'] = str(round(imageSize,1)) + ' Gbytes'
 			else:
 				logging.debug('ran-build size is unknown')
+		# If the shared image failed, no need to continue
+		if not status:
+			# Recover the name of the failed container?
+			mySSH.command(self.cli + ' ps --quiet --filter "status=exited" -n1 | xargs ' + self.cli + ' rm -f', '\$', 5)
+			mySSH.command(self.cli + ' image prune --force', '\$', 30)
+			mySSH.close()
+			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
+			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
+			HTML.CreateHtmlTabFooter(False)
+			sys.exit(1)
+		else:
+			# Recover build logs, for the moment only possible when build is successful
+			mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + sharedTag, '\$', 5)
+			mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
+			mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
+			mySSH.command(self.cli + ' rm -f test', '\$', 5)
+		# Build the target image(s)
 		for image,pattern in imageNames:
+			# the archived Dockerfiles have "ran-build:latest" as base image
+			# we need to update them with proper tag
+			mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + sharedTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
+			mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
+			# split the log
+			mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
+			mySSH.command('python3 ci-scripts/docker_log_split.py --logfilename=cmake_targets/log/' + image + '.log', '\$', 5)
+			# checking the status of the build
 			mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + image + ':' + imageTag, '\$', 5)
-			if mySSH.getBefore().count('No such object') != 0:
-				logging.error('Could not build properly ' + image)
+			if mySSH.getBefore().count('o such image') != 0:
+				logging.error('\u001B[1m Could not build properly ' + image + '\u001B[0m')
 				status = False
+				# Here we should check if the last container corresponds to a failed command and destroy it
+				mySSH.command(self.cli + ' ps --quiet --filter "status=exited" -n1 | xargs ' + self.cli + ' rm -f', '\$', 5)
+				self.allImagesSize[image] = 'N/A -- Build Failed'
 			else:
 				result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
 				if result is not None:
@@ -264,36 +280,24 @@ class Containerize():
 					self.allImagesSize[image] = str(round(imageSize,1)) + ' Gbytes'
 				else:
 					logging.debug('ran-build size is unknown')
-		if not status:
-			self.allImagesSize[image] = 'unknown'
-			mySSH.close()
-			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
-			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
-			#HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
-			HTML.CreateHtmlTabFooter(False)
-			sys.exit(1)
-		# Recover build logs, for the moment only possible when build is successful
-		mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + imageTag, '\$', 5)
-		mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
-		mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
-		mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
-		mySSH.command(self.cli + ' rm -f test', '\$', 5)
-		for image,shaone in danglingShaOnes:
-			mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
-			mySSH.command(self.cli + ' create --name test ' + shaone, '\$', 5)
-			mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/' + image, '\$', 5)
-			mySSH.command(self.cli + ' rm -f test', '\$', 5)
-			mySSH.command(self.cli + ' image prune --force', '\$', 5)
-		mySSH.command('cd cmake_targets', '\$', 5)
+			# Now pruning dangling images in between target builds
+			mySSH.command(self.cli + ' image prune --force', '\$', 30)
+		# Analyzing the logs
+		mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
 		mySSH.command('mkdir -p build_log_' + self.testCase_id, '\$', 5)
 		mySSH.command('mv log/* ' + 'build_log_' + self.testCase_id, '\$', 5)
-		#mySSH.close()
-		mySSH.command('cd /tmp/CI-eNB/cmake_targets', '\$', 5)
+		mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
+		mySSH.command('rm -f build_log_' + self.testCase_id + '.zip || true', '\$', 5)
 		if (os.path.isfile('./build_log_' + self.testCase_id + '.zip')):
 			os.remove('./build_log_' + self.testCase_id + '.zip')
+		if (os.path.isdir('./build_log_' + self.testCase_id)):
+			shutil.rmtree('./build_log_' + self.testCase_id)
 		mySSH.command('zip -r -qq build_log_' + self.testCase_id + '.zip build_log_' + self.testCase_id, '\$', 5)
 		mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/build_log_' + self.testCase_id + '.zip', '.')
-		# mySSH.command('rm -f build_log_' + self.testCase_id + '.zip','\$', 5)
+		mySSH.command('rm -f build_log_' + self.testCase_id + '.zip','\$', 5)
 		mySSH.close()
 		ZipFile('build_log_' + self.testCase_id + '.zip').extractall('.')
@@ -328,9 +332,16 @@ class Containerize():
 				files[fil] = errorandwarnings
 			self.collectInfo[image] = files
+		if status:
 			logging.info('\u001B[1m Building OAI Image(s) Pass\u001B[0m')
 			HTML.CreateHtmlTestRow(self.imageKind, 'OK', CONST.ALL_PROCESSES_OK)
 			HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
+		else:
+			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
+			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
+			HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
+			HTML.CreateHtmlTabFooter(False)
+			sys.exit(1)

 	def DeployObject(self, HTML, EPC):
 		if self.eNB_serverId[self.eNB_instance] == '0':
```
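The size check in the diff above relies on `docker image inspect --format='Size = {{.Size}} bytes'` printing a single line that is then parsed with a regular expression. A minimal sketch of that parsing step (the sample output string is an assumption added here for illustration; only the regex comes from the diff):

```python
import re

# Sample of what `docker image inspect --format='Size = {{.Size}} bytes' ran-build:develop`
# might print (assumed value, for illustration only).
sample_output = "Size = 2147483648 bytes"

result = re.search(r'Size *= *(?P<size>[0-9\-]+) *bytes', sample_output)
if result is not None:
    size_bytes = int(result.group('size'))
    print(f"ran-build image size: {size_bytes} bytes (~{size_bytes / 1e9:.1f} GB)")
else:
    # Mirrors the failure path: docker/podman answer "no such image" when the tag was never built.
    print("Could not build properly ran-build")
```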
ci-scripts/conf_files/benetel-4g.conf (new file, mode 100644)

```
Active_eNBs = ( "eNB-Eurecom-LTEBox");
# Asn1_verbosity, choice in: none, info, annoying
Asn1_verbosity = "none";

eNBs =
(
 {
    # real_time choice in {hard, rt-preempt, no}
    real_time   =  "no";
    ////////// Identification parameters:
    eNB_ID    =  0xe00;

    cell_type =  "CELL_MACRO_ENB";

    eNB_name  =  "eNB-Eurecom-LTEBox";

    // Tracking area code, 0x0000 and 0xfffe are reserved values
    tracking_area_code = 1;
    plmn_list = ( { mcc = 222; mnc = 01; mnc_length = 2; } );

    tr_s_preference     = "local_mac"

    ////////// Physical parameters:

    component_carriers = (
      {
        node_function                = "NGFI_RCC_IF4p5";
        node_timing                  = "synch_to_ext_device";
        node_synch_ref               = 0;
        frame_type                   = "FDD";
        tdd_config                   = 3;
        tdd_config_s                 = 0;
        prefix_type                  = "NORMAL";
        eutra_band                   = 7;
        downlink_frequency           = 2655000000L;
        uplink_frequency_offset      = -120000000;
        Nid_cell                     = 0;
        N_RB_DL                      = 100;
        Nid_cell_mbsfn               = 0;
        nb_antenna_ports             = 1;
        nb_antennas_tx               = 1;
        nb_antennas_rx               = 1;
        tx_gain                      = 90;
        rx_gain                      = 125;
        pbch_repetition              = "FALSE";
        prach_root                   = 0;
        prach_config_index           = 0;
        prach_high_speed             = "DISABLE";
        prach_zero_correlation       = 1;
        prach_freq_offset            = 90;
        pucch_delta_shift            = 1;
        pucch_nRB_CQI                = 0;
        pucch_nCS_AN                 = 0;
        pucch_n1_AN                  = 0;
        pdsch_referenceSignalPower   = -10;
        pdsch_p_b                    = 0;
        pusch_n_SB                   = 1;
        pusch_enable64QAM            = "DISABLE";
        pusch_hoppingMode            = "interSubFrame";
        pusch_hoppingOffset          = 0;
        pusch_groupHoppingEnabled    = "ENABLE";
        pusch_groupAssignment        = 0;
        pusch_sequenceHoppingEnabled = "DISABLE";
        pusch_nDMRS1                 = 1;
        phich_duration               = "NORMAL";
        phich_resource               = "ONESIXTH";
        srs_enable                   = "DISABLE";
        /*  srs_BandwidthConfig =;
            srs_SubframeConfig =;
            srs_ackNackST =;
            srs_MaxUpPts =;*/

        pusch_p0_Nominal             = -96;
        pusch_alpha                  = "AL1";
        pucch_p0_Nominal             = -104;
        msg3_delta_Preamble          = 6;
        pucch_deltaF_Format1         = "deltaF2";
        pucch_deltaF_Format1b        = "deltaF3";
        pucch_deltaF_Format2         = "deltaF0";
        pucch_deltaF_Format2a        = "deltaF0";
        pucch_deltaF_Format2b        = "deltaF0";

        rach_numberOfRA_Preambles    = 64;
        rach_preamblesGroupAConfig   = "DISABLE";
        /*
        rach_sizeOfRA_PreamblesGroupA = ;
        rach_messageSizeGroupA = ;
        rach_messagePowerOffsetGroupB = ;
        */
        rach_powerRampingStep                   = 4;
        rach_preambleInitialReceivedTargetPower = -108;
        rach_preambleTransMax                   = 10;
        rach_raResponseWindowSize               = 10;
        rach_macContentionResolutionTimer       = 48;
        rach_maxHARQ_Msg3Tx                     = 4;

        pcch_default_PagingCycle     = 128;
        pcch_nB                      = "oneT";
        bcch_modificationPeriodCoeff = 2;
        ue_TimersAndConstants_t300   = 1000;
        ue_TimersAndConstants_t301   = 1000;
        ue_TimersAndConstants_t310   = 1000;
        ue_TimersAndConstants_t311   = 10000;
        ue_TimersAndConstants_n310   = 20;
        ue_TimersAndConstants_n311   = 1;
        ue_TransmissionMode          = 1;

        //Parameters for SIB18
        rxPool_sc_CP_Len                                            = "normal";
        rxPool_sc_Period                                            = "sf40";
        rxPool_data_CP_Len                                          = "normal";
        rxPool_ResourceConfig_prb_Num                               = 20;
        rxPool_ResourceConfig_prb_Start                             = 5;
        rxPool_ResourceConfig_prb_End                               = 44;
        rxPool_ResourceConfig_offsetIndicator_present               = "prSmall";
        rxPool_ResourceConfig_offsetIndicator_choice                = 0;
        rxPool_ResourceConfig_subframeBitmap_present                = "prBs40";
        rxPool_ResourceConfig_subframeBitmap_choice_bs_buf          = "00000000000000000000";
        rxPool_ResourceConfig_subframeBitmap_choice_bs_size         = 5;
        rxPool_ResourceConfig_subframeBitmap_choice_bs_bits_unused  = 0;
        /* rxPool_dataHoppingConfig_hoppingParameter  = 0;
           rxPool_dataHoppingConfig_numSubbands       = "ns1";
           rxPool_dataHoppingConfig_rbOffset          = 0;
           rxPool_commTxResourceUC-ReqAllowed         = "TRUE"; */

        // Parameters for SIB19
        discRxPool_cp_Len                                               = "normal"
        discRxPool_discPeriod                                           = "rf32"
        discRxPool_numRetx                                              = 1;
        discRxPool_numRepetition                                        = 2;
        discRxPool_ResourceConfig_prb_Num                               = 5;
        discRxPool_ResourceConfig_prb_Start                             = 3;
        discRxPool_ResourceConfig_prb_End                               = 21;
        discRxPool_ResourceConfig_offsetIndicator_present               = "prSmall";
        discRxPool_ResourceConfig_offsetIndicator_choice                = 0;
        discRxPool_ResourceConfig_subframeBitmap_present                = "prBs40";
        discRxPool_ResourceConfig_subframeBitmap_choice_bs_buf          = "f0ffffffff";
        discRxPool_ResourceConfig_subframeBitmap_choice_bs_size         = 5;
        discRxPool_ResourceConfig_subframeBitmap_choice_bs_bits_unused  = 0;

        // SSB central frequency of NR secondary cell group (for ENDC NSA)
        nr_scg_ssb_freq = 640000;
      }
    );

    srb1_parameters :
    {
        # timer_poll_retransmit = (ms) [5, 10, 15, 20,... 250, 300, 350, ... 500]
        timer_poll_retransmit    = 80;
        # timer_reordering = (ms) [0,5, ... 100, 110, 120, ... ,200]
        timer_reordering         = 35;
        # timer_reordering = (ms) [0,5, ... 250, 300, 350, ... ,500]
        timer_status_prohibit    = 0;
        # poll_pdu = [4, 8, 16, 32 , 64, 128, 256, infinity(>10000)]
        poll_pdu                 = 4;
        # poll_byte = (kB) [25,50,75,100,125,250,375,500,750,1000,1250,1500,2000,3000,infinity(>10000)]
        poll_byte                = 99999;
        # max_retx_threshold = [1, 2, 3, 4 , 6, 8, 16, 32]
        max_retx_threshold       = 4;
    }

    # ------- SCTP definitions
    SCTP :
    {
        # Number of streams to use in input/output
        SCTP_INSTREAMS  = 2;
        SCTP_OUTSTREAMS = 2;
    };

    ////////// MME parameters:
    mme_ip_address = ( { ipv4       = "CI_MME_IP_ADDR";
                         ipv6       = "192:168:30::17";
                         active     = "yes";
                         preference = "ipv4";
                       }
                     );

    enable_measurement_reports = "yes";

    ///X2
    enable_x2         = "yes";
    t_reloc_prep      = 1000;      /* unit: millisecond */
    tx2_reloc_overall = 2000;      /* unit: millisecond */
    t_dc_prep         = 1000;      /* unit: millisecond */
    t_dc_overall      = 2000;      /* unit: millisecond */

    NETWORK_INTERFACES :
    {
        ENB_INTERFACE_NAME_FOR_S1_MME = "eth0";
        ENB_IPV4_ADDRESS_FOR_S1_MME   = "CI_ENB_IP_ADDR";
        ENB_INTERFACE_NAME_FOR_S1U    = "eth0";
        ENB_IPV4_ADDRESS_FOR_S1U      = "CI_ENB_IP_ADDR";
        ENB_PORT_FOR_S1U              = 2152; # Spec 2152
        ENB_IPV4_ADDRESS_FOR_X2C      = "127.0.0.1";
        ENB_PORT_FOR_X2C              = 36422; # Spec 36422
    };
  }
);

MACRLCs = (
  {
    num_cc           = 1;
    tr_s_preference  = "local_L1";
    tr_n_preference  = "local_RRC";
    phy_test_mode    = 0;
    puSch10xSnr      = 160;
    puCch10xSnr      = 160;
  }
);

L1s = (
  {
    num_cc              = 1;
    tr_n_preference     = "local_mac";
    prach_dtx_threshold = 150;
  }
);

RUs = (
  {
    // local_if_name = "enp129s0f0";
    local_if_name = "dpdk";
    sdr_addrs = "softmodem -m 2048 -l 35 -n 2 -b 0000:81:00.3 --proc-type auto --file-prefix ggg -- -p 0x1";
    #sdr_addrs = "softmodem -l 8 -n 2 -- -p 0x2";
    #remote_address = "127.0.0.2";
    #local_address  = "127.0.0.1";
    #local_portc    = 50000;
    #remote_portc   = 50000;
    #local_portd    = 50001;
    #remote_portd   = 50001;
    local_rf       = "no"
    tr_preference  = "raw_if4p5"
    nb_tx          = 1
    nb_rx          = 1
    att_tx         = 0
    att_rx         = 0;
    eNB_instances  = [0];
  }
);

THREAD_STRUCT = (
  {
    #three config for level of parallelism "PARALLEL_SINGLE_THREAD", "PARALLEL_RU_L1_SPLIT", or "PARALLEL_RU_L1_TRX_SPLIT"
    #parallel_config = "PARALLEL_RU_L1_TRX_SPLIT";
    parallel_config = "PARALLEL_SINGLE_THREAD";
    #two option for worker "WORKER_DISABLE" or "WORKER_ENABLE"
    worker_config   = "WORKER_ENABLE";
  }
);

NETWORK_CONTROLLER :
{
    FLEXRAN_ENABLED        = "no";
    FLEXRAN_INTERFACE_NAME = "lo";
    FLEXRAN_IPV4_ADDRESS   = "127.0.0.1";
    FLEXRAN_PORT           = 2210;
    FLEXRAN_CACHE          = "/mnt/oai_agent_cache";
    FLEXRAN_AWAIT_RECONF   = "no";
};

log_config :
{
    global_log_level     = "info";
    global_log_verbosity = "medium";
    hw_log_level         = "info";
    hw_log_verbosity     = "medium";
    phy_log_level        = "info";
    phy_log_verbosity    = "medium";
    mac_log_level        = "info";
    mac_log_verbosity    = "high";
    rlc_log_level        = "info";
    rlc_log_verbosity    = "medium";
    pdcp_log_level       = "info";
    pdcp_log_verbosity   = "medium";
    rrc_log_level        = "info";
    rrc_log_verbosity    = "medium";
};
```
ci-scripts/conf_files/benetel-5g.conf
0 → 100644
View file @
7b76f3f2
Active_gNBs
= (
"gNB-Eurecom-5GNRBox"
);
# Asn1_verbosity, choice in: none, info, annoying
Asn1_verbosity
=
"none"
;
gNBs
=
(
{
//////////
Identification
parameters
:
gNB_ID
=
0
xe00
;
cell_type
=
"CELL_MACRO_GNB"
;
gNB_name
=
"gNB-Eurecom-5GNRBox"
;
//
Tracking
area
code
,
0
x0000
and
0
xfffe
are
reserved
values
tracking_area_code
=
1
;
plmn_list
= ({
mcc
=
222
;
mnc
=
01
;
mnc_length
=
2
;});
tr_s_preference
=
"local_mac"
//////////
Physical
parameters
:
ssb_SubcarrierOffset
=
31
; //
0
;
pdsch_AntennaPorts
=
1
;
#pusch_TargetSNRx10 = 200;
#pucch_TargetSNRx10 = 200;
pusch_TargetSNRx10
=
200
;
pucch_TargetSNRx10
=
200
;
servingCellConfigCommon
= (
{
#spCellConfigCommon
physCellId
=
0
;
# downlinkConfigCommon
#frequencyInfoDL
# this is is the central frequency of SSB
absoluteFrequencySSB
=
640000
; //
641272
dl_frequencyBand
=
78
;
# the carrier frequency is assumed to be in the middle of the carrier, i.e. dl_absoluteFrequencyPointA_kHz + dl_carrierBandwidth*12*SCS_kHz/2
dl_absoluteFrequencyPointA
=
638728
; //
640000
;
#scs-SpecificCarrierList
dl_offstToCarrier
=
0
;
# subcarrierSpacing
# 0=kHz15, 1=kHz30, 2=kHz60, 3=kHz120
dl_subcarrierSpacing
=
1
;
dl_carrierBandwidth
=
106
;
#initialDownlinkBWP
#genericParameters
# this is RBstart=84,L=13 (275*(L-1))+RBstart
initialDLBWPlocationAndBandwidth
=
6366
; //
28875
; //
6366
;
#6407; #3384;
# subcarrierSpacing
# 0=kHz15, 1=kHz30, 2=kHz60, 3=kHz120
initialDLBWPsubcarrierSpacing
=
1
;
#pdcch-ConfigCommon
initialDLBWPcontrolResourceSetZero
=
0
;
initialDLBWPsearchSpaceZero
=
0
;
#pdsch-ConfigCommon
#pdschTimeDomainAllocationList (up to 16 entries)
initialDLBWPk0_0
=
0
;
#initialULBWPmappingType
#0=typeA,1=typeB
initialDLBWPmappingType_0
=
0
;
#this is SS=1,L=13
initialDLBWPstartSymbolAndLength_0
=
40
;
initialDLBWPk0_1
=
0
;
initialDLBWPmappingType_1
=
0
;
#this is SS=2,L=12
initialDLBWPstartSymbolAndLength_1
=
53
;
initialDLBWPk0_2
=
0
;
initialDLBWPmappingType_2
=
0
;
#this is SS=1,L=12
initialDLBWPstartSymbolAndLength_2
=
54
;
initialDLBWPk0_3
=
0
;
initialDLBWPmappingType_3
=
0
;
#this is SS=1,L=4 //5 (4 is for 43, 5 is for 57)
initialDLBWPstartSymbolAndLength_3
=
57
; //
43
; //
57
;
#uplinkConfigCommon
#frequencyInfoUL
ul_frequencyBand
=
78
;
#scs-SpecificCarrierList
ul_offstToCarrier
=
0
;
# subcarrierSpacing
# 0=kHz15, 1=kHz30, 2=kHz60, 3=kHz120
ul_subcarrierSpacing
=
1
;
ul_carrierBandwidth
=
106
;
pMax
=
20
;
#initialUplinkBWP
#genericParameters
initialULBWPlocationAndBandwidth
=
6366
; //
28875
; //
6366
;
#6407; #3384;
# subcarrierSpacing
# 0=kHz15, 1=kHz30, 2=kHz60, 3=kHz120
initialULBWPsubcarrierSpacing
=
1
;
#rach-ConfigCommon
#rach-ConfigGeneric
prach_ConfigurationIndex
=
4
;
#prach_msg1_FDM
#0 = one, 1=two, 2=four, 3=eight
prach_msg1_FDM
=
0
;
prach_msg1_FrequencyStart
=
74
;
zeroCorrelationZoneConfig
=
13
;
preambleReceivedTargetPower
= -
118
;
#preambleReceivedTargetPower = -104;
#preambleReceivedTargetPower = -108;
#preamblTransMax (0...10) = (3,4,5,6,7,8,10,20,50,100,200)
preambleTransMax
=
6
;
#powerRampingStep
# 0=dB0,1=dB2,2=dB4,3=dB6
powerRampingStep
=
2
;
#ra_ReponseWindow
#1,2,4,8,10,20,40,80
ra_ResponseWindow
=
5
;
#ssb_perRACH_OccasionAndCB_PreamblesPerSSB_PR
#1=oneeighth,2=onefourth,3=half,4=one,5=two,6=four,7=eight,8=sixteen
ssb_perRACH_OccasionAndCB_PreamblesPerSSB_PR
=
4
;
#oneHalf (0..15) 4,8,12,16,...60,64
ssb_perRACH_OccasionAndCB_PreamblesPerSSB
=
14
; //
15
;
#ra_ContentionResolutionTimer
#(0..7) 8,16,24,32,40,48,56,64
ra_ContentionResolutionTimer
=
7
;
rsrp_ThresholdSSB
=
19
;
#prach-RootSequenceIndex_PR
#1 = 839, 2 = 139
prach_RootSequenceIndex_PR
=
1
;
prach_RootSequenceIndex
=
1
;
# SCS for msg1, can only be 15 for 30 kHz < 6 GHz, takes precendence over the one derived from prach-ConfigIndex
#
#msg1_SubcarrierSpacing = 1,
# restrictedSetConfig
# 0=unrestricted, 1=restricted type A, 2=restricted type B
restrictedSetConfig
=
0
,
# pusch-ConfigCommon (up to 16 elements)
initialULBWPk2_0
=
2
;
initialULBWPmappingType_0
=
1
# this is SS=0 L=11
initialULBWPstartSymbolAndLength_0
=
55
;
initialULBWPk2_1
=
2
;
initialULBWPmappingType_1
=
1
;
# this is SS=0 L=12
initialULBWPstartSymbolAndLength_1
=
69
;
initialULBWPk2_2
=
7
;
initialULBWPmappingType_2
=
1
;
# this is SS=10 L=4
initialULBWPstartSymbolAndLength_2
=
52
;
msg3_DeltaPreamble
=
1
;
#p0_NominalWithGrant =-90;
p0_NominalWithGrant
=-
118
;
# pucch-ConfigCommon setup :
# pucchGroupHopping
# 0 = neither, 1= group hopping, 2=sequence hopping
pucchGroupHopping
=
0
;
hoppingId
=
40
;
#p0_nominal = -90;
p0_nominal
= -
118
;
# ssb_PositionsInBurs_BitmapPR
# 1=short, 2=medium, 3=long
ssb_PositionsInBurst_PR
=
2
;
ssb_PositionsInBurst_Bitmap
=
1
;
#0x80;
# ssb_periodicityServingCell
# 0 = ms5, 1=ms10, 2=ms20, 3=ms40, 4=ms80, 5=ms160, 6=spare2, 7=spare1
ssb_periodicityServingCell
=
2
;
# dmrs_TypeA_position
# 0 = pos2, 1 = pos3
dmrs_TypeA_Position
=
0
;
# subcarrierSpacing
# 0=kHz15, 1=kHz30, 2=kHz60, 3=kHz120
subcarrierSpacing
=
1
;
#tdd-UL-DL-ConfigurationCommon
# subcarrierSpacing
# 0=kHz15, 1=kHz30, 2=kHz60, 3=kHz120
referenceSubcarrierSpacing
=
1
;
# pattern1
# dl_UL_TransmissionPeriodicity
# 0=ms0p5, 1=ms0p625, 2=ms1, 3=ms1p25, 4=ms2, 5=ms2p5, 6=ms5, 7=ms10
dl_UL_TransmissionPeriodicity
=
6
;
nrofDownlinkSlots
=
7
; //
8
; //
7
;
nrofDownlinkSymbols
=
6
; //
0
; //
6
;
nrofUplinkSlots
=
2
;
nrofUplinkSymbols
=
4
; //
0
; //
4
;
#ssPBCH_BlockPower = 10;
ssPBCH_BlockPower
= -
35
;
}
);
# ------- SCTP definitions
SCTP
:
{
# Number of streams to use in input/output
SCTP_INSTREAMS
=
2
;
SCTP_OUTSTREAMS
=
2
;
};
//////////
MME
parameters
:
mme_ip_address
= ( {
ipv4
=
"CI_MME_IP_ADDR"
;
ipv6
=
"192:168:30::17"
;
active
=
"yes"
;
preference
=
"ipv4"
;
}
);
///
X2
enable_x2
=
"yes"
;
t_reloc_prep
=
1000
; /*
unit
:
millisecond
*/
tx2_reloc_overall
=
2000
; /*
unit
:
millisecond
*/
t_dc_prep
=
1000
; /*
unit
:
millisecond
*/
t_dc_overall
=
2000
; /*
unit
:
millisecond
*/
target_enb_x2_ip_address
= (
{
ipv4
=
"127.0.0.1"
;
ipv6
=
"192:168:30::17"
;
preference
=
"ipv4"
;
}
);
NETWORK_INTERFACES
:
{
GNB_INTERFACE_NAME_FOR_S1_MME
=
"eth0"
;
GNB_IPV4_ADDRESS_FOR_S1_MME
=
"CI_GNB_IP_ADDR"
;
GNB_INTERFACE_NAME_FOR_S1U
=
"eth0"
;
GNB_IPV4_ADDRESS_FOR_S1U
=
"CI_GNB_IP_ADDR"
;
GNB_PORT_FOR_S1U
=
2152
;
# Spec 2152
GNB_IPV4_ADDRESS_FOR_X2C
=
"127.0.0.2/24"
;
GNB_PORT_FOR_X2C
=
36422
;
# Spec 36422
};
}
);
MACRLCs
= (
{
num_cc
=
1
;
tr_s_preference
=
"local_L1"
;
tr_n_preference
=
"local_RRC"
;
}
);
L1s
= (
{
num_cc
=
1
;
tr_n_preference
=
"local_mac"
;
pusch_proc_threads
=
8
;
}
);
RUs
= (
{
local_rf
=
"no"
nb_tx
=
1
nb_rx
=
1
att_tx
=
0
att_rx
=
0
;
bands
= [
7
];
max_pdschReferenceSignalPower
= -
27
;
max_rxgain
=
114
;
eNB_instances
= [
0
];
//
clock_src
=
"internal"
;
clock_src
=
"external"
;
local_if_name
=
"dpdk"
;
sdr_addrs
=
"softmodem -m 2048 -l 34 -n 3 -b 0000:81:00.2 --proc-type auto --file-prefix hhh -- -p 0x1"
;
remote_address
=
"127.0.0.2"
;
local_address
=
"127.0.0.1"
;
local_portc
=
50000
;
remote_portc
=
50000
;
local_portd
=
50001
;
remote_portd
=
50001
;
tr_preference
=
"raw_if4p5"
}
);
THREAD_STRUCT
= (
{
#three config for level of parallelism "PARALLEL_SINGLE_THREAD", "PARALLEL_RU_L1_SPLIT", or "PARALLEL_RU_L1_TRX_SPLIT"
//
parallel_config
=
"PARALLEL_RU_L1_TRX_SPLIT"
;
parallel_config
=
"PARALLEL_SINGLE_THREAD"
;
#two option for worker "WORKER_DISABLE" or "WORKER_ENABLE"
worker_config
=
"WORKER_DISABLE"
;
}
);
log_config
:
{
global_log_level
=
"info"
;
global_log_verbosity
=
"medium"
;
hw_log_level
=
"info"
;
hw_log_verbosity
=
"medium"
;
phy_log_level
=
"info"
;
phy_log_verbosity
=
"medium"
;
mac_log_level
=
"info"
;
mac_log_verbosity
=
"high"
;
rlc_log_level
=
"info"
;
rlc_log_verbosity
=
"medium"
;
pdcp_log_level
=
"info"
;
pdcp_log_verbosity
=
"medium"
;
rrc_log_level
=
"info"
;
rrc_log_verbosity
=
"medium"
;
};
ci-scripts/docker_log_split.py
0 → 100644
View file @
7b76f3f2
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Import Libs
#-----------------------------------------------------------
import
sys
# arg
import
re
# reg
import
os
import
subprocess
class
SplitReport
():
def
__init__
(
self
):
self
.
logfilename
=
''
self
.
destinationFolder
=
''
def
split
(
self
):
self
.
destinationFolder
=
self
.
logfilename
.
replace
(
".log"
,
""
)
if
os
.
path
.
isfile
(
self
.
logfilename
):
newImageLog
=
open
(
self
.
logfilename
+
'.new'
,
'w'
)
copyFlag
=
True
with
open
(
self
.
logfilename
,
'r'
)
as
imageLog
:
for
line
in
imageLog
:
header
=
False
ret
=
re
.
search
(
'====== Start of log for ([0-9\.A-Za-z\-\_]+) ======'
,
line
)
if
ret
is
not
None
:
copyFlag
=
False
header
=
True
detailedLogFile
=
open
(
self
.
destinationFolder
+
'/'
+
ret
.
group
(
1
),
'w'
)
if
copyFlag
:
newImageLog
.
write
(
line
)
ret
=
re
.
search
(
'====== End of log for ([0-9\.A-Za-z\-\_]+) ======'
,
line
)
if
ret
is
not
None
:
copyFlag
=
True
detailedLogFile
.
close
()
elif
not
copyFlag
and
not
header
:
detailedLogFile
.
write
(
line
)
imageLog
.
close
()
newImageLog
.
close
()
os
.
rename
(
self
.
logfilename
+
'.new'
,
self
.
logfilename
)
else
:
print
(
'Cannot split unfound file'
)
#--------------------------------------------------------------------------------------------------------
#
# Start of main
#
#--------------------------------------------------------------------------------------------------------
argvs
=
sys
.
argv
argc
=
len
(
argvs
)
SP
=
SplitReport
()
while
len
(
argvs
)
>
1
:
myArgv
=
argvs
.
pop
(
1
)
if
re
.
match
(
'^\-\-logfilename=(.+)$'
,
myArgv
,
re
.
IGNORECASE
):
matchReg
=
re
.
match
(
'^\-\-logfilename=(.+)$'
,
myArgv
,
re
.
IGNORECASE
)
SP
.
logfilename
=
matchReg
.
group
(
1
)
SP
.
split
()
sys
.
exit
(
0
)
ci-scripts/ran.py
View file @
7b76f3f2
...
@@ -239,6 +239,7 @@ class RANManagement():
...
@@ -239,6 +239,7 @@ class RANManagement():
while
(
count
>
0
)
and
buildOAIprocess
:
while
(
count
>
0
)
and
buildOAIprocess
:
mySSH
.
command
(
'ps aux | grep --color=never build_ | grep -v grep'
,
'\$'
,
6
)
mySSH
.
command
(
'ps aux | grep --color=never build_ | grep -v grep'
,
'\$'
,
6
)
result
=
re
.
search
(
'build_oai'
,
mySSH
.
getBefore
())
result
=
re
.
search
(
'build_oai'
,
mySSH
.
getBefore
())
print
(
result
)
if
result
is
None
:
if
result
is
None
:
buildOAIprocess
=
False
buildOAIprocess
=
False
else
:
else
:
...
@@ -464,7 +465,7 @@ class RANManagement():
...
@@ -464,7 +465,7 @@ class RANManagement():
self
.
prematureExit
=
True
self
.
prematureExit
=
True
return
return
else
:
else
:
mySSH
.
command
(
'stdbuf -o0 cat enb_'
+
self
.
testCase_id
+
'.log | egrep --text --color=never -i "wait|sync|Starting"'
,
'\$'
,
4
)
mySSH
.
command
(
'stdbuf -o0 cat enb_'
+
self
.
testCase_id
+
'.log | egrep --text --color=never -i "wait|sync|Starting
|Started
"'
,
'\$'
,
4
)
if
rruCheck
:
if
rruCheck
:
result
=
re
.
search
(
'wait RUs'
,
mySSH
.
getBefore
())
result
=
re
.
search
(
'wait RUs'
,
mySSH
.
getBefore
())
else
:
else
:
...
...
ci-scripts/xml_files/benetel_multi_node_build.xml
0 → 100644
View file @
7b76f3f2
<!--
Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The OpenAirInterface Software Alliance licenses this file to You under
the OAI Public License, Version 1.1 (the "License"); you may not use this file
except in compliance with the License.
You may obtain a copy of the License at
http://www.openairinterface.org/?page_id=698
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For more information about the OpenAirInterface (OAI) Software Alliance:
contact@openairinterface.org
-->
<testCaseList>
<htmlTabRef>
build-tab
</htmlTabRef>
<htmlTabName>
Build
</htmlTabName>
<htmlTabIcon>
wrench
</htmlTabIcon>
<TestCaseRequestedList>
000001 000002
</TestCaseRequestedList>
<TestCaseExclusionList></TestCaseExclusionList>
<testCase
id=
"000001"
>
<class>
Build_eNB
</class>
<desc>
Build eNB
</desc>
<Build_eNB_args>
--eNB -t benetel4g -w None
</Build_eNB_args>
<forced_workspace_cleanup>
True
</forced_workspace_cleanup>
<eNB_instance>
0
</eNB_instance>
<eNB_serverId>
0
</eNB_serverId>
</testCase>
<testCase
id=
"000002"
>
<class>
Build_eNB
</class>
<desc>
Build gNB
</desc>
<Build_eNB_args>
--gNB -t benetel5g -w None
</Build_eNB_args>
<forced_workspace_cleanup>
True
</forced_workspace_cleanup>
<eNB_instance>
1
</eNB_instance>
<eNB_serverId>
1
</eNB_serverId>
</testCase>
</testCaseList>
ci-scripts/xml_files/benetel_nsa_base.xml
0 → 100644
View file @
7b76f3f2
<!--
Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The OpenAirInterface Software Alliance licenses this file to You under
the OAI Public License, Version 1.1 (the "License"); you may not use this file
except in compliance with the License.
You may obtain a copy of the License at
http://www.openairinterface.org/?page_id=698
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For more information about the OpenAirInterface (OAI) Software Alliance:
contact@openairinterface.org
-->
<testCaseList>
<htmlTabRef>
TEST-FR1-TM1
</htmlTabRef>
<htmlTabName>
FR1
</htmlTabName>
<htmlTabIcon>
tasks
</htmlTabIcon>
<TestCaseRequestedList>
010000
030000
040000
010001
000001
050000
050001
000001
060000
060001
000001
070000
070001
010002
000001
080001
080000
010003
</TestCaseRequestedList>
<TestCaseExclusionList></TestCaseExclusionList>
<testCase
id=
"010000"
>
<class>
Initialize_UE
</class>
<desc>
Initialize UE
</desc>
</testCase>
<testCase
id=
"010003"
>
<class>
Terminate_UE
</class>
<desc>
Terminate UE
</desc>
</testCase>
<testCase
id=
"010001"
>
<class>
Attach_UE
</class>
<desc>
Attach UE
</desc>
</testCase>
<testCase
id=
"010002"
>
<class>
Detach_UE
</class>
<desc>
Detach UE
</desc>
</testCase>
<testCase
id=
"030000"
>
<class>
Initialize_eNB
</class>
<desc>
Initialize eNB
</desc>
<Initialize_eNB_args>
-O ci-scripts/conf_files/benetel-4g.conf
</Initialize_eNB_args>
<eNB_instance>
0
</eNB_instance>
<eNB_serverId>
0
</eNB_serverId>
<air_interface>
lte
</air_interface>
</testCase>
<testCase
id=
"040000"
>
<class>
Initialize_eNB
</class>
<desc>
Initialize gNB
</desc>
<Initialize_eNB_args>
-O ci-scripts/conf_files/benetel-5g.conf
</Initialize_eNB_args>
<eNB_instance>
1
</eNB_instance>
<eNB_serverId>
1
</eNB_serverId>
<air_interface>
nr
</air_interface>
</testCase>
<testCase
id=
"000001"
>
<class>
IdleSleep
</class>
<desc>
Sleep
</desc>
<idle_sleep_time_in_sec>
20
</idle_sleep_time_in_sec>
</testCase>
<testCase
id=
"050000"
>
<class>
Ping
</class>
<desc>
Ping: 20pings in 20sec
</desc>
<ping_args>
-c 20
</ping_args>
<ping_packetloss_threshold>
50
</ping_packetloss_threshold>
</testCase>
<testCase
id=
"050001"
>
<class>
Ping
</class>
<desc>
Ping: 100pings in 20sec
</desc>
<ping_args>
-c 100 -i 0.2
</ping_args>
<ping_packetloss_threshold>
50
</ping_packetloss_threshold>
</testCase>
<testCase
id=
"060000"
>
<class>
Iperf
</class>
<desc>
iperf (DL/2.5Mbps/UDP)(60 sec)(single-ue profile)
</desc>
<iperf_args>
-u -b 2.5M -t 60 -i 1
</iperf_args>
<iperf_packetloss_threshold>
50
</iperf_packetloss_threshold>
<iperf_profile>
single-ue
</iperf_profile>
</testCase>
<testCase
id=
"060001"
>
<class>
Iperf
</class>
<desc>
iperf (UL/1.5Mbps/UDP)(60 sec)(single-ue profile)
</desc>
<iperf_args>
-u -b 1.5M -t 60 -i 1 -R
</iperf_args>
<iperf_packetloss_threshold>
50
</iperf_packetloss_threshold>
<iperf_profile>
single-ue
</iperf_profile>
</testCase>
<testCase
id=
"070000"
>
<class>
Iperf
</class>
<desc>
iperf (DL/20Mbps/UDP)(20 sec)(single-ue profile)
</desc>
<iperf_args>
-u -b 20M -t 20 -i 1
</iperf_args>
<iperf_packetloss_threshold>
50
</iperf_packetloss_threshold>
<iperf_profile>
single-ue
</iperf_profile>
</testCase>
<testCase
id=
"070001"
>
<class>
Iperf
</class>
<desc>
iperf (UL/3Mbps/UDP)(20 sec)(single-ue profile)
</desc>
<iperf_args>
-u -b 3M -t 20 -i 1 -R
</iperf_args>
<iperf_packetloss_threshold>
50
</iperf_packetloss_threshold>
<iperf_profile>
single-ue
</iperf_profile>
</testCase>
<testCase
id=
"080000"
>
<class>
Terminate_eNB
</class>
<desc>
Terminate eNB
</desc>
<eNB_instance>
0
</eNB_instance>
<eNB_serverId>
0
</eNB_serverId>
<air_interface>
lte
</air_interface>
</testCase>
<testCase
id=
"080001"
>
<class>
Terminate_eNB
</class>
<desc>
Terminate gNB
</desc>
<eNB_instance>
1
</eNB_instance>
<eNB_serverId>
1
</eNB_serverId>
<air_interface>
nr
</air_interface>
</testCase>
</testCaseList>
ci-scripts/xml_files/
fr1
_image_build.xml
→
ci-scripts/xml_files/
container
_image_build.xml
View file @
7b76f3f2
...
@@ -22,7 +22,7 @@
...
@@ -22,7 +22,7 @@
-->
-->
<testCaseList>
<testCaseList>
<htmlTabRef>
build-tab
</htmlTabRef>
<htmlTabRef>
build-tab
</htmlTabRef>
<htmlTabName>
Build
</htmlTabName>
<htmlTabName>
Build
Container Images
</htmlTabName>
<htmlTabIcon>
wrench
</htmlTabIcon>
<htmlTabIcon>
wrench
</htmlTabIcon>
<TestCaseRequestedList>
<TestCaseRequestedList>
000001
000001
...
@@ -31,7 +31,7 @@
...
@@ -31,7 +31,7 @@
<testCase
id=
"000001"
>
<testCase
id=
"000001"
>
<class>
Build_Image
</class>
<class>
Build_Image
</class>
<desc>
Build
eNB Image
</desc>
<desc>
Build
all Images
</desc>
<kind>
all
</kind>
<kind>
all
</kind>
<eNB_instance>
0
</eNB_instance>
<eNB_instance>
0
</eNB_instance>
<eNB_serverId>
0
</eNB_serverId>
<eNB_serverId>
0
</eNB_serverId>
...
...
ci-scripts/xml_files/fr1_multi_node_build.xml
View file @
7b76f3f2
...
@@ -37,6 +37,7 @@
...
@@ -37,6 +37,7 @@
<eNB_instance>
0
</eNB_instance>
<eNB_instance>
0
</eNB_instance>
<eNB_serverId>
0
</eNB_serverId>
<eNB_serverId>
0
</eNB_serverId>
<backgroundBuild>
True
</backgroundBuild>
<backgroundBuild>
True
</backgroundBuild>
<forced_workspace_cleanup>
True
</forced_workspace_cleanup>
</testCase>
</testCase>
<testCase
id=
"000004"
>
<testCase
id=
"000004"
>
...
@@ -53,6 +54,7 @@
...
@@ -53,6 +54,7 @@
<eNB_instance>
1
</eNB_instance>
<eNB_instance>
1
</eNB_instance>
<eNB_serverId>
1
</eNB_serverId>
<eNB_serverId>
1
</eNB_serverId>
<backgroundBuild>
True
</backgroundBuild>
<backgroundBuild>
True
</backgroundBuild>
<forced_workspace_cleanup>
True
</forced_workspace_cleanup>
</testCase>
</testCase>
<testCase
id=
"000003"
>
<testCase
id=
"000003"
>
...
...
cmake_targets/CMakeLists.txt
View file @
7b76f3f2
...
@@ -926,6 +926,9 @@ set_target_properties(tcp_bridge_oai PROPERTIES COMPILE_FLAGS "-fvisibility=hidd
...
@@ -926,6 +926,9 @@ set_target_properties(tcp_bridge_oai PROPERTIES COMPILE_FLAGS "-fvisibility=hidd
# Benetel 4G library
# Benetel 4G library
######################################################################
######################################################################
include_directories
(
"/usr/include/dpdk"
)
set
(
HWLIB_BENETEL_4G_SOURCE
set
(
HWLIB_BENETEL_4G_SOURCE
${
OPENAIR_TARGETS
}
/ARCH/ETHERNET/benetel/4g/benetel.c
${
OPENAIR_TARGETS
}
/ARCH/ETHERNET/benetel/4g/benetel.c
${
OPENAIR_TARGETS
}
/ARCH/ETHERNET/benetel/4g/shared_buffers.c
${
OPENAIR_TARGETS
}
/ARCH/ETHERNET/benetel/4g/shared_buffers.c
...
...
cmake_targets/build_oai
View file @
7b76f3f2
...
@@ -48,8 +48,9 @@ DEADLINE_SCHEDULER_FLAG_USER=""
...
@@ -48,8 +48,9 @@ DEADLINE_SCHEDULER_FLAG_USER=""
CPU_AFFINITY_FLAG_USER
=
"False"
#Only valid when low-latency flag is set to False
CPU_AFFINITY_FLAG_USER
=
"False"
#Only valid when low-latency flag is set to False
REL
=
"Rel15"
REL
=
"Rel15"
HW
=
"None"
HW
=
"None"
TP
=
"
None
"
TP
=
"
Ethernet
"
EPC
=
0
EPC
=
0
VERBOSE_CI
=
0
VERBOSE_COMPILE
=
0
VERBOSE_COMPILE
=
0
CFLAGS_PROCESSOR_USER
=
""
CFLAGS_PROCESSOR_USER
=
""
RUN_GROUP
=
0
RUN_GROUP
=
0
...
@@ -289,6 +290,9 @@ function main() {
...
@@ -289,6 +290,9 @@ function main() {
esac
esac
echo_info
"Setting hardware to:
$HW
"
echo_info
"Setting hardware to:
$HW
"
shift
2
;;
shift
2
;;
-t
|
--transport
)
TP
=
$2
shift
2
;;
-P
|
--phy_simulators
)
-P
|
--phy_simulators
)
SIMUS_PHY
=
1
SIMUS_PHY
=
1
echo_info
"Will compile dlsim, ulsim, ..."
echo_info
"Will compile dlsim, ulsim, ..."
...
@@ -324,6 +328,10 @@ function main() {
...
@@ -324,6 +328,10 @@ function main() {
HWLAT_TEST
=
1
HWLAT_TEST
=
1
echo_info
"Will compile hw latency test program"
echo_info
"Will compile hw latency test program"
shift
;;
shift
;;
--verbose-ci
)
VERBOSE_CI
=
1
echo_info
"Will compile with verbose instructions in CI Docker env"
shift
;;
--verbose-compile
)
--verbose-compile
)
VERBOSE_COMPILE
=
1
VERBOSE_COMPILE
=
1
echo_info
"Will compile with verbose instructions"
echo_info
"Will compile with verbose instructions"
...
@@ -936,6 +944,7 @@ function main() {
...
@@ -936,6 +944,7 @@ function main() {
echo_info
"Building transport protocol libraries"
echo_info
"Building transport protocol libraries"
rm
-f
liboai_transpro.so
rm
-f
liboai_transpro.so
rm
-f
$dbin
/liboai_transpro.so
rm
-f
$dbin
/liboai_transpro.so
if
[
"
$TP
"
==
"Ethernet"
]
;
then
compilations
\
compilations
\
$build_dir
oai_eth_transpro
\
$build_dir
oai_eth_transpro
\
liboai_eth_transpro.so
$dbin
/liboai_eth_transpro.so.
$REL
liboai_eth_transpro.so
$dbin
/liboai_eth_transpro.so.
$REL
...
@@ -943,6 +952,23 @@ function main() {
...
@@ -943,6 +952,23 @@ function main() {
ln
-sf
$dbin
/liboai_eth_transpro.so.
$REL
$dbin
/liboai_transpro.so
ln
-sf
$dbin
/liboai_eth_transpro.so.
$REL
$dbin
/liboai_transpro.so
echo_info
"liboai_transpro.so is linked to ETHERNET transport"
echo_info
"liboai_transpro.so is linked to ETHERNET transport"
fi
fi
if
[
"
$TP
"
==
"benetel4g"
]
;
then
compilations
\
$build_dir
benetel_4g
\
libbenetel_4g.so
$dbin
/libbenetel_4g.
$REL
ln
-sf
libbenetel_4g.so liboai_transpro.so
ln
-sf
$dbin
/libbenetel_4g.so.
$REL
$dbin
/liboai_transpro.so
echo_info
"liboai_transpro.so is linked to BENETEL4G transport"
fi
if
[
"
$TP
"
==
"benetel5g"
]
;
then
compilations
\
$build_dir
benetel_5g
\
libbenetel_5g.so
$dbin
/libbenetel_5g.
$REL
ln
-sf
libbenetel_5g.so liboai_transpro.so
ln
-sf
$dbin
/libbenetel_5g.so.
$REL
$dbin
/liboai_transpro.so
echo_info
"liboai_transpro.so is linked to BENETEL4G transport"
fi
fi
fi
fi
###################
###################
...
...
cmake_targets/tools/build_helper
View file @
7b76f3f2
...
@@ -41,7 +41,7 @@ KERNEL_VERSION=$(uname -r | cut -d '.' -f1)
...
@@ -41,7 +41,7 @@ KERNEL_VERSION=$(uname -r | cut -d '.' -f1)
KERNEL_MAJOR=$(uname -r | cut -d '.' -f2)
KERNEL_MAJOR=$(uname -r | cut -d '.' -f2)
#check if we run inside a container
#check if we run inside a container
IS_CONTAINER=`egrep -c "docker|podman|kubepods" /proc/self/cgroup || true`
IS_CONTAINER=`egrep -c "docker|podman|kubepods
|libpod|buildah
" /proc/self/cgroup || true`
#sudo is not needed when we are root
#sudo is not needed when we are root
if [ "$UID" = 0 ]
if [ "$UID" = 0 ]
then
then
...
@@ -221,6 +221,11 @@ compilations() {
...
@@ -221,6 +221,11 @@ compilations() {
ret=$?
ret=$?
} > $dlog/$2.$REL.txt 2>&1
} > $dlog/$2.$REL.txt 2>&1
set -e
set -e
if [ "$VERBOSE_CI" == "1" ]; then
echo_info "====== Start of log for $2.$REL.txt ======"
cat $dlog/$2.$REL.txt
echo_info "====== End of log for $2.$REL.txt ======"
fi
if [[ $ret -ne 0 ]]; then
if [[ $ret -ne 0 ]]; then
check_warnings "$dlog/$2.$REL.txt"
check_warnings "$dlog/$2.$REL.txt"
check_errors "$dlog/$2.$REL.txt"
check_errors "$dlog/$2.$REL.txt"
...
@@ -359,10 +364,10 @@ check_install_usrp_uhd_driver(){
...
@@ -359,10 +364,10 @@ check_install_usrp_uhd_driver(){
elif [[ "$OS_BASEDISTRO" == "fedora" ]]; then
elif [[ "$OS_BASEDISTRO" == "fedora" ]]; then
if [ $IS_CONTAINER -eq 0 ]
if [ $IS_CONTAINER -eq 0 ]
then
then
$SUDO $INSTALLER -y install python boost libusb-devel libusbx-devel boost-devel python-mako python-docutils
cmake
$SUDO $INSTALLER -y install python boost libusb-devel libusbx-devel boost-devel python-mako python-docutils
$CMAKE
$SUDO -H pip install requests
$SUDO -H pip install requests
else
else
$SUDO $INSTALLER -y install boost boost-devel
cmake3
$SUDO $INSTALLER -y install boost boost-devel
$CMAKE
$SUDO pip3 install mako requests
$SUDO pip3 install mako requests
fi
fi
if [[ "$OS_DISTRO" == "rhel" ]] || [[ "$OS_DISTRO" == "centos" ]]; then
if [[ "$OS_DISTRO" == "rhel" ]] || [[ "$OS_DISTRO" == "centos" ]]; then
...
@@ -488,7 +493,7 @@ install_soapy_from_source(){
...
@@ -488,7 +493,7 @@ install_soapy_from_source(){
#git checkout tags/release_003_010_001_001
#git checkout tags/release_003_010_001_001
mkdir -p build
mkdir -p build
cd build
cd build
cmake
../
$CMAKE
../
echo "Compiling SoapyRemote"
echo "Compiling SoapyRemote"
make -j`nproc`
make -j`nproc`
$SUDO make install
$SUDO make install
...
@@ -507,7 +512,7 @@ install_soapy_iris_from_source(){
...
@@ -507,7 +512,7 @@ install_soapy_iris_from_source(){
cd sklk-soapyiris
cd sklk-soapyiris
mkdir -p build
mkdir -p build
cd build
cd build
cmake
../
$CMAKE
../
echo "Compiling SoapyIris"
echo "Compiling SoapyIris"
make -j`nproc`
make -j`nproc`
$SUDO make install
$SUDO make install
...
@@ -684,7 +689,7 @@ check_install_oai_software() {
...
@@ -684,7 +689,7 @@ check_install_oai_software() {
automake \
automake \
bison \
bison \
build-essential \
build-essential \
cmake
\
$CMAKE
\
cmake-curses-gui \
cmake-curses-gui \
ninja-build \
ninja-build \
doxygen \
doxygen \
...
...
docker/Dockerfile.eNB.rhel8.2
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM localhost/ran-build:latest AS enb-build
FROM localhost/ran-build:latest AS enb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --eNB --ninja -w USRP
./build_oai --eNB --ninja -w USRP
--verbose-ci
# debug
# debug
#RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-softmodem.Rel15
#RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-softmodem.Rel15
...
...
docker/Dockerfile.eNB.ubuntu18
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS enb-build
FROM ran-build:latest AS enb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --eNB --ninja -w USRP
./build_oai --eNB --ninja -w USRP
--verbose-ci
RUN apt-get install -y python3-pip && \
RUN apt-get install -y python3-pip && \
pip3 install --ignore-installed pyyaml && \
pip3 install --ignore-installed pyyaml && \
...
...
docker/Dockerfile.gNB.rhel8.2
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM localhost/ran-build:latest AS gnb-build
FROM localhost/ran-build:latest AS gnb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --gNB --ninja -w USRP
./build_oai --gNB --ninja -w USRP
--verbose-ci
#debug
#debug
#RUN ldconfig -v
#RUN ldconfig -v
...
...
docker/Dockerfile.gNB.ubuntu18
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS gnb-build
FROM ran-build:latest AS gnb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --gNB --ninja -w USRP
./build_oai --gNB --ninja -w USRP
--verbose-ci
#debug
#debug
RUN ldconfig -v
RUN ldconfig -v
...
...
docker/Dockerfile.lteUE.rhel8.2
View file @
7b76f3f2
...
@@ -27,15 +27,15 @@
...
@@ -27,15 +27,15 @@
FROM localhost/ran-build:latest AS lte-ue-build
FROM localhost/ran-build:latest AS lte-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --UE --ninja -w USRP
./build_oai --UE --ninja -w USRP
--verbose-ci
# debug
# debug
#RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-uesoftmodem.Rel15
#RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-uesoftmodem.Rel15
...
...
docker/Dockerfile.lteUE.ubuntu18
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS lte-ue-build
FROM ran-build:latest AS lte-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --UE --ninja -w USRP
./build_oai --UE --ninja -w USRP
--verbose-ci
# debug
# debug
#RUN ldconfig -v
#RUN ldconfig -v
...
...
docker/Dockerfile.nrUE.rhel8.2
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM localhost/ran-build:latest AS nr-ue-build
FROM localhost/ran-build:latest AS nr-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --nrUE --ninja -w USRP
./build_oai --nrUE --ninja -w USRP
--verbose-ci
# debug
# debug
#RUN ldconfig -v
#RUN ldconfig -v
...
...
docker/Dockerfile.nrUE.ubuntu18
View file @
7b76f3f2
...
@@ -27,14 +27,15 @@
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS nr-ue-build
FROM ran-build:latest AS nr-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
#run build_oai to build the target image
RUN /bin/sh oaienv && \
RUN /bin/sh oaienv && \
cd cmake_targets && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
mkdir -p log && \
./build_oai --nrUE --ninja -w USRP
./build_oai --nrUE --ninja -w USRP
--verbose-ci
# debug
# debug
#RUN ldconfig -v
#RUN ldconfig -v
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment