[Asterisk-code-review] CI: Don't enable non-core modules in Certified branches (...asterisk[certified/13.21])
George Joseph
asteriskteam at digium.com
Fri Jul 26 09:48:02 CDT 2019
George Joseph has submitted this change and it was merged. ( https://gerrit.asterisk.org/c/asterisk/+/11622 )
Change subject: CI: Don't enable non-core modules in Certified branches
......................................................................
CI: Don't enable non-core modules in Certified branches
We don't support non-core modules for Certified releases, but we were
enabling them for CI builds, which was causing lots of test
failures. Now we don't.
NOTE: This change required that the rest of the CI shell scripts
and jenkinsfiles be updated to the same level as certified/16.3.
Change-Id: I0b3254c08a2479f3d39151690350cce5ce5ad766
---
M tests/CI/buildAsterisk.sh
M tests/CI/gateTestGroups.json
M tests/CI/gates.jenkinsfile
M tests/CI/installAsterisk.sh
M tests/CI/periodic-dailyTestGroups.json
M tests/CI/periodics-daily.jenkinsfile
M tests/CI/ref_debug.jenkinsfile
M tests/CI/runTestsuite.sh
M tests/CI/runUnittests.sh
M tests/CI/setupRealtime.sh
M tests/CI/teardownRealtime.sh
M tests/CI/unittests.jenkinsfile
12 files changed, 578 insertions(+), 216 deletions(-)
Approvals:
Kevin Harwell: Looks good to me, but someone else must approve
Joshua Colp: Looks good to me, but someone else must approve
George Joseph: Looks good to me, approved; Approved for Submit
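
The heart of the change is in buildAsterisk.sh: instead of unconditionally
enabling every menuselect category, the script now keys off the branch name
(passed in via --branch-name or derived from .gitreview) and only enables the
non-core categories when the branch is not a certified/* branch. A condensed
sketch of that logic, paraphrased from the buildAsterisk.sh hunk in the diff
below, with the names exactly as they appear in the script:

    # MENUSELECT_TESTS is always enabled; the remaining categories are
    # only enabled on non-certified branches.
    cat_enables="MENUSELECT_TESTS"
    mod_disables=""

    if [[ ! "${BRANCH_NAME}" =~ ^certified ]] ; then
        cat_enables+=" MENUSELECT_BRIDGES MENUSELECT_CEL MENUSELECT_CDR"
        cat_enables+=" MENUSELECT_CHANNELS MENUSELECT_CODECS MENUSELECT_FORMATS"
        cat_enables+=" MENUSELECT_FUNCS MENUSELECT_PBX MENUSELECT_RES MENUSELECT_UTILS"
    else
        # Certified branches: leave non-core categories disabled and drop test_utils.
        mod_disables+=" test_utils"
    fi

    runner menuselect/menuselect `gen_cats enable $cat_enables` menuselect.makeopts
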
diff --git a/tests/CI/buildAsterisk.sh b/tests/CI/buildAsterisk.sh
index 9840d85..2e94791 100755
--- a/tests/CI/buildAsterisk.sh
+++ b/tests/CI/buildAsterisk.sh
@@ -1,9 +1,21 @@
#!/usr/bin/env bash
CIDIR=$(dirname $(readlink -fn $0))
+COVERAGE=0
REF_DEBUG=0
+DISABLE_BINARY_MODULES=0
+NO_CONFIGURE=0
+NO_MENUSELECT=0
+NO_MAKE=0
+NO_ALEMBIC=0
source $CIDIR/ci.functions
+set -e
+
+if [ -z $BRANCH_NAME ]; then
+ BRANCH_NAME=$(git config -f .gitreview --get gerrit.defaultbranch)
+fi
+
gen_cats() {
set +x
action=$1
@@ -26,10 +38,27 @@
done
}
-[ x"$OUTPUT_DIR" != x ] && mkdir -p "$OUTPUT_DIR" 2&> /dev/null
+run_alembic() {
+ pushd contrib/ast-db-manage >/dev/null
+ runner alembic $@
+ RC=$?
+ popd > /dev/null
+ return $RC
+}
+
+[ x"$OUTPUT_DIR" != x ] && mkdir -p "$OUTPUT_DIR" 2> /dev/null
+
+if [ -z $TESTED_ONLY ]; then
+ # Skip building untested modules by default if coverage is enabled.
+ TESTED_ONLY=$COVERAGE
+fi
+
+if [ -z $LCOV_DIR ]; then
+ LCOV_DIR="${OUTPUT_DIR:+${OUTPUT_DIR}/}lcov"
+fi
if [ x"$CACHE_DIR" != x ] ; then
- mkdir -p "$CACHE_DIR/sounds $CACHE_DIR/externals" 2&> /dev/null
+ mkdir -p $CACHE_DIR/sounds $CACHE_DIR/externals 2> /dev/null
fi
if [ ${CCACHE_DISABLE:-0} -ne 1 ] ; then
@@ -58,76 +87,134 @@
runner ulimit -a
MAKE=`which make`
+PKGCONFIG=`which pkg-config`
[ -d /usr/lib64 ] && _libdir=/usr/lib64
common_config_args="--prefix=/usr ${_libdir:+--libdir=${_libdir}} --sysconfdir=/etc --with-pjproject-bundled"
+$PKGCONFIG 'jansson >= 2.11' || common_config_args+=" --with-jansson-bundled"
common_config_args+=" ${CACHE_DIR:+--with-sounds-cache=${CACHE_DIR}/sounds --with-externals-cache=${CACHE_DIR}/externals}"
common_config_args+=" --enable-dev-mode"
+if [ $COVERAGE -eq 1 ] ; then
+ common_config_args+=" --enable-coverage"
+fi
+if [ "$BRANCH_NAME" == "master" -o $DISABLE_BINARY_MODULES -eq 1 ] ; then
+ common_config_args+=" --disable-binary-modules"
+fi
+
export WGET_EXTRA_ARGS="--quiet"
-runner ./configure ${common_config_args} > ${OUTPUT_DIR:+${OUTPUT_DIR}/}configure.txt
-
-runner ${MAKE} menuselect.makeopts
-
-runner menuselect/menuselect `gen_mods enable DONT_OPTIMIZE BETTER_BACKTRACES MALLOC_DEBUG DO_CRASH TEST_FRAMEWORK` menuselect.makeopts
-runner menuselect/menuselect `gen_mods disable COMPILE_DOUBLE BUILD_NATIVE` menuselect.makeopts
-if [ $REF_DEBUG -eq 1 ] ; then
- runner menuselect/menuselect --enable REF_DEBUG menuselect.makeopts
+if [ $NO_CONFIGURE -eq 0 ] ; then
+ runner ./configure ${common_config_args} > ${OUTPUT_DIR:+${OUTPUT_DIR}/}configure.txt
fi
-cat_enables="MENUSELECT_BRIDGES MENUSELECT_CEL MENUSELECT_CDR"
-cat_enables+=" MENUSELECT_CHANNELS MENUSELECT_CODECS MENUSELECT_FORMATS MENUSELECT_FUNCS"
-cat_enables+=" MENUSELECT_PBX MENUSELECT_RES MENUSELECT_UTILS MENUSELECT_TESTS"
-runner menuselect/menuselect `gen_cats enable $cat_enables` menuselect.makeopts
+if [ $NO_MENUSELECT -eq 0 ] ; then
+ runner ${MAKE} menuselect.makeopts
-mod_disables="res_digium_phone chan_vpb"
-[ "$BRANCH_NAME" == "master" ] && mod_disables+=" codec_opus codec_silk codec_g729a codec_siren7 codec_siren14"
-runner menuselect/menuselect `gen_mods disable $mod_disables` menuselect.makeopts
+ runner menuselect/menuselect `gen_mods enable DONT_OPTIMIZE BETTER_BACKTRACES MALLOC_DEBUG DO_CRASH TEST_FRAMEWORK` menuselect.makeopts
+ runner menuselect/menuselect `gen_mods disable COMPILE_DOUBLE BUILD_NATIVE` menuselect.makeopts
+ if [ $REF_DEBUG -eq 1 ] ; then
+ runner menuselect/menuselect --enable REF_DEBUG menuselect.makeopts
+ fi
-mod_enables="app_voicemail app_directory FILE_STORAGE"
-mod_enables+=" res_mwi_external res_ari_mailboxes res_mwi_external_ami res_stasis_mailbox"
-mod_enables+=" CORE-SOUNDS-EN-GSM MOH-OPSOUND-GSM EXTRA-SOUNDS-EN-GSM"
-runner menuselect/menuselect `gen_mods enable $mod_enables` menuselect.makeopts
+ cat_enables="MENUSELECT_TESTS"
+ mod_disables=""
-runner ${MAKE} -j8 || runner ${MAKE} -j1 NOISY_BUILD=yes
+ if [[ ! "${BRANCH_NAME}" =~ ^certified ]] ; then
+ cat_enables+=" MENUSELECT_BRIDGES MENUSELECT_CEL MENUSELECT_CDR"
+ cat_enables+=" MENUSELECT_CHANNELS MENUSELECT_CODECS MENUSELECT_FORMATS MENUSELECT_FUNCS"
+ cat_enables+=" MENUSELECT_PBX MENUSELECT_RES MENUSELECT_UTILS"
+ else
+ mod_disables+=" test_utils"
+ fi
-ALEMBIC=$(which alembic 2>/dev/null || : )
-if [ x"$ALEMBIC" = x ] ; then
- echo "Alembic not installed"
- exit 1
+ runner menuselect/menuselect `gen_cats enable $cat_enables` menuselect.makeopts
+
+ mod_disables+=" res_digium_phone chan_vpb"
+ if [ $TESTED_ONLY -eq 1 ] ; then
+ # These modules are not tested at all. They are loaded but nothing is ever done
+ # with them, no testsuite tests depend on them.
+ mod_disables+=" app_adsiprog app_alarmreceiver app_celgenuserevent app_db app_dictate"
+ mod_disables+=" app_dumpchan app_externalivr app_festival app_getcpeid app_ices app_image"
+ mod_disables+=" app_jack app_milliwatt app_minivm app_morsecode app_mp3 app_nbscat app_privacy"
+ mod_disables+=" app_readexten app_sms app_speech_utils app_test app_url app_waitforring"
+ mod_disables+=" app_waitforsilence app_waituntil app_zapateller"
+ mod_disables+=" cdr_adaptive_odbc cdr_custom cdr_manager cdr_odbc cdr_pgsql cdr_radius"
+ mod_disables+=" cdr_syslog cdr_tds"
+ mod_disables+=" cel_odbc cel_pgsql cel_radius cel_sqlite3_custom cel_tds"
+ mod_disables+=" chan_alsa chan_console chan_mgcp chan_motif chan_oss chan_rtp chan_skinny chan_unistim"
+ mod_disables+=" func_frame_trace func_pitchshift func_speex func_volume func_dialgroup"
+ mod_disables+=" func_periodic_hook func_sprintf func_enum func_extstate func_sysinfo func_iconv"
+ mod_disables+=" func_callcompletion func_version func_rand func_sha1 func_module func_md5"
+ mod_disables+=" pbx_dundi pbx_loopback"
+ mod_disables+=" res_ael_share res_calendar res_config_ldap res_config_pgsql res_corosync"
+ mod_disables+=" res_http_post res_pktccops res_rtp_multicast res_snmp res_xmpp"
+ fi
+
+ runner menuselect/menuselect `gen_mods disable $mod_disables` menuselect.makeopts
+
+ mod_enables="app_voicemail app_directory FILE_STORAGE"
+ mod_enables+=" res_mwi_external res_ari_mailboxes res_mwi_external_ami res_stasis_mailbox"
+ mod_enables+=" CORE-SOUNDS-EN-GSM MOH-OPSOUND-GSM EXTRA-SOUNDS-EN-GSM"
+ runner menuselect/menuselect `gen_mods enable $mod_enables` menuselect.makeopts
fi
-cd contrib/ast-db-manage
-find -name *.pyc -delete
-out=$(alembic -c config.ini.sample branches)
-if [ "x$out" != "x" ] ; then
- >&2 echo "Alembic branches were found for config"
- >&2 echo $out
- exit 1
-else
- >&2 echo "Alembic for 'config' OK"
+if [ $NO_MAKE -eq 0 ] ; then
+ runner ${MAKE} -j8 || runner ${MAKE} -j1 NOISY_BUILD=yes
fi
-out=$(alembic -c cdr.ini.sample branches)
-if [ "x$out" != "x" ] ; then
- >&2 echo "Alembic branches were found for cdr"
- >&2 echo $out
- exit 1
-else
- >&2 echo "Alembic for 'cdr' OK"
+runner rm -f ${LCOV_DIR}/*.info
+if [ $COVERAGE -eq 1 ] ; then
+ runner mkdir -p ${LCOV_DIR}
+
+ # Zero counter data
+ runner lcov --quiet --directory . --zerocounters
+
+ # Branch coverage is not supported by --initial. Disable it to suppress a notice
+ # printed if it was enabled in lcovrc.
+ # This initial capture ensures any module which was built but never loaded is
+ # reported with 0% coverage for all sources.
+ runner lcov --quiet --directory . --no-external --capture --initial --rc lcov_branch_coverage=0 \
+ --output-file ${LCOV_DIR}/initial.info
fi
-out=$(alembic -c voicemail.ini.sample branches)
-if [ "x$out" != "x" ] ; then
- >&2 echo "Alembic branches were found for voicemail"
- >&2 echo $out
- exit 1
-else
- >&2 echo "Alembic for 'voicemail' OK"
+if [ $NO_ALEMBIC -eq 0 ] ; then
+ ALEMBIC=$(which alembic 2>/dev/null || : )
+ if [ x"$ALEMBIC" = x ] ; then
+ >&2 echo "Alembic not installed"
+ exit 1
+ fi
+
+ find contrib/ast-db-manage -name *.pyc -delete
+ out=$(run_alembic -c config.ini.sample branches)
+ if [ "x$out" != "x" ] ; then
+ >&2 echo "Alembic branches were found for config"
+ >&2 echo $out
+ exit 1
+ fi
+ run_alembic -c config.ini.sample upgrade head --sql > "${OUTPUT_DIR:+${OUTPUT_DIR}/}alembic-config.sql" || exit 1
+ echo "Alembic for 'config' OK"
+
+ out=$(run_alembic -c cdr.ini.sample branches)
+ if [ "x$out" != "x" ] ; then
+ >&2 echo "Alembic branches were found for cdr"
+ >&2 echo $out
+ exit 1
+ fi
+ run_alembic -c cdr.ini.sample upgrade head --sql > "${OUTPUT_DIR:+${OUTPUT_DIR}/}alembic-cdr.sql" || exit 1
+ echo "Alembic for 'cdr' OK"
+
+ out=$(run_alembic -c voicemail.ini.sample branches)
+ if [ "x$out" != "x" ] ; then
+ >&2 echo "Alembic branches were found for voicemail"
+ >&2 echo $out
+ exit 1
+ fi
+ run_alembic -c voicemail.ini.sample upgrade head --sql > "${OUTPUT_DIR:+${OUTPUT_DIR}/}alembic-voicemail.sql" || exit 1
+ echo "Alembic for 'voicemail' OK"
fi
if [ -f "doc/core-en_US.xml" ] ; then
- ${MAKE} validate-docs || ${MAKE} NOISY_BUILD=yes validate-docs
+ runner ${MAKE} validate-docs || ${MAKE} NOISY_BUILD=yes validate-docs
fi
diff --git a/tests/CI/gateTestGroups.json b/tests/CI/gateTestGroups.json
index 7c8b917..d048896 100644
--- a/tests/CI/gateTestGroups.json
+++ b/tests/CI/gateTestGroups.json
@@ -2,46 +2,55 @@
{
"name": "ari1",
"dir": "tests/CI/output/ari1",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "--test-regex=tests/rest_api/[Ca-d]"
},
{
"name": "ari2",
"dir": "tests/CI/output/ari2",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "--test-regex=tests/rest_api/[e-z]"
},
{
"name": "pjs1",
"dir": "tests/CI/output/pjsip1",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "--test-regex=tests/channels/pjsip/[a-f]"
},
{
"name": "pjs2",
"dir": "tests/CI/output/pjsip2",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "--test-regex=tests/channels/pjsip/[g-r]"
},
{
"name": "pjs3",
"dir": "tests/CI/output/pjsip3",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "--test-regex=tests/channels/pjsip/[s-z]"
},
{
"name": "sip1",
"dir": "tests/CI/output/sip1",
+ "runTestsuiteOptions": "--test-timeout=240",
"testcmd": "--test-regex=tests/channels/SIP/[Sa-r]"
},
{
"name": "sip2",
"dir": "tests/CI/output/sip2",
+ "runTestsuiteOptions": "--test-timeout=240",
"testcmd": "--test-regex=tests/channels/SIP/[s-z]"
},
{
"name": "iax ",
"dir": "tests/CI/output/iax2_local",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": " -t tests/channels/iax2 -t tests/channels/local"
},
{
"name": "mwi ",
"dir": "tests/CI/output/extmwi",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "--test-regex=tests/channels/pjsip/.*mwi"
}
]
diff --git a/tests/CI/gates.jenkinsfile b/tests/CI/gates.jenkinsfile
index 9dc7308..bc23781 100644
--- a/tests/CI/gates.jenkinsfile
+++ b/tests/CI/gates.jenkinsfile
@@ -10,7 +10,19 @@
* we need to dynamically determine which docker image we're going to use and
* you can't do that in a declarative pipeline.
*/
+def timeoutTime = 60
+def timeoutUnits = 'MINUTES'
+if (env.TIMEOUT_GATES) {
+ def _timeout = env.TIMEOUT_GATES.split()
+ timeoutTime = _timeout[0].toInteger()
+ timeoutUnits = _timeout[1]
+}
+
pipeline {
+ options {
+ timestamps()
+ timeout(time: timeoutTime, unit: timeoutUnits)
+ }
triggers {
/*
* This trigger will match either the "asterisk" or "Security-asterisk"
@@ -62,10 +74,12 @@
steps {
/* Here's where we switch to scripted pipeline */
script {
+ manager.build.displayName = "${env.GERRIT_CHANGE_NUMBER}"
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
+
stage ("Checkout") {
sh "sudo chown -R jenkins:users ."
- env.GERRIT_PROJECT_URL = env.GERRIT_CHANGE_URL.replaceAll(/\/[0-9]+$/, "/${env.GERRIT_PROJECT}")
- sh "printenv | sort"
+ env.GERRIT_PROJECT_URL = env.GIT_URL.replaceAll(/[^\/]+$/, env.GERRIT_PROJECT)
/*
* Jenkins has already automatically checked out the base branch
@@ -76,33 +90,46 @@
*
* The Gerrit Trigger provides all the URLs and refspecs to
* check out the change.
+ *
+ * We need to retrieve the jenkins2 gerrit https credentials
+ * in case this review is in a restricted project.
*/
- checkout scm: [$class: 'GitSCM',
- branches: [[name: env.GERRIT_BRANCH ]],
- extensions: [
- [$class: 'ScmName', name: 'gerrit-public'],
- [$class: 'CleanBeforeCheckout'],
- [$class: 'PreBuildMerge', options: [
- mergeRemote: 'gerrit-public',
- fastForwardMode: 'NO_FF',
- mergeStrategy: 'RECURSIVE',
- mergeTarget: env.GERRIT_BRANCH]],
- [$class: 'CloneOption',
- honorRefspec: true,
- noTags: true,
- depth: 10,
- shallow: true
- ],
- [$class: 'PruneStaleBranch'],
- [$class: 'BuildChooserSetting',
- buildChooser: [$class: 'GerritTriggerBuildChooser']
- ]
- ],
- userRemoteConfigs: [
- [name: env.GERRIT_NAME, refspec: env.GERRIT_REFSPEC, url: env.GERRIT_PROJECT_URL ]
- ]
- ]
+ withCredentials([usernamePassword(credentialsId: "${JENKINS_GERRIT_CREDS}",
+ passwordVariable: 'GERRIT_USER_PW', usernameVariable: 'GERRIT_USER_NAME')]) {
+ sh "printenv | sort"
+
+ checkout scm: [$class: 'GitSCM',
+ branches: [[name: env.GERRIT_BRANCH ]],
+ extensions: [
+ [$class: 'ScmName', name: env.GERRIT_NAME],
+ [$class: 'CleanBeforeCheckout'],
+ [$class: 'PreBuildMerge', options: [
+ mergeRemote: env.GERRIT_NAME,
+ fastForwardMode: 'NO_FF',
+ mergeStrategy: 'RECURSIVE',
+ mergeTarget: env.GERRIT_BRANCH]],
+ [$class: 'CloneOption',
+ honorRefspec: true,
+ noTags: true,
+ depth: 10,
+ shallow: true
+ ],
+ [$class: 'PruneStaleBranch'],
+ [$class: 'BuildChooserSetting',
+ buildChooser: [$class: 'GerritTriggerBuildChooser']
+ ]
+ ],
+ userRemoteConfigs: [
+ [
+ credentialsId: env.JENKINS_GERRIT_CREDS,
+ name: env.GERRIT_NAME,
+ refspec: env.GERRIT_REFSPEC,
+ url: env.GERRIT_PROJECT_URL.replaceAll("http(s)?://", "http\$1://${GERRIT_USER_NAME}@")
+ ]
+ ]
+ ]
+ }
sh "sudo tests/CI/setupJenkinsEnvironment.sh"
}
@@ -110,11 +137,14 @@
def r = currentBuild.startTimeInMillis % images.length
def ri = images[(int)r]
def randomImage = env.DOCKER_REGISTRY + "/" + ri
- def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
- " -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
+ /* FYI... Jenkins takes care of mounting the workspace for the container */
+ def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
+ " --tmpfs /tmp:exec,size=1G -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
" --entrypoint=''"
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
def outputdir = "tests/CI/output/Testsuite"
+
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
def img = docker.image(randomImage)
img.pull()
@@ -122,7 +152,7 @@
img.inside(dockerOptions + " --name ${bt}-build") {
echo 'Building..'
env.CCACHE_DIR = "/srv/cache/ccache"
- sh "./tests/CI/buildAsterisk.sh --output-dir=${outputdir} --cache-dir=/srv/cache"
+ sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
artifacts: "${outputdir}/*"
@@ -148,13 +178,15 @@
img.inside("${dockerOptions} --name ${bt}-${groupName}") {
lock("${JOB_NAME}.${NODE_NAME}.installer") {
- sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
+ sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
}
sh "sudo rm -rf ${groupDir} || : "
- checkout scm: [$class: 'GitSCM',
- branches: [[name: "${BRANCH_NAME}"]],
+ withCredentials([usernamePassword(credentialsId: "${JENKINS_GERRIT_CREDS}",
+ passwordVariable: 'GERRIT_USER_PW', usernameVariable: 'GERRIT_USER_NAME')]) {
+ checkout scm: [$class: 'GitSCM',
+ branches: [[name: "${BRANCH_NAME}"]],
extensions: [
[$class: 'RelativeTargetDirectory', relativeTargetDir: groupDir],
[$class: 'CloneOption',
@@ -164,10 +196,17 @@
shallow: true
],
],
- userRemoteConfigs: [[name: env.GERRIT_NAME, url: testsuiteUrl]]
+ userRemoteConfigs: [
+ [
+ credentialsId: env.JENKINS_GERRIT_CREDS,
+ name: env.GERRIT_NAME,
+ url: testsuiteUrl.replaceAll("http(s)?://", "http\$1://${GERRIT_USER_NAME}@")
+ ]
+ ]
]
+ }
- sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --test-command='${groupTestcmd}'"
+ sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
@@ -189,8 +228,8 @@
}
post {
cleanup {
- sh "sudo make distclean 2&>/dev/null || : "
- sh "sudo rm -rf tests/CI/output 2&>/dev/null || : "
+ sh "sudo make distclean >/dev/null 2>&1 || : "
+ sh "sudo rm -rf tests/CI/output >/dev/null 2>&1 || : "
}
/*
* The Gerrit Trigger will automatically post the "Verified" results back
diff --git a/tests/CI/installAsterisk.sh b/tests/CI/installAsterisk.sh
index 74f5651..0806566 100755
--- a/tests/CI/installAsterisk.sh
+++ b/tests/CI/installAsterisk.sh
@@ -1,6 +1,8 @@
#!/usr/bin/env bash
CIDIR=$(dirname $(readlink -fn $0))
+UNINSTALL=0
+UNINSTALL_ALL=0
source $CIDIR/ci.functions
MAKE=`which make`
@@ -10,6 +12,9 @@
fi
destdir=${DESTDIR:+DESTDIR=$DESTDIR}
+[ $UNINSTALL -gt 0 ] && ${MAKE} ${destdir} uninstall
+[ $UNINSTALL_ALL -gt 0 ] && ${MAKE} ${destdir} uninstall-all
+
${MAKE} ${destdir} install || ${MAKE} ${destdir} NOISY_BUILD=yes install || exit 1
${MAKE} ${destdir} samples
if [ x"$DESTDIR" != x ] ; then
diff --git a/tests/CI/periodic-dailyTestGroups.json b/tests/CI/periodic-dailyTestGroups.json
index 01f51d1..7f0fd7e 100644
--- a/tests/CI/periodic-dailyTestGroups.json
+++ b/tests/CI/periodic-dailyTestGroups.json
@@ -2,37 +2,43 @@
{
"name": "ari ",
"dir": "tests/CI/output/ari",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "-t tests/rest_api/"
},
{
"name": "pjs ",
"dir": "tests/CI/output/pjsip",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": "-t tests/channels/pjsip"
},
{
"name": "sip ",
"dir": "tests/CI/output/sip",
+ "runTestsuiteOptions": "--test-timeout=240",
"testcmd": "-t tests/channels/SIP"
},
{
"name": "iax ",
"dir": "tests/CI/output/iax2_local",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": " -t tests/channels/iax2 -t tests/channels/local"
},
{
"name": "apps",
"dir": "tests/CI/output/agi-apps",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": " -t tests/agi -t tests/apps -t blind-transfer-parkingtimeout"
},
{
"name": "othr",
"dir": "tests/CI/output/other",
+ "runTestsuiteOptions": "--test-timeout=180",
"testcmd": " -T tests/(apps|agi|blind-transfer-parkingtimeout|rest_api|channels|realtime|example|skeleton_test|remote-test)"
},
{
"name": "real",
"dir": "tests/CI/output/realtime",
- "runTestsuiteOptions": "--realtime",
+ "runTestsuiteOptions": "--test-timeout=180 --realtime --initialize-db --cleanup-db",
"testcmd": " -t tests/channels/pjsip -G realtime-incompatible"
}
]
diff --git a/tests/CI/periodics-daily.jenkinsfile b/tests/CI/periodics-daily.jenkinsfile
index 8f53658..ae762f8 100644
--- a/tests/CI/periodics-daily.jenkinsfile
+++ b/tests/CI/periodics-daily.jenkinsfile
@@ -10,11 +10,23 @@
* we need to dynamically determine which docker image we're going to use and
* you can't do that in a declarative pipeline.
*/
+def timeoutTime = 3
+def timeoutUnits = 'HOURS'
+if (env.TIMEOUT_DAILIES) {
+ def _timeout = env.TIMEOUT_DAILIES.split()
+ timeoutTime = _timeout[0].toInteger()
+ timeoutUnits = _timeout[1]
+}
+
pipeline {
+ options {
+ timestamps()
+ timeout(time: timeoutTime, unit: timeoutUnits)
+ }
triggers {
cron 'H H(0-4) * * *'
}
-
+
agent {
/* All of the stages need to be performed on a docker host */
label "swdev-docker"
@@ -25,8 +37,10 @@
steps {
/* Here's where we switch to scripted pipeline */
script {
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
+
stage ("Checkout") {
- sh "sudo chown -R jenkins:users ."
+ sh "sudo chown -R jenkins:users ."
sh "printenv | sort"
sh "sudo tests/CI/setupJenkinsEnvironment.sh"
}
@@ -35,23 +49,46 @@
def r = currentBuild.startTimeInMillis % images.length
def ri = images[(int)r]
def randomImage = env.DOCKER_REGISTRY + "/" + ri
- def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
- " -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
+ def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
+ " --tmpfs /tmp:exec,size=1G -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
" --entrypoint=''"
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
def outputdir = "tests/CI/output/Testsuite"
+
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
def img = docker.image(randomImage)
img.pull()
- stage ("Build") {
- img.inside(dockerOptions + " --name ${bt}-build") {
+ img.inside(dockerOptions + " --name ${bt}-build") {
+ stage ("Build") {
echo 'Building..'
env.CCACHE_DIR = "/srv/cache/ccache"
- sh "./tests/CI/buildAsterisk.sh --output-dir=${outputdir} --cache-dir=/srv/cache"
+ sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
artifacts: "${outputdir}/*"
}
+ stage ("Docs") {
+
+ sh "sudo ./tests/CI/installAsterisk.sh --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
+
+ def docUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/publish-docs")
+ checkout scm: [$class: 'GitSCM',
+ branches: [[name: "master"]],
+ extensions: [
+ [$class: 'RelativeTargetDirectory', relativeTargetDir: "tests/CI/output/publish-docs"],
+ [$class: 'CloneOption',
+ noTags: true,
+ depth: 10,
+ honorRefspec: true,
+ shallow: true
+ ],
+ ],
+ userRemoteConfigs: [[url: docUrl]]
+ ]
+
+ sh "./tests/CI/publishAsteriskDocs.sh --user-group=jenkins:users --branch-name=${BRANCH_NAME} --wiki-doc-branch-regex=\"${WIKI_DOC_BRANCH_REGEX}\""
+ }
}
def testGroups = readJSON file: "tests/CI/periodic-dailyTestGroups.json"
@@ -74,11 +111,11 @@
img.inside("${dockerOptions} --name ${bt}-${groupName}") {
lock("${JOB_NAME}.${NODE_NAME}.installer") {
- sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
+ sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
}
sh "sudo rm -rf ${groupDir} || : "
-
+
checkout scm: [$class: 'GitSCM',
branches: [[name: "${BRANCH_NAME}"]],
extensions: [
@@ -93,7 +130,7 @@
userRemoteConfigs: [[url: testsuiteUrl]]
]
- sh "sudo tests/CI/runTestsuite.sh ${groupRunTestsuiteOptions} --testsuite-dir='${groupDir}' --test-command='${groupTestcmd}'"
+ sh "sudo tests/CI/runTestsuite.sh ${groupRunTestsuiteOptions} --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
@@ -115,8 +152,8 @@
}
post {
cleanup {
- sh "sudo make distclean 2&>/dev/null || : "
- sh "sudo rm -rf tests/CI/output 2&>/dev/null || : "
+ sh "sudo make distclean >/dev/null 2>&1 || : "
+ sh "sudo rm -rf tests/CI/output >/dev/null 2>&1 || : "
}
success {
echo "Reporting ${currentBuild.currentResult} Passed"
diff --git a/tests/CI/ref_debug.jenkinsfile b/tests/CI/ref_debug.jenkinsfile
index d0c42ea..0e0f49c 100644
--- a/tests/CI/ref_debug.jenkinsfile
+++ b/tests/CI/ref_debug.jenkinsfile
@@ -10,7 +10,19 @@
* we need to dynamically determine which docker image we're going to use and
* you can't do that in a declarative pipeline.
*/
+def timeoutTime = 24
+def timeoutUnits = 'HOURS'
+if (env.TIMEOUT_REF_DEBUG) {
+ def _timeout = env.TIMEOUT_REF_DEBUG.split()
+ timeoutTime = _timeout[0].toInteger()
+ timeoutUnits = _timeout[1]
+}
+
pipeline {
+ options {
+ timestamps()
+ timeout(time: timeoutTime, unit: timeoutUnits)
+ }
triggers {
cron 'H H(0-4) * * 0'
}
@@ -25,6 +37,8 @@
steps {
/* Here's where we switch to scripted pipeline */
script {
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
+
stage ("Checkout") {
sh "sudo chown -R jenkins:users ."
sh "printenv | sort"
@@ -35,11 +49,13 @@
def r = currentBuild.startTimeInMillis % images.length
def ri = images[(int)r]
def randomImage = env.DOCKER_REGISTRY + "/" + ri
- def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
- " -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
+ def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
+ " --tmpfs /tmp:exec,size=1G -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
" --entrypoint=''"
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
def outputdir = "tests/CI/output/Testsuite"
+
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
def img = docker.image(randomImage)
img.pull()
@@ -47,7 +63,7 @@
img.inside(dockerOptions + " --name ${bt}-build") {
echo 'Building..'
env.CCACHE_DIR = "/srv/cache/ccache"
- sh "./tests/CI/buildAsterisk.sh --ref-debug --output-dir=${outputdir} --cache-dir=/srv/cache"
+ sh "./tests/CI/buildAsterisk.sh --ref-debug --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
artifacts: "${outputdir}/*"
@@ -65,7 +81,7 @@
def groupName = testGroup.name
def groupDir = testGroup.dir
def groupTestcmd = testGroup.testcmd
- def testsuiteUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/1testsuite")
+ def testsuiteUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/testsuite")
parallelTasks[groupName] = {
stage (groupName) {
@@ -73,7 +89,7 @@
img.inside("${dockerOptions} --name ${bt}-${groupName}") {
lock("${JOB_NAME}.${NODE_NAME}.installer") {
- sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
+ sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
}
sh "sudo rm -rf ${groupDir} || : "
@@ -92,7 +108,7 @@
userRemoteConfigs: [[url: testsuiteUrl]]
]
- sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --test-command='${groupTestcmd}'"
+ sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
@@ -111,8 +127,8 @@
}
post {
cleanup {
- sh "sudo make distclean 2&>/dev/null || : "
- sh "sudo rm -rf tests/CI/output 2&>/dev/null || : "
+ sh "sudo make distclean >/dev/null 2>&1 || : "
+ sh "sudo rm -rf tests/CI/output >/dev/null 2>&1 || : "
}
success {
echo "Reporting ${currentBuild.currentResult} Passed"
diff --git a/tests/CI/runTestsuite.sh b/tests/CI/runTestsuite.sh
index 4ca6369..466991a 100755
--- a/tests/CI/runTestsuite.sh
+++ b/tests/CI/runTestsuite.sh
@@ -1,23 +1,29 @@
#!/usr/bin/env bash
CIDIR=$(dirname $(readlink -fn $0))
REALTIME=0
+TEST_TIMEOUT=600
source $CIDIR/ci.functions
ASTETCDIR=$DESTDIR/etc/asterisk
+if [ x"$WORK_DIR" != x ] ; then
+ export AST_WORK_DIR="$(readlink -f $WORK_DIR)"
+ mkdir -p "$AST_WORK_DIR"
+fi
+
pushd $TESTSUITE_DIR
./cleanup-test-remnants.sh
if [ $REALTIME -eq 1 ] ; then
- $CIDIR/setupRealtime.sh
+ $CIDIR/setupRealtime.sh --initialize-db=${INITIALIZE_DB:?0}
fi
export PYTHONPATH=./lib/python/
-echo "Running tests ${TEST_COMMAND}"
-./runtests.py --cleanup ${TEST_COMMAND} | contrib/scripts/pretty_print --no-color --no-timer --term-width=120 --show-errors || :
+echo "Running tests ${TESTSUITE_COMMAND} ${AST_WORK_DIR:+with work directory ${AST_WORK_DIR}}"
+./runtests.py --cleanup --timeout=${TEST_TIMEOUT} ${TESTSUITE_COMMAND} | contrib/scripts/pretty_print --no-color --no-timer --term-width=120 --show-errors || :
if [ $REALTIME -eq 1 ] ; then
- $CIDIR/teardownRealtime.sh
+ $CIDIR/teardownRealtime.sh --cleanup-db=${CLEANUP_DB:?0}
fi
if [ -f core* ] ; then
@@ -26,4 +32,4 @@
exit 1
fi
-popd
\ No newline at end of file
+popd
diff --git a/tests/CI/runUnittests.sh b/tests/CI/runUnittests.sh
index 0ad5f49..e2d7e45 100755
--- a/tests/CI/runUnittests.sh
+++ b/tests/CI/runUnittests.sh
@@ -1,9 +1,69 @@
#!/usr/bin/env bash
CIDIR=$(dirname $(readlink -fn $0))
+NO_EXPECT=0
source $CIDIR/ci.functions
ASTETCDIR=$DESTDIR/etc/asterisk
-echo "full => notice,warning,error,debug,verbose" > "$ASTETCDIR/logger.conf"
+asterisk_corefile_glob() {
+ local pattern=$(/sbin/sysctl -n kernel.core_pattern)
+
+ # If core_pattern is a pipe there isn't much we can do
+ if [[ ${pattern:0:1} == "|" ]] ; then
+ echo "core*"
+ else
+ echo "${pattern%%%*}*"
+ fi
+}
+
+run_tests_expect() {
+$EXPECT <<-EOF
+ spawn sudo $ASTERISK ${USER_GROUP:+-U ${USER_GROUP%%:*} -G ${USER_GROUP##*:}} -fcng -C $CONFFILE
+ match_max 512
+ set timeout 600
+ expect -notransfer "Asterisk Ready."
+ send "core show settings\r"
+ expect -notransfer "CLI>"
+ send "${UNITTEST_COMMAND:-test execute all}\r"
+ expect -notransfer -ex "Test(s) Executed"
+ expect -notransfer "CLI>"
+ send "test show results failed\r"
+ expect -notransfer "CLI>"
+ send "test generate results xml ${OUTPUTFILE}\r"
+ expect -notransfer "CLI>"
+ send "core stop now\r"
+ expect -notransfer "Executing last minute cleanups"
+ wait
+EOF
+}
+
+run_tests_socket() {
+ sudo $ASTERISK ${USER_GROUP:+-U ${USER_GROUP%%:*} -G ${USER_GROUP##*:}} -gn -C $CONFFILE
+ for n in {1..5} ; do
+ sleep 3
+ $ASTERISK -rx "core waitfullybooted" -C $CONFFILE && break
+ done
+ sleep 1
+ $ASTERISK -rx "core show settings" -C $CONFFILE
+ $ASTERISK -rx "${UNITTEST_COMMAND:-test execute all}" -C $CONFFILE
+ $ASTERISK -rx "test show results failed" -C $CONFFILE
+ $ASTERISK -rx "test generate results xml $OUTPUTFILE" -C $CONFFILE
+ $ASTERISK -rx "core stop now" -C $CONFFILE
+}
+
+# If DESTDIR is used to install and run asterisk from non standard locations,
+# the directory entries in asterisk.conf need to be munged to prepend DESTDIR.
+ALTERED=$(head -10 ../tmp/DESTDIR/etc/asterisk/asterisk.conf | grep -q "DESTDIR" && echo yes)
+if [ x"$ALTERED" = x ] ; then
+ # In the section that starts with [directories and ends with a blank line,
+ # replace "=> " with "=> ${DESTDIR}"
+ sed -i -r -e "/^\[directories/,/^$/ s@=>\s+@=> ${DESTDIR}@" "$ASTETCDIR/asterisk.conf"
+fi
+
+cat <<-EOF > "$ASTETCDIR/logger.conf"
+ [logfiles]
+ full => notice,warning,error,debug,verbose
+ console => notice,warning,error
+EOF
echo "[default]" > "$ASTETCDIR/extensions.conf"
@@ -23,7 +83,7 @@
[general]
enabled=yes
bindaddr=127.0.0.1
- port=8088
+ bindport=8088
EOF
cat <<-EOF > "$ASTETCDIR/modules.conf"
@@ -44,6 +104,7 @@
CONFFILE=$ASTETCDIR/asterisk.conf
OUTPUTDIR=${OUTPUT_DIR:-tests/CI/output/}
OUTPUTFILE=${OUTPUT_XML:-${OUTPUTDIR}/unittests-results.xml}
+EXPECT="$(which expect 2>/dev/null || : )"
[ ! -d ${OUTPUTDIR} ] && mkdir -p $OUTPUTDIR
[ x"$USER_GROUP" != x ] && sudo chown -R $USER_GROUP $OUTPUTDIR
@@ -51,23 +112,28 @@
rm -rf $ASTETCDIR/extensions.{ael,lua} || :
set -x
-sudo $ASTERISK ${USER_GROUP:+-U ${USER_GROUP%%:*} -G ${USER_GROUP##*:}} -gn -C $CONFFILE
-for n in `seq 1 5` ; do
- sleep 3
- $ASTERISK -rx "core waitfullybooted" -C $CONFFILE && break
-done
-sleep 1
-$ASTERISK -rx "${TEST_COMMAND:-test execute all}" -C $CONFFILE
-$ASTERISK -rx "test show results failed" -C $CONFFILE
-$ASTERISK -rx "test generate results xml $OUTPUTFILE" -C $CONFFILE
-$ASTERISK -rx "core stop now" -C $CONFFILE
+if [ x"$EXPECT" != x -a $NO_EXPECT -eq 0 ] ; then
+ run_tests_expect
+else
+ run_tests_socket
+fi
+
+# Cleanup "just in case"
+sudo killall -qe -ABRT $ASTERISK
runner rsync -vaH $DESTDIR/var/log/asterisk/. $OUTPUTDIR
set +x
[ x"$USER_GROUP" != x ] && sudo chown -R $USER_GROUP $OUTPUTDIR
-if [ -f core* ] ; then
- echo "*** Found a core file after running unit tests ***"
- $DESTDIR/var/lib/asterisk/scripts/ast_coredumper --no-default-search core*
- exit 1
-fi
+
+for core in $(asterisk_corefile_glob)
+do
+ if [ -f $core ]
+ then
+ echo "*** Found a core file ($core) after running unit tests ***"
+ set -x
+ sudo OUTPUTDIR=$OUTPUTDIR $DESTDIR/var/lib/asterisk/scripts/ast_coredumper --no-default-search $core
+ fi
+done
+
+exit 0
diff --git a/tests/CI/setupRealtime.sh b/tests/CI/setupRealtime.sh
index f599d33..49ffb8b 100755
--- a/tests/CI/setupRealtime.sh
+++ b/tests/CI/setupRealtime.sh
@@ -1,9 +1,114 @@
#!/usr/bin/env bash
CIDIR=$(dirname $(readlink -fn $0))
+INITIALIZE_DB=0
source $CIDIR/ci.functions
+ASTTOP=$(readlink -fn $CIDIR/../../)
set -e
+POSTGRES_PID=`pidof postgres || : `
+
+if [ -z "$POSTGRES_PID" ] ; then
+ if [ -x /usr/local/bin/postgresql-start ] ; then
+ /usr/local/bin/postgresql-start
+ fi
+fi
+
+POSTGRES_PID=`pidof postgres || : `
+if [ -z "$POSTGRES_PID" ] ; then
+ echo "Postgres isn't running. It must be started manually before this test can continue."
+ exit 1
+fi
+
+if [ $INITIALIZE_DB -gt 0 ] ; then
+ echo "(re)Initializing Database"
+
+ sudo -u postgres dropdb -e asterisk_test >/dev/null 2>&1 || :
+ sudo -u postgres dropuser -e asterisk_test >/dev/null 2>&1 || :
+ sudo -u postgres createuser --username=postgres -RDIElS asterisk_test
+ sudo -u postgres createdb --username=postgres -E UTF-8 -O asterisk_test asterisk_test
+
+ echo "Configuring ODBC"
+
+ sudo odbcinst -u -d -n "PostgreSQL-Asterisk-Test"
+
+ sudo odbcinst -i -d -n "PostgreSQL-Asterisk-Test" -f /dev/stdin <<-EOF
+ [PostgreSQL-Asterisk-Test]
+ Description=PostgreSQL ODBC driver (Unicode version)
+ Driver=psqlodbcw.so
+ Setup=libodbcpsqlS.so
+ Debug=0
+ CommLog=1
+ UsageCount=1
+ EOF
+
+ sudo odbcinst -u -s -l -n asterisk-connector-test
+ sudo odbcinst -i -s -l -n asterisk-connector-test -f /dev/stdin <<-EOF
+ [asterisk-connector-test]
+ Description = PostgreSQL connection to 'asterisk' database
+ Driver = PostgreSQL-Asterisk-Test
+ Database = asterisk_test
+ Servername = 127.0.0.1
+ UserName = asterisk_test
+ Port = 5432
+ Protocol = 9.1
+ ReadOnly = No
+ RowVersioning = No
+ ShowSystemTables = No
+ ShowOldColumn = No
+ FakeOldIndex = No
+ ConnSettings =
+ EOF
+fi
+
+cat >/tmp/config.ini <<-EOF
+ [alembic]
+ script_location = config
+ sqlalchemy.url = postgresql://asterisk_test@localhost/asterisk_test
+
+ [loggers]
+ keys = root,sqlalchemy,alembic
+
+ [handlers]
+ keys = console
+
+ [formatters]
+ keys = generic
+
+ [logger_root]
+ level = WARN
+ handlers = console
+ qualname =
+
+ [logger_sqlalchemy]
+ level = WARN
+ handlers =
+ qualname = sqlalchemy.engine
+
+ [logger_alembic]
+ level = INFO
+ handlers =
+ qualname = alembic
+
+ [handler_console]
+ class = StreamHandler
+ args = (sys.stderr,)
+ level = NOTSET
+ formatter = generic
+
+ [formatter_generic]
+ format = %(levelname)-5.5s [%(name)s] %(message)s
+ datefmt = %H:%M:%S
+EOF
+
+pushd $ASTTOP/contrib/ast-db-manage
+
+psql --username=asterisk_test --host=localhost --db=asterisk_test --command='DROP OWNED BY asterisk_test CASCADE'
+alembic -c /tmp/config.ini upgrade head
+rm -rf /tmp/config.ini || :
+
+popd
+
cp test-config.yaml test-config.orig.yaml
cat >test-config.yaml <<-EOF
@@ -65,59 +170,10 @@
config-section: realtime-config
realtime-config:
- username: "asterisk"
+ username: "asterisk_test"
+ password: "asterisk_test"
host: "localhost"
- db: "asterisk"
- dsn: "asterisk-connector"
+ db: "asterisk_test"
+ dsn: "asterisk-connector-test"
EOF
-ASTTOP=$(readlink -fn $CIDIR/../../)
-
-cat >/tmp/config.ini <<-EOF
- [alembic]
- script_location = config
- sqlalchemy.url = postgresql://asterisk@localhost/asterisk
-
- [loggers]
- keys = root,sqlalchemy,alembic
-
- [handlers]
- keys = console
-
- [formatters]
- keys = generic
-
- [logger_root]
- level = WARN
- handlers = console
- qualname =
-
- [logger_sqlalchemy]
- level = WARN
- handlers =
- qualname = sqlalchemy.engine
-
- [logger_alembic]
- level = INFO
- handlers =
- qualname = alembic
-
- [handler_console]
- class = StreamHandler
- args = (sys.stderr,)
- level = NOTSET
- formatter = generic
-
- [formatter_generic]
- format = %(levelname)-5.5s [%(name)s] %(message)s
- datefmt = %H:%M:%S
-EOF
-
-pushd $ASTTOP/contrib/ast-db-manage
-if [ -x /usr/local/bin/postgresql-start ] ; then
- /usr/local/bin/postgresql-start
-fi
-psql --username=asterisk --host=localhost --db=asterisk --command='DROP OWNED BY asterisk CASCADE'
-alembic -c /tmp/config.ini upgrade head
-rm -rf /tmp/config.ini || :
-popd
diff --git a/tests/CI/teardownRealtime.sh b/tests/CI/teardownRealtime.sh
index 1114699..8687706 100755
--- a/tests/CI/teardownRealtime.sh
+++ b/tests/CI/teardownRealtime.sh
@@ -1,6 +1,12 @@
#!/usr/bin/env bash
CIDIR=$(dirname $(readlink -fn $0))
+CLEANUP_DB=0
source $CIDIR/ci.functions
cp test-config.orig.yaml test-config.yaml
-psql --username=asterisk --host=localhost --db=asterisk --command='DROP OWNED BY asterisk CASCADE'
+if [ $CLEANUP_DB -gt 0 ] ; then
+ sudo -u postgres dropdb -e asterisk_test >/dev/null 2>&1 || :
+ sudo -u postgres dropuser -e asterisk_test >/dev/null 2>&1 || :
+ sudo odbcinst -u -d -n "PostgreSQL-Asterisk-Test"
+ sudo odbcinst -u -s -l -n "asterisk-connector-test"
+fi
diff --git a/tests/CI/unittests.jenkinsfile b/tests/CI/unittests.jenkinsfile
index 767ff94..3961cf5 100644
--- a/tests/CI/unittests.jenkinsfile
+++ b/tests/CI/unittests.jenkinsfile
@@ -10,7 +10,19 @@
* we need to dynamically determine which docker image we're going to use and
* you can't do that in a declarative pipeline.
*/
+def timeoutTime = 30
+def timeoutUnits = 'MINUTES'
+if (env.TIMEOUT_UNITTESTS) {
+ def _timeout = env.TIMEOUT_UNITTESTS.split()
+ timeoutTime = _timeout[0].toInteger()
+ timeoutUnits = _timeout[1]
+}
+
pipeline {
+ options {
+ timestamps()
+ timeout(time: timeoutTime, unit: timeoutUnits)
+ }
triggers {
/*
* This trigger will match either the "asterisk" or "Security-asterisk"
@@ -34,7 +46,7 @@
triggerOnEvents: [
commentAddedContains('^recheck$'),
patchsetCreated(excludeDrafts: false,
- excludeNoCodeChange: true,
+ excludeNoCodeChange: false,
excludeTrivialRebase: false),
draftPublished()
],
@@ -63,10 +75,12 @@
}
steps {
script {
+ manager.build.displayName = "${env.GERRIT_CHANGE_NUMBER}"
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
+
stage ("Checkout") {
sh "sudo chown -R jenkins:users ."
- env.GERRIT_PROJECT_URL = env.GERRIT_CHANGE_URL.replaceAll(/\/[0-9]+$/, "/${env.GERRIT_PROJECT}")
- sh "printenv | sort"
+ env.GERRIT_PROJECT_URL = env.GIT_URL.replaceAll(/[^\/]+$/, env.GERRIT_PROJECT)
/*
* Jenkins has already automatically checked out the base branch
@@ -77,32 +91,46 @@
*
* The Gerrit Trigger provides all the URLs and refspecs to
* check out the change.
+ *
+ * We need to retrieve the jenkins2 gerrit https credentials
+ * in case this review is in a restricted project.
*/
- checkout scm: [$class: 'GitSCM',
- branches: [[name: env.GERRIT_BRANCH ]],
- extensions: [
- [$class: 'ScmName', name: 'gerrit-public'],
- [$class: 'CleanBeforeCheckout'],
- [$class: 'PreBuildMerge', options: [
- mergeRemote: 'gerrit-public',
- fastForwardMode: 'NO_FF',
- mergeStrategy: 'RECURSIVE',
- mergeTarget: env.GERRIT_BRANCH]],
- [$class: 'CloneOption',
- honorRefspec: true,
- noTags: true,
- depth: 10,
- shallow: true
+ withCredentials([usernamePassword(credentialsId: "${JENKINS_GERRIT_CREDS}",
+ passwordVariable: 'GERRIT_USER_PW', usernameVariable: 'GERRIT_USER_NAME')]) {
+
+ sh "printenv | sort"
+
+ checkout scm: [$class: 'GitSCM',
+ branches: [[name: env.GERRIT_BRANCH ]],
+ extensions: [
+ [$class: 'ScmName', name: env.GERRIT_NAME],
+ [$class: 'CleanBeforeCheckout'],
+ [$class: 'PreBuildMerge', options: [
+ mergeRemote: env.GERRIT_NAME,
+ fastForwardMode: 'NO_FF',
+ mergeStrategy: 'RECURSIVE',
+ mergeTarget: env.GERRIT_BRANCH]],
+ [$class: 'CloneOption',
+ honorRefspec: true,
+ noTags: true,
+ depth: 10,
+ shallow: true
+ ],
+ [$class: 'PruneStaleBranch'],
+ [$class: 'BuildChooserSetting',
+ buildChooser: [$class: 'GerritTriggerBuildChooser']
+ ]
],
- [$class: 'PruneStaleBranch'],
- [$class: 'BuildChooserSetting',
- buildChooser: [$class: 'GerritTriggerBuildChooser']
+ userRemoteConfigs: [
+ [
+ credentialsId: env.JENKINS_GERRIT_CREDS,
+ name: env.GERRIT_NAME,
+ refspec: env.GERRIT_REFSPEC,
+ url: env.GERRIT_PROJECT_URL.replaceAll("http(s)?://", "http\$1://${GERRIT_USER_NAME}@")
+ ]
]
- ],
- userRemoteConfigs: [
- [name: env.GERRIT_NAME, refspec: env.GERRIT_REFSPEC, url: env.GERRIT_PROJECT_URL ]
]
- ]
+ }
sh "sudo tests/CI/setupJenkinsEnvironment.sh"
}
@@ -112,11 +140,12 @@
def ri = images[(int)r]
def randomImage = env.DOCKER_REGISTRY + "/" + ri;
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
- def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
- " -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
+ def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
+ " --tmpfs /tmp:exec,size=1G -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
" --entrypoint='' --name ${bt}-build"
def outputdir = "tests/CI/output/UnitTests"
+ manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
def img = docker.image(randomImage)
img.pull()
img.inside(dockerOptions) {
@@ -124,7 +153,7 @@
stage ('Build') {
echo 'Building..'
- sh "./tests/CI/buildAsterisk.sh --output-dir=${outputdir} --cache-dir=/srv/cache"
+ sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
artifacts: "${outputdir}/*"
@@ -134,9 +163,9 @@
def outputfile = "${outputdir}/unittests-results.xml"
def testcmd = "test execute all"
- sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
+ sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
- sh "tests/CI/runUnittests.sh --user-group=jenkins:users --output-dir='${outputdir}' --output-xml='${outputfile}' --test-command='${testcmd}'"
+ sh "tests/CI/runUnittests.sh --user-group=jenkins:users --output-dir='${outputdir}' --output-xml='${outputfile}' --unittest-command='${testcmd}'"
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
artifacts: "${outputdir}/**"
@@ -152,8 +181,8 @@
}
post {
cleanup {
- sh "sudo make distclean 2&>/dev/null || : "
- sh "sudo rm -rf tests/CI/output 2&>/dev/null || : "
+ sh "sudo make distclean >/dev/null 2>&1 || : "
+ sh "sudo rm -rf tests/CI/output >/dev/null 2>&1 || : "
}
/*
* The Gerrit Trigger will automatically post the "Verified" results back
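
As a usage note: runTestsuite.sh now takes --testsuite-command instead of
--test-command, and the per-group "runTestsuiteOptions" values from the JSON
files (for example --test-timeout) are passed through by the daily periodic
job. A hypothetical expansion of that invocation for the daily "ari" group,
using the values from periodic-dailyTestGroups.json above (ci.functions, not
shown in this diff, maps these long options onto the TEST_TIMEOUT,
TESTSUITE_DIR and TESTSUITE_COMMAND variables the script reads):

    sudo tests/CI/runTestsuite.sh --test-timeout=180 \
        --testsuite-dir='tests/CI/output/ari' \
        --testsuite-command='-t tests/rest_api/'
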
--
To view, visit https://gerrit.asterisk.org/c/asterisk/+/11622
To unsubscribe, or for help writing mail filters, visit https://gerrit.asterisk.org/settings
Gerrit-Project: asterisk
Gerrit-Branch: certified/13.21
Gerrit-Change-Id: I0b3254c08a2479f3d39151690350cce5ce5ad766
Gerrit-Change-Number: 11622
Gerrit-PatchSet: 4
Gerrit-Owner: George Joseph <gjoseph at digium.com>
Gerrit-Reviewer: Friendly Automation
Gerrit-Reviewer: George Joseph <gjoseph at digium.com>
Gerrit-Reviewer: Joshua Colp <jcolp at digium.com>
Gerrit-Reviewer: Kevin Harwell <kharwell at digium.com>
Gerrit-MessageType: merged