default: 'root'
- string:
<<: *k8s_ssh_key
+ default: ''
+
+# IEC usecase(s) parameters
+- parameter:
+ name: 'seba_on_arm-defaults'
+ parameters:
+ - string:
+ name: PON_TYPE
+ default: 'ponsim'
########################
# trigger macros
name: 'iec-type2-fuel-virtual-ubuntu1604-daily-master-trigger'
triggers:
- timed: '0 1 * * 1,3,5'
-- trigger:
- name: 'iec-type2-fuel-baremetal-ubuntu1604-daily-master-trigger'
- triggers:
- - timed: ''
- trigger:
name: 'iec-type2-compass-virtual-ubuntu1604-daily-master-trigger'
triggers:
- trigger:
name: 'iec-type2-fuel-baremetal-ubuntu1804-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 1 * * *'
- trigger:
name: 'iec-type2-fuel-virtual-centos7-daily-master-trigger'
triggers:
- timed: '0 1 * * 4,7'
-- trigger:
- name: 'iec-type2-fuel-baremetal-centos7-daily-master-trigger'
- triggers:
- - timed: ''
- trigger:
name: 'iec-type1-compass-virtual-ubuntu1604-daily-master-trigger'
triggers:
DEPLOY_SCENARIO={scenario}
same-node: true
block: true
+ - trigger-builds:
- project: 'validation-enea-daily-{stream}'
same-node: true
current-parameters: true
BLUEPRINT={project-name}
VERSION=master
OPTIONAL=false
+ PULL=yes
block: true
- project:
!j2: |
+ {%- if deploy_type not in ['virtual'] -%}
{%- for uc in usecase -%}
- iec-{{ iecType }}-install-{{ uc | first }}-{{ installer }}-{{ deploy_type }}-{{ os }}-daily-{{ stream }}
+ {%- for ph in phase -%}
+ iec-{{ iecType }}-{{ ph }}-{{ uc }}-{{ installer }}-{{ deploy_type }}-{{ os }}-daily-{{ stream }}
+ {%- if not loop.last -%},{%- endif -%}
+ {%- endfor %}
{%- if not loop.last -%},{%- endif -%}
{%- endfor %}
+ {%- endif -%}
current-parameters: true
same-node: true
block: true
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO $IEC_TYPE'
scm: &scm
- - git:
- credentials-id: '{jenkins-ssh-credential}'
- url: '{git-url}/{project}.git'
- refspec: ''
- branches:
- - 'refs/heads/{branch}'
- skip-tag: true
- wipe-workspace: true
- submodule:
- disable: true
- recursive: false
- timeout: '{submodule-timeout}'
+ - lf-infra-gerrit-scm:
+ jenkins-ssh-credential: '{jenkins-ssh-credential}'
+ git-url: '{git-url}/{project}.git'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '$GERRIT_BRANCH'
+ submodule-recursive: false
+ submodule-timeout: '{submodule-timeout}'
+ submodule-disable: true
choosing-strategy: default
builders:
publishers: *publishers
- job-template:
- id: akraino-iec-install-usecase
- name: 'iec-{iecType}-install-{usecase}-{installer}-{deploy_type}-{os}-daily-{stream}'
+ id: akraino-iec-phase-usecase
+ name: 'iec-{iecType}-{phase}-{usecase}-{installer}-{deploy_type}-{os}-daily-{stream}'
concurrent: true
node: '{slave-label}'
properties: *properties
branch: '{branch}'
- '{project}-defaults'
- '{installer}-{os}-defaults'
+ - '{usecase}-defaults'
wrappers:
- build-name:
!include-raw-escape: ../iec/iec-{usecase}.sh
publishers: *publishers
-
- lf-infra-gerrit-scm:
jenkins-ssh-credential: '{jenkins-ssh-credential}'
git-url: '{git-url}/{project}.git'
- refspec: ''
- branch: '{branch}'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '$GERRIT_BRANCH'
submodule-recursive: '{submodule-recursive}'
submodule-timeout: '{submodule-timeout}'
submodule-disable: false
- lf-infra-gerrit-scm:
jenkins-ssh-credential: '{jenkins-ssh-credential}'
git-url: '{git-url}/{project}.git'
- refspec: ''
- branch: '{branch}'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '$GERRIT_BRANCH'
submodule-recursive: '{submodule-recursive}'
submodule-timeout: '{submodule-timeout}'
submodule-disable: false
- lf-infra-gerrit-scm:
jenkins-ssh-credential: '{jenkins-ssh-credential}'
git-url: '{git-url}/{project}.git'
- refspec: ''
- branch: '{branch}'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '$GERRIT_BRANCH'
submodule-recursive: true
submodule-timeout: '{submodule-timeout}'
submodule-disable: false
---
-#######
-# SCM #
-#######
-- scm:
- # Enhanced version of the global-jjb
- name: validation-infra-gerrit-scm
- scm:
- - git:
- credentials-id: '{jenkins-ssh-credential}'
- url: '{git-url}'
- refspec: '{refspec}'
- branches:
- - 'refs/{refs_tag}/{stream}'
- skip-tag: false
- wipe-workspace: true
- submodule:
- recursive: '{submodule-recursive}'
- timeout: '{submodule-timeout}'
- choosing-strategy: '{choosing-strategy}'
-
##############
# PARAMETERS #
##############
- parameter:
- name: validation-job-parameters
+ name: bluval-defaults
parameters:
- - lf-infra-parameters:
- project: '{project}'
- stream: '{stream}'
- branch: '{branch}'
+ - lab_params
+ - string:
+ name: LAB_SILO
+ default: ''
+ description: 'Folder in nexus where the logs are pushed.'
+ - string:
+ name: SETTINGS_FILE
+ default: '/home/jenkins/settings.xml'
+ description: 'Path to credentials file'
- parameter:
- name: bluval-defaults
+ name: lab_params
parameters:
- - string: &cluster_master_ip
+ - string:
name: CLUSTER_MASTER_IP
default: ''
description: 'IP address to connect to the cluster master'
- - string: &cluster_ssh_user
+ - string:
name: CLUSTER_SSH_USER
default: ''
description: 'SSH username to connect to the cluster master '
- - string: &cluster_ssh_password
+ - string:
name: CLUSTER_SSH_PASSWORD
default: ''
description: 'SSH password to connect to the cluster master'
- - string: &cluster_ssh_key
+ - string:
name: CLUSTER_SSH_KEY
default: ''
description: 'SSH key to connect to the cluster master'
- - string: &blueprint_name
+ - string:
name: BLUEPRINT
default: ''
description: 'Blueprint used'
- - string: &layer
+ - string:
name: LAYER
default: ''
description: 'Layer'
- - string: &version
+ - string:
name: VERSION
default: ''
description: 'Version'
- - string: &optional
+ - string:
name: OPTIONAL
default: ''
description: 'If set to yes run optional tests'
- string:
- name: LAB_SILO
- default: ''
- description: 'Folder in nexus where the logs are pushed.'
- - string:
- name: SETTINGS_FILE
- default: '/home/jenkins/settings.xml'
- description: 'Path to credentials file'
-
-- parameter:
- name: lab_params
- parameters:
- - string:
- <<: *cluster_master_ip
- - string:
- <<: *cluster_ssh_user
- - string:
- <<: *cluster_ssh_password
- - string:
- <<: *cluster_ssh_key
- - string:
- <<: *blueprint_name
- - string:
- <<: *layer
- - string:
- <<: *version
- - string:
- <<: *optional
+ name: PULL
+ default: 'false'
+ description: 'If set to yes pull docker images before run'
############
# SETTINGS #
# settings for jobs run in multijob phases
- docker-build-job-settings: &docker-build-job-settings
name: docker_build_job_settings
- current-parameters: false
- git-revision: true
+ current-parameters: true
+ git-revision: false
node-parameters: false
- predefined-parameters: |
- GERRIT_REFNAME=$GERRIT_REFNAME
kill-phase-on: FAILURE
abort-all-jobs: false
build-timeout: 90
parameters:
- - validation-job-parameters:
+ - lf-infra-parameters: &validation_lf_infra_parameters
project: '{project}'
branch: '{branch}'
stream: '{stream}'
blocking-jobs:
- 'validation-docker-.*'
block-level: 'NODE'
- scm:
- - validation-infra-gerrit-scm:
- jenkins-ssh-credential: '{jenkins-ssh-credential}'
- git-url: '{git-url}/{project}.git'
- refspec: ''
- refs_tag: '{refs_tag}'
- stream: '{stream}'
- submodule-recursive: false
- submodule-timeout: '{submodule-timeout}'
- submodule-disable: false
- choosing-strategy: default
- triggers:
- # Build nigtly at 12:10 AM
- - timed: '10 0 * * *'
+ triggers: '{obj:triggers}'
builders:
- multijob:
build-timeout: 75
parameters:
- - validation-job-parameters:
- project: '{project}'
- branch: '{branch}'
- stream: '{stream}'
- arch_tag: '{arch_tag}'
+ - lf-infra-parameters: *validation_lf_infra_parameters
properties:
- build-blocker:
blocking-jobs:
- 'validation-docker-build-.*'
block-level: 'NODE'
- scm:
- - validation-infra-gerrit-scm:
+ scm: &validation_scm
+      # lf-infra-gerrit-scm hardcodes branch specifier to refs/heads, while
+ # lf-infra-github-scm does not, allowing us to pass a fully custom '{branch}'
+ - lf-infra-github-scm:
jenkins-ssh-credential: '{jenkins-ssh-credential}'
- git-url: '{git-url}/{project}.git'
- refspec: ''
- refs_tag: '{refs_tag}'
- stream: '{stream}'
+ url: '{git-url}/{project}.git'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '$STREAM'
submodule-recursive: false
submodule-timeout: '{submodule-timeout}'
submodule-disable: false
- lf-infra-docker-login:
global-settings-file: global-settings
settings-file: '{mvn-settings}'
- - conditional-step:
- condition-kind: regex-match
- regex: '^tags$'
- label: '{refs_tag}'
- steps:
- - shell: |
- #!/bin/bash -ex
- echo "export TAG_VER=$STREAM" >> tag_version.sh
- shell: |
#!/bin/bash -ex
- if [ -f tag_version.sh ]; then source tag_version.sh; fi
+ if [ "{branch}" != "{stream}" ]; then export TAG_VER=$STREAM; fi
make -k -C docker push-all
- rm -f tag_version.sh
- - shell: |
- #!/bin/bash -ex
docker system prune -af
- job-template:
build-timeout: 15
parameters:
- - validation-job-parameters:
- project: '{project}'
- branch: '{branch}'
- stream: '{stream}'
+ - lf-infra-parameters: *validation_lf_infra_parameters
disabled: '{obj:disabled}'
- scm:
- - validation-infra-gerrit-scm:
- jenkins-ssh-credential: '{jenkins-ssh-credential}'
- git-url: '{git-url}/{project}.git'
- refspec: ''
- refs_tag: '{refs_tag}'
- stream: '{stream}'
- submodule-recursive: false
- submodule-timeout: '{submodule-timeout}'
- submodule-disable: false
- choosing-strategy: default
+ scm: *validation_scm
builders:
- lf-infra-docker-login:
global-settings-file: global-settings
settings-file: '{mvn-settings}'
- - conditional-step:
- condition-kind: regex-match
- regex: '^tags$'
- label: '{refs_tag}'
- steps:
- - shell: |
- #!/bin/bash -ex
- echo "export TAG_VER=$STREAM" >> tag_version.sh
- shell: |
#!/bin/bash -ex
- if [ -f tag_version.sh ]; then source tag_version.sh; fi
+ if [ "{branch}" != "{stream}" ]; then export TAG_VER=$STREAM; fi
for sd in docker/*/.; do make -k -C $sd .push_manifest; done
- rm -f tag_version.sh
- job-template:
id: akraino-validation-lab-daily
concurrent: true
node: '{build-node}'
parameters:
+ - lf-infra-parameters: *validation_lf_infra_parameters
- string:
name: DEPLOY_SCENARIO
default: ''
- {'bluval-defaults'}
scm:
- - git:
- credentials-id: '{jenkins-ssh-credential}'
- url: '{git-url}/validation.git'
- refspec: ''
- branches:
- - 'refs/heads/{branch}'
- skip-tag: true
- wipe-workspace: true
- submodule:
- disable: true
- recursive: false
- timeout: '{submodule-timeout}'
+ - lf-infra-github-scm:
+ jenkins-ssh-credential: '{jenkins-ssh-credential}'
+ url: '{git-url}/{project}.git'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '$GERRIT_BRANCH'
+ submodule-recursive: false
+ submodule-timeout: '{submodule-timeout}'
+ submodule-disable: true
choosing-strategy: default
builders:
- description-setter:
- description: "POD: $NODE_NAME"
+ description: "POD: $NODE_NAME<br/>BLUEPRINT: $BLUEPRINT<br/>LAB: $LAB_SILO<br/>LAYER: $LAYER"
- lf-infra-create-netrc:
server-id: logs
- shell: !include-raw-escape:
- ../shell/run_bluval.sh
publishers:
+ - robot:
+ output-path: 'results'
+ report-html: '**/report.html'
+ log-html: '**/log.html'
+ output-xml: '**/output.xml'
- logparser:
use-project-rules: true
parse-rules: "./bluval/rules.txt"
- unstable-on-warning: true
+ unstable-on-warning: false
fail-on-error: true
- show-graphs: false
+ show-graphs: true
- aarch64_dev
- aarch64_build
- aarch64_baremetal
+ - aarch64_baremetal_dev
jobs:
- akraino-project-build-node-init
LAB_NAME='arm'
# shellcheck disable=SC2153
POD_NAME=${NODE_NAME/*ubuntu1804-dev-48c-256g-/virtual}
-else
+ if [[ ! "$POD_NAME" =~ virtual ]]; then
+ POD_NAME=${NODE_NAME/*ubuntu1804-dev-96c-256g-/baremetal}
+ fi
+fi
+if [[ ! "$POD_NAME" =~ (virtual|baremetal) ]]; then
echo "Unavailable hardware. Cannot continue!"
exit 1
fi
##############################################################################
export TERM="vt220"
+set -ex
echo "JOB_NAME=${JOB_NAME}"
+# Run either ssh or scp on the K8S master
+run_on_k8s_master () {
+ case "${1}" in
+ ssh)
+ _c="${1}"
+ shift
+ _s=" "
+ ;;
+ scp)
+ _c="${1} -r"
+ shift
+ _s=":${1}"
+ shift
+ ;;
+ *)
+ echo "Use either ssh or scp to run on K8S master"
+ exit 1
+ ;;
+ esac
+
+ if [ -n "${K8S_SSH_PASSWORD}" ]
+ then
+ eval sshpass -p "${K8S_SSH_PASSWORD}" "${_c}" \
+ -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null \
+ ${_o} "${K8S_SSH_USER}"@"${K8S_MASTER_IP}${_s}" "$@"
+ elif [ -n "${K8S_SSH_KEY}" ]
+ then
+ eval ${_c} -i "${K8S_SSH_KEY}" \
+ -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null \
+ ${_o} "${K8S_SSH_USER}"@"${K8S_MASTER_IP}${_s}" "$@"
+ else
+ echo "Neither K8S_SSH_PASSWORD nor K8S_SSH_KEY are set"
+ exit 1
+ fi
+}
+
+if [ -z "$K8S_SSH_USER" ]
+then
+ echo "K8S_SSH_USER not set, cannot ssh to K8S master, aborting"
+ exit 1
+fi
+
case "${JOB_NAME}" in
*fuel*)
K8S_MASTER_IP=$(docker exec fuel cat /etc/hosts | grep iec01 \
        # On Fuel deployments the K8s ssh key is the one used by the deploy job
K8S_SSH_KEY=${SSH_KEY}
IEC_DIR="/var/lib/akraino/iec"
+ KUBE_DIR="~${K8S_SSH_USER}/.kube"
;;
*compass*)
# K8S_{MASTER_IP,SSH_USER,SSH_PASSWORD} are already set by job params
IEC_DIR="/${K8S_SSH_USER}/iec"
+ KUBE_DIR="~${K8S_SSH_USER}/.kube"
;;
*)
echo "Cannot determine installer from ${JOB_NAME}"
;;
esac
-if [ -z "$K8S_SSH_USER" ]
-then
- echo "K8S_SSH_USER not set, cannot ssh to install SEBA"
- exit 1
-fi
+case "${JOB_NAME}" in
+ iec-*-install-seba_on_arm*)
+ INSTALL_CMD="'cd ${IEC_DIR}/src/use_cases/seba_on_arm/install; ./install.sh'"
+ run_on_k8s_master ssh "${INSTALL_CMD}"
+ ;;
+ iec-*-test-seba_on_arm*)
-INSTALL_CMD="cd ${IEC_DIR}/src/use_cases/seba_on_arm/install; ./install.sh"
+ case "${PON_TYPE}" in
+ *ponsim*)
+ ;;
+ *)
+ echo "Unknown PON_TYPE ${PON_TYPE}"
+ exit
+ ;;
+ esac
-if [ -n "${K8S_SSH_PASSWORD}" ]
-then
- sshpass -p "${K8S_SSH_PASSWORD}" ssh -o StrictHostKeyChecking=no \
- "${K8S_SSH_USER}"@"${K8S_MASTER_IP}" "${INSTALL_CMD}"
-elif [ -n "${K8S_SSH_KEY}" ]
-then
- ssh -o StrictHostKeyChecking=no -i "${K8S_SSH_KEY}" \
- "${K8S_SSH_USER}"@"${K8S_MASTER_IP}" "${INSTALL_CMD}"
-else
- echo "Neither K8S_SSH_USER or K8S_SSH_KEY set. Cannot ssh to K8S Master"
- exit 1
-fi
+ # After the installation, copy the .kube dir from the K8S master
+ cd "${WORKSPACE}" || exit 1
+ run_on_k8s_master scp "${KUBE_DIR}" "${WORKSPACE}"
+
+ SEBA_TEST_DIR="src/use_cases/seba_on_arm/test"
+ TEST_CMD="${SEBA_TEST_DIR}/${PON_TYPE}/test.sh"
+ echo "Issuing command"
+ echo "${TEST_CMD}"
+
+ eval ${TEST_CMD}
+ ;;
+ *)
+ echo "Cannot determine what to do for seba_on_arm from ${JOB_NAME}"
+ exit 1
+ ;;
+esac
exit 0
views:
- common-view:
view-name: iec
- view-regex: '^{project-name}-(tox|type1|type2)-(verify|fuel|compass)-.*'
+ view-regex: '^{project-name}-(sonar|(tox|type1|type2)-(verify|fuel|compass)-.*)'
# -------------------------------
# BRANCH DEFINITIONS
# -------------------------------
# -------------------------------
deploy_type:
- baremetal:
- slave-label: 'aarch64_dev'
+ slave-label: 'aarch64_baremetal_dev'
scenario: 'k8-calico-iec-noha'
- virtual:
slave-label: 'aarch64_dev'
os: ubuntu1804
- iecType: type1
installer: fuel
+ # Fuel baremetal provisioning only supports Ubuntu 18.04
+ - installer: fuel
+ deploy_type: baremetal
+ os: centos7
+ - installer: fuel
+ deploy_type: baremetal
+ os: ubuntu1604
+ # SEBA on virtual PODs should be excluded
+ - deploy_type: virtual
+ usecase: seba_on_arm
# -------------------------------
# USECASE DEFINITIONS
# -------------------------------
usecase:
- - seba_on_arm:
- slave-label: 'aarch64_dev'
- scenario: 'k8-calico-iec-noha'
+ - seba_on_arm
+
+ phase:
+ - install
+ - test
jobs:
- akraino-iec-install
- akraino-iec-deploy
- akraino-iec-destroy
- - akraino-iec-install-usecase
+ - akraino-iec-phase-usecase
- gerrit-sonar:
sonar-project-file: ""
build-node: centos7-builder-2c-1g
+ submodule-recursive: false
+ submodule-disable: true
sonar-properties: |
# Required metadata
sonar.projectKey=iec
cwd=$(pwd)
current_user=$(whoami)
is_optional="false"
+pull="false"
info () {
logger -s -t "run_blu_val.info" "$*"
verify_connectivity() {
local ip=$1
info "Verifying connectivity to $ip..."
+ # shellcheck disable=SC2034
for i in $(seq 0 10); do
if ping -c 1 -W 1 "$ip" > /dev/null; then
info "$ip is reachable!"
}
# Get options from shell
-while getopts "j:k:u:s:b:l:r:n:ov:" optchar; do
+while getopts "j:k:u:s:b:l:r:n:opv:" optchar; do
case "${optchar}" in
j) cluster_master_ip=${OPTARG} ;;
k) k8s_config_dir=${OPTARG} ;;
n) blueprint_name=${OPTARG} ;;
u) sh_user=${OPTARG} ;;
o) is_optional="true" ;;
+ p) pull="true" ;;
v) version=${OPTARG} ;;
*) echo "Non-option argument: '-${OPTARG}'" >&2
usage
ssh_user=${sh_user:-$CLUSTER_SSH_USER}
blueprint_layer=${blueprint_layer:-$LAYER}
-if [ "$blueprint_layer" == "k8s" ]
+if [ "$blueprint_layer" == "k8s" ] || [ -z "$blueprint_layer" ]
then
if [ -z "$cluster_master_ip" ]
then
"$volumes_path"
# create ssh_key_dir
-mkdir -p $cwd/ssh_key_dir
+mkdir -p "$cwd/ssh_key_dir"
# copy ssh_key in ssh_key_dir
-cp $ssh_key $cwd/ssh_key_dir/id_rsa
+cp "$ssh_key" "$cwd/ssh_key_dir/id_rsa"
variables_path="$cwd/tests/variables.yaml"
# update information in variables yaml
then
options+=" -o"
fi
-
-printf 'ok / PASS /\nerror / FAIL /\n' > ./bluval/rules.txt
+if [ "$pull" == "true" ] || [ "$PULL" == "yes" ]
+then
+ options+=" -p"
+fi
set +e
-# even if the script fails we need to change the owner of results
-# shellcheck disable=SC2086
-python3 bluval/blucon.py $options "$blueprint_name"
+if python3 --version > /dev/null; then
+ # shellcheck disable=SC2086
+ python3 bluval/blucon.py $options "$blueprint_name"
+else
+ # shellcheck disable=SC2086
+ VALIDATION_DIR="$WORKSPACE" RESULTS_DIR="$WORKSPACE/results" \
+ bluval/blucon.sh $options "$blueprint_name"
+fi
+# even if the script fails we need to change the owner of results
+# shellcheck disable=SC2181
if [ $? -ne 0 ]; then
change_res_owner
error "Bluval validation FAIL "
TIMESTAMP=$(date +'%Y%m%d-%H%M%S')
NEXUS_URL=https://nexus.akraino.org/
NEXUS_PATH="${LAB_SILO}/bluval_results/${blueprint_name}/${VERSION}/${TIMESTAMP}"
- BUILD_URL="${JENKINS_HOSTNAME}/job/${JOB_NAME}/${BUILD_NUMBER}/"
zip -r results.zip ./results
lftools deploy nexus-zip "$NEXUS_URL" logs "$NEXUS_PATH" results.zip
rm results.zip
fi
rm -f ~/.netrc
-
stream:
- master:
branch: '{stream}'
- refs_tag: 'heads'
disabled: false
- - 2.0.1:
+ triggers:
+      # Build nightly at 12:10 AM
+ - timed: '10 0 * * *'
+ - 3.0.0:
branch: 'master'
- refs_tag: 'tags'
- disabled: true
+ disabled: false
+ triggers:
+ - gerrit:
+ server-name: '{gerrit-server-name}'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: remerge$
+ projects:
+ - project-compare-type: ANT
+ project-pattern: 'ci-management'
+ branches:
+ - branch-compare-type: ANT
+ branch-pattern: '**/master'
+ file-paths:
+ - compare-type: REG_EXP
+ pattern: 'jjb/validation/validation-docker.yaml'
arch_tag:
- 'amd64':