# ========================================================================================= # Copyright (C) 2021 Orange # # This program is free software; you can redistribute it and/or modify it under the terms # of the GNU Lesser General Public License as published by the Free Software Foundation; # either version 3 of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; # without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along with this # program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth # Floor, Boston, MA 02110-1301, USA. # ========================================================================================= variables: # Change pip's cache directory to be inside the project directory since we can # only cache local items. PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip" PYTHON_IMAGE: python:3-slim # Default Python project root directory PYTHON_PROJECT_DIR: . REQUIREMENTS_FILE: requirements.txt TEST_REQUIREMENTS_FILE: test-requirements.txt SETUP_PY_DIR: "." # default production ref name (pattern) PROD_REF: '/^master$/' # default integration ref name (pattern) INTEG_REF: '/^develop$/' # compileall PYTHON_COMPILE_ARGS: "*" BANDIT_ARGS: "--recursive ." # Safety tool PYTHON_SAFETY_IMAGE: pyupio/safety:latest SAFETY_ARGS: "--full-report" # Docs DOCS_REQUIREMENTS_FILE: docs-requirements.txt DOCS_DIRECTORY: docs DOCS_BUILD_DIR: public DOCS_MAKE_ARGS: html BUILDDIR=${DOCS_BUILD_DIR} RELEASE_VERSION_PART: "minor" .python-scripts: &python-scripts | # BEGSCRIPT set -e function log_info() { echo -e "[\\e[1;94mINFO\\e[0m] $*" } function log_warn() { echo -e "[\\e[1;93mWARN\\e[0m] $*" } function log_error() { echo -e "[\\e[1;91mERROR\\e[0m] $*" } function assert_defined() { if [[ -z "$1" ]] then log_error "$2" exit 1 fi } function install_test_requirements() { if [[ -f "pyproject.toml" ]]; then if [[ ! -f "poetry.lock" ]]; then log_error "Poetry detected but \\e[33;1mpoetry.lock\\e[0m file not found: you shall commit it with your project files" exit 1 fi log_info "--- Poetry detected: generating \\e[33;1m${TEST_REQUIREMENTS_FILE}\\e[0m from poetry.lock" pip install poetry poetry export --without-hashes --dev -f requirements.txt --output "${TEST_REQUIREMENTS_FILE}" fi if [[ -f "${TEST_REQUIREMENTS_FILE}" ]]; then log_info "--- installing from ${TEST_REQUIREMENTS_FILE} file" # shellcheck disable=SC2086 pip install ${PIP_OPTS} -r "${TEST_REQUIREMENTS_FILE}" else log_info "--- no test requirements file found from env or file ${TEST_REQUIREMENTS_FILE} does not exist" fi } function install_requirements() { if [[ -f "pyproject.toml" ]]; then if [[ ! -f "poetry.lock" ]]; then log_error "Poetry detected but \\e[33;1mpoetry.lock\\e[0m file not found: you shall commit it with your project files" exit 1 fi log_info "--- Poetry detected: generating \\e[33;1m${REQUIREMENTS_FILE}\\e[0m from poetry.lock" pip install poetry poetry export --without-hashes -f requirements.txt --output "${REQUIREMENTS_FILE}" fi if [[ -f "${REQUIREMENTS_FILE}" ]]; then log_info "--- installing from ${REQUIREMENTS_FILE} file" # shellcheck disable=SC2086 pip install ${PIP_OPTS} -r "${REQUIREMENTS_FILE}" elif [[ -f "${SETUP_PY_DIR}/setup.py" ]]; then log_info "--- installing from ${SETUP_PY_DIR}/setup.py file" pip install "${SETUP_PY_DIR}/" else log_info "--- no requirements or setup.py file found from env or file ${REQUIREMENTS_FILE} - ${SETUP_PY_DIR}/setup.py does not exist" fi } function install_doc_requirements() { if [[ -f "pyproject.toml" ]]; then if [[ ! -f "poetry.lock" ]]; then log_error "Poetry detected but \\e[33;1mpoetry.lock\\e[0m file not found: you shall commit it with your project files" exit 1 fi log_info "--- Poetry detected: generating \\e[33;1m${TEST_REQUIREMENTS_FILE}\\e[0m from poetry.lock" pip install poetry poetry export --without-hashes -f requirements.txt --output "${DOCS_REQUIREMENTS_FILE}" fi if [[ -f "${DOCS_REQUIREMENTS_FILE}" ]]; then log_info "--- installing from ${DOCS_REQUIREMENTS_FILE} file" # shellcheck disable=SC2086 pip install ${PIP_OPTS} -r "${DOCS_REQUIREMENTS_FILE}" elif [[ -f "${SETUP_PY_DIR}/setup.py" ]]; then log_info "--- installing from ${SETUP_PY_DIR}/setup.py file" pip install "${SETUP_PY_DIR}/" else log_info "--- no doc requirements file found from env or file ${DOCS_REQUIREMENTS_FILE} - ${SETUP_PY_DIR}/setup.py does not exist" fi } function release_args() { if [[ -f ".bumpversion.cfg" ]]; then log_info "--- .bumpversion.cfg file found " export bumpversion_args="${RELEASE_VERSION_PART} --verbose" else log_info "--- No .bumpversion.cfg file found " if [[ -f "setup.py" ]]; then log_info "--- Getting current version of setup.py file " current_version=$(python setup.py --version) export bumpversion_args=" --verbose --current-version ${current_version} --tag --tag-name {new_version} --commit ${RELEASE_VERSION_PART} setup.py" else log_warn "--- No setup.py file found. Cannot perform release." fi fi log_info "--- Release args: ${bumpversion_args}" } function install_ca_certs() { certs=$1 if [[ -z "$certs" ]] then return fi # import in system if echo "$certs" >> /etc/ssl/certs/ca-certificates.crt then log_info "CA certificates imported in \\e[33;1m/etc/ssl/certs/ca-certificates.crt\\e[0m" fi if echo "$certs" >> /etc/ssl/cert.pem then log_info "CA certificates imported in \\e[33;1m/etc/ssl/cert.pem\\e[0m" fi # import in Java keystore (if keytool command found) if command -v keytool > /dev/null then # shellcheck disable=SC2046 javahome=${JAVA_HOME:-$(dirname $(readlink -f $(command -v java)))/..} # shellcheck disable=SC2086 keystore=${JAVA_KEYSTORE_PATH:-$(ls -1 $javahome/jre/lib/security/cacerts 2>/dev/null || ls -1 $javahome/lib/security/cacerts 2>/dev/null || echo "")} if [[ -f "$keystore" ]] then storepass=${JAVA_KEYSTORE_PASSWORD:-changeit} nb_certs=$(echo "$certs" | grep -c 'END CERTIFICATE') log_info "importing $nb_certs certificates in Java keystore \\e[33;1m$keystore\\e[0m..." for idx in $(seq 0 $((nb_certs - 1))) do # TODO: use keytool option -trustcacerts ? if echo "$certs" | awk "n==$idx { print }; /END CERTIFICATE/ { n++ }" | keytool -noprompt -import -alias "imported CA Cert $idx" -keystore "$keystore" -storepass "$storepass" then log_info "... CA certificate [$idx] successfully imported" else log_warn "... Failed importing CA certificate [$idx]: abort" return fi done else log_warn "Java keystore \\e[33;1m$keystore\\e[0m not found: could not import CA certificates" fi fi # variable REQUESTS_CA_BUNDLE for Python if Python installed if command -v python > /dev/null then export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt log_info "Python requests \\e[33;1m\$REQUESTS_CA_BUNDLE\\e[0m variable set" fi } function unscope_variables() { _scoped_vars=$(env | awk -F '=' "/^scoped__[a-zA-Z0-9_]+=/ {print \$1}" | sort) if [[ -z "$_scoped_vars" ]]; then return; fi log_info "Processing scoped variables..." for _scoped_var in $_scoped_vars do _fields=${_scoped_var//__/:} _condition=$(echo "$_fields" | cut -d: -f3) case "$_condition" in if) _not="";; ifnot) _not=1;; *) log_warn "... unrecognized condition \\e[1;91m$_condition\\e[0m in \\e[33;1m${_scoped_var}\\e[0m" continue ;; esac _target_var=$(echo "$_fields" | cut -d: -f2) _cond_var=$(echo "$_fields" | cut -d: -f4) _cond_val=$(eval echo "\$${_cond_var}") _test_op=$(echo "$_fields" | cut -d: -f5) case "$_test_op" in defined) if [[ -z "$_not" ]] && [[ -z "$_cond_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" ]]; then continue; fi ;; equals|startswith|endswith|contains|in|equals_ic|startswith_ic|endswith_ic|contains_ic|in_ic) # comparison operator # sluggify actual value _cond_val=$(echo "$_cond_val" | tr '[:punct:]' '_') # retrieve comparison value _cmp_val_prefix="scoped__${_target_var}__${_condition}__${_cond_var}__${_test_op}__" _cmp_val=${_scoped_var#$_cmp_val_prefix} # manage 'ignore case' if [[ "$_test_op" == *_ic ]] then # lowercase everything _cond_val=$(echo "$_cond_val" | tr '[:upper:]' '[:lower:]') _cmp_val=$(echo "$_cmp_val" | tr '[:upper:]' '[:lower:]') fi case "$_test_op" in equals*) if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val" ]]; then continue; fi ;; startswith*) if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val"* ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val"* ]]; then continue; fi ;; endswith*) if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val" ]]; then continue; fi ;; contains*) if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val"* ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val"* ]]; then continue; fi ;; in*) if [[ -z "$_not" ]] && [[ "__${_cmp_val}__" != *"__${_cond_val}__"* ]]; then continue; elif [[ "$_not" ]] && [[ "__${_cmp_val}__" == *"__${_cond_val}__"* ]]; then continue; fi ;; esac ;; *) log_warn "... unrecognized test operator \\e[1;91m${_test_op}\\e[0m in \\e[33;1m${_scoped_var}\\e[0m" continue ;; esac # matches _val=$(eval echo "\$${_target_var}") log_info "... apply \\e[32m${_target_var}\\e[0m from \\e[32m\$${_scoped_var}\\e[0m${_val:+ (\\e[33;1moverwrite\\e[0m)}" _val=$(eval echo "\$${_scoped_var}") export "${_target_var}"="${_val}" done log_info "... done" } function get_latest_template_version() { tag_json=$(wget -T 5 -q -O - "$CI_API_V4_URL/projects/Orange-OpenSource%2Ftbc%2F$1/repository/tags?per_page=1" || echo "") echo "$tag_json" | sed -rn 's/^.*"name":"([^"]*)".*$/\1/p' } function check_for_update() { template="$1" actual="$2" latest=$(get_latest_template_version "$template") if [[ -n "$latest" ]] && [[ "$latest" != "$actual" ]] then log_warn "\\e[1;93m=======================================================================================================\\e[0m" log_warn "\\e[93mThe template \\e[32m$template\\e[93m:\\e[33m$actual\\e[93m you're using is not up-to-date: consider upgrading to version \\e[32m$latest\\e[0m" log_warn "\\e[93m(set \$TEMPLATE_CHECK_UPDATE_DISABLED to disable this message)\\e[0m" log_warn "\\e[1;93m=======================================================================================================\\e[0m" fi } if [[ -z "$TEMPLATE_CHECK_UPDATE_DISABLED" ]]; then check_for_update python "1.1.0"; fi unscope_variables # ENDSCRIPT ############################################################################################### # Generic python job # ############################################################################################### .python-base: image: $PYTHON_IMAGE services: - name: "$CI_REGISTRY/orange-opensource/tbc/tools/tracking:master" command: ["--service", "python", "1.1.0"] # Cache downloaded dependencies and plugins between builds. # To keep cache across branches add 'key: "$CI_JOB_NAME"' cache: key: "$CI_COMMIT_REF_SLUG-python" paths: - ${PIP_CACHE_DIR} before_script: - *python-scripts - install_ca_certs "${CUSTOM_CA_CERTS:-$DEFAULT_CA_CERTS}" - cd ${PYTHON_PROJECT_DIR} ############################################################################################### # stages definition # ############################################################################################### stages: - build - test - publish ############################################################################################### # build stage # ############################################################################################### py-lint: extends: .python-base stage: build script: - install_requirements - pip install pylint_gitlab - | if ! pylint --ignore=.cache --output-format=text ${PYLINT_ARGS} ${PYLINT_FILES:-$(find -type f -name "*.py")} then # failed: also generate codeclimate report mkdir -p reports pylint --ignore=.cache --output-format=pylint_gitlab.GitlabCodeClimateReporter ${PYLINT_ARGS} ${PYLINT_FILES:-$(find -type f -name "*.py")} > reports/pylint-codeclimate.json exit 1 else # success: generate empty codeclimate report (required by GitLab :( ) mkdir -p reports echo "[]" > reports/pylint-codeclimate.json fi artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: codequality: $PYTHON_PROJECT_DIR/reports/pylint-codeclimate.json paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $PYLINT_ENABLED is set - if: '$PYLINT_ENABLED && $CI_COMMIT_REF_NAME =~ $PROD_REF' # on integration branch(es): if $PYLINT_ENABLED is set - if: '$PYLINT_ENABLED && $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches, with $PYLINT_ENABLED set: auto & non-blocking - if: '$PYLINT_ENABLED' allow_failure: true py-compile: extends: .python-base stage: build script: - install_requirements - python -m compileall $PYTHON_COMPILE_ARGS rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: only when none of supported unit test framework is enabled - if: '$UNITTEST_ENABLED == null && $PYTEST_ENABLED == null && $NOSETESTS_ENABLED == null' ############################################################################################### # test stage # ############################################################################################### py-unittest: extends: .python-base stage: build script: - mkdir -p reports - install_requirements - install_test_requirements # code coverage - pip install -U coverage # JUnit XML report - pip install -U unittest-xml-reporting - coverage run -m xmlrunner discover -o "reports/" $UNITTEST_ARGS - coverage report -m - coverage xml -o "reports/coverage.xml" coverage: /^TOTAL.+?(\d+\%)$/ artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: junit: - $PYTHON_PROJECT_DIR/reports/TEST-*.xml cobertura: $PYTHON_PROJECT_DIR/reports/coverage.xml paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: when $UNITTEST_ENABLED is set - if: $UNITTEST_ENABLED py-pytest: extends: .python-base stage: build script: - install_requirements - install_test_requirements - mkdir -p reports - pip install -U pytest pytest-cov coverage - python -m pytest --junit-xml=reports/TEST-pytests.xml --cov --cov-report term --cov-report xml:reports/coverage.xml ${PYTEST_ARGS} coverage: /^TOTAL.+?(\d+\%)$/ artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: junit: - $PYTHON_PROJECT_DIR/reports/TEST-*.xml cobertura: $PYTHON_PROJECT_DIR/reports/coverage.xml paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: when $PYTEST_ENABLED is set - if: $PYTEST_ENABLED py-nosetests: extends: .python-base stage: build script: - install_requirements - install_test_requirements - mkdir -p reports - nosetests --with-xunit --xunit-file=reports/TEST-nosetests.xml --with-coverage --cover-erase --cover-xml --cover-xml-file=reports/coverage.xml --cover-html --cover-html-dir=reports/coverage ${NOSETESTS_ARGS} coverage: /^TOTAL.+?(\d+\%)$/ artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: junit: - $PYTHON_PROJECT_DIR/reports/TEST-*.xml cobertura: $PYTHON_PROJECT_DIR/reports/coverage.xml paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: when $NOSETESTS_ENABLED is set - if: $NOSETESTS_ENABLED # Bandit (SAST) py-bandit: extends: .python-base stage: test # force no dependencies dependencies: [] script: - pip install -U bandit - | if ! bandit ${TRACE+--verbose} ${BANDIT_ARGS} then # failed: also generate JSON report mkdir -p reports bandit ${TRACE+--verbose} --format json --output reports/bandit.json ${BANDIT_ARGS} exit 1 fi artifacts: when: always name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $BANDIT_ENABLED is set - if: '$BANDIT_ENABLED && $CI_COMMIT_REF_NAME =~ $PROD_REF' # on integration branch(es): if $BANDIT_ENABLED is set - if: '$BANDIT_ENABLED && $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches, with $BANDIT_ENABLED set: manual & non-blocking - if: '$BANDIT_ENABLED' when: manual allow_failure: true # Safety (dependency check) py-safety: extends: .python-base image: $PYTHON_SAFETY_IMAGE stage: test # force no dependencies dependencies: [] script: - install_requirements - | if ! pip freeze | safety check --stdin ${SAFETY_ARGS} then # failed: also generate JSON report mkdir -p reports pip freeze | safety check --stdin --json --output reports/safety.json ${SAFETY_ARGS} exit 1 fi artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $SAFETY_ENABLED is set - if: '$SAFETY_ENABLED && $CI_COMMIT_REF_NAME =~ $PROD_REF' # on integration branch(es): if $SAFETY_ENABLED is set - if: '$SAFETY_ENABLED && $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches, with $SAFETY_ENABLED set: manual & non-blocking - if: '$SAFETY_ENABLED' when: manual allow_failure: true ############################################################################################### # publish stage # ############################################################################################### # (on tag creation): performs a release py-publish: extends: .python-base stage: publish script: - assert_defined "$TWINE_USERNAME" 'Missing required env $TWINE_USERNAME' - assert_defined "$TWINE_PASSWORD" 'Missing required env $TWINE_PASSWORD' - pip install -U twine setuptools - pip list - python setup.py sdist bdist_wheel - twine upload --verbose dist/*.tar.gz - twine upload --verbose dist/*.whl rules: # on tags with $TWINE_USERNAME set - if: '$TWINE_USERNAME && $CI_COMMIT_TAG' # (on tag creation): generates the documentation py-docs: extends: .python-base stage: publish script: - install_doc_requirements - pip install -U sphinx - cd ${DOCS_DIRECTORY} - make ${DOCS_MAKE_ARGS} artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" paths: - $DOCS_BUILD_DIR rules: # on tags with $DOCS_ENABLED set - if: '$DOCS_ENABLED && $CI_COMMIT_TAG' # (manual from master branch): triggers a release (tag creation) py-release: extends: .python-base stage: publish script: - git config --global user.email '$GITLAB_USER_EMAIL' - git config --global user.name '$GITLAB_USER_LOGIN' - git checkout -B $CI_BUILD_REF_NAME - pip install --upgrade bumpversion - release_args - bumpversion ${bumpversion_args} - git_url_base=`echo ${CI_REPOSITORY_URL} | cut -d\@ -f2` - git push https://${RELEASE_USERNAME}:${RELEASE_ACCESS_TOKEN}@${git_url_base} --tags - git push https://${RELEASE_USERNAME}:${RELEASE_ACCESS_TOKEN}@${git_url_base} $CI_BUILD_REF_NAME rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): manual & non-blocking if $RELEASE_USERNAME is set - if: '$RELEASE_USERNAME && $CI_COMMIT_REF_NAME =~ $PROD_REF' when: manual allow_failure: true # on integration branch(es): manual & non-blocking if $RELEASE_USERNAME is set - if: '$RELEASE_USERNAME && $CI_COMMIT_REF_NAME =~ $INTEG_REF' when: manual allow_failure: true