image: dahanna/python.3.7-git-tox-alpine

{% if cookiecutter.ci_https_proxy != "no" -%}
variables:
  # Quote the templated values: if the rendered proxy URL were empty or
  # contained YAML-special characters, an unquoted scalar could misparse.
  # HTTP_PROXY is used to pull docker images.
  HTTP_PROXY: "{{ cookiecutter.ci_https_proxy }}"
  # HTTPS_PROXY is used to install Python packages such as tox.
  HTTPS_PROXY: "{{ cookiecutter.ci_https_proxy }}"
  # If we don't include NO_PROXY, we will get "fatal unable to update url base from redirection"
  # when trying to fetch the gitlab-ci-token.
  NO_PROXY: "127.0.0.1,localhost,.lan,.local,.home,/var/run/docker.sock,{{ cookiecutter.repo_hosting_domain }}"
  # If some of the links in your documentation require a special PEM to verify,
  # then sphinx -b linkcheck will fail without that PEM.
  # But setting REQUESTS_CA_BUNDLE to that PEM will cause other links to fail,
  # because the runner will only accept that PEM, not the defaults.
  # Therefore you will usually want to bundle all certificates together with
  # cat `python -c "import requests; print(requests.certs.where())"` ~/your.pem > ~/bundled.pem
  # REQUESTS_CA_BUNDLE: ci/name-of-your-ca-bundle.pem
{%- endif %}

default:
  before_script:
    # Timestamp taken as early as possible so later steps can report durations.
    - right_after_pull_docker_image=$(date +%s)
    # NOTE: this if/else/fi deliberately spans several script items. GitLab
    # concatenates the script list into a single shell script, so the
    # construct works, but the items must stay in this order.
    - if [ -z ${SSH_PRIVATE_KEY+ABC} ]; then echo "SSH_PRIVATE_KEY is unset, so assuming you do not need SSH set up.";
      else
    # All of this will be skipped unless you set SSH_PRIVATE_KEY as a variable at https://{{ cookiecutter.repo_hosting_domain }}/{{ cookiecutter.repo_username }}/{{ cookiecutter.repo_name }}/-/settings/ci_cd
    # The raw guard is required because "${#" would otherwise start a Jinja comment.
    {% raw -%}
    - if [ ${#SSH_PRIVATE_KEY} -le 5 ]; then echo "SSH_PRIVATE_KEY looks far too short, something is wrong"; fi
    {%- endraw %}
    # Try alpine's apk first, fall back to apt-get for Debian-based images.
    - apk add openssh-client || apt-get install --assume-yes openssh-client
    - echo "adding openssh-client took $(( $(date +%s) - right_after_pull_docker_image)) seconds"

    # ssh-agent -s starts the ssh-agent and then outputs shell commands to run.
    - eval $(ssh-agent -s)

    ##
    ## Add the SSH key stored in SSH_PRIVATE_KEY variable to the agent store.
    ## We're using tr to fix line endings which makes ed25519 keys work
    ## without extra base64 encoding.
    ## We use -d because the version of tr on alpine does not recognize --delete.
    ## https://gitlab.com/gitlab-examples/ssh-private-key/issues/1#note_48526556
    ##
    - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -

    ##
    ## Create the SSH directory and give it the right permissions
    ##
    # Use -p rather than --parents: busybox mkdir on alpine does not reliably
    # support GNU long options (the same reason tr uses -d above).
    - mkdir -p ~/.ssh
    - ssh-keyscan -t rsa {{ cookiecutter.repo_hosting_domain }} >> ~/.ssh/known_hosts
    - fi

    ##
    ## Optionally, if you will be using any Git commands, set the user name
    ## and email.
    ##
    #- git config --global user.email "{{ cookiecutter.email }}"
    #- git config --global user.name "{{ cookiecutter.full_name }}"

# In general we want to use tox -e docs, but GitLab.com will not deploy Pages
# if the pages build fails.
# The pages build will fail if you use tox -e docs with a link to your GitLab
# Pages documentation that is not yet deployed, because tox -e docs includes
# sphinx-build -b linkcheck. So the pages would never get deployed...
# That's why we deploy pages with no checks here.
# The tests will still run linkcheck on the documentation.
# Since "It may take up to 30 minutes before the site is available after the
# first deployment." (per GitLab), the tests will still fail for a little
# while.
pages:
  tags:
    - docker
  # On GitLab, the stages are build->test->deploy.
  # If the test stage fails, the deploy stage is skipped,
  # so pages deploys from the build stage with no checks.
  stage: build
  script:
    - pip install -r docs/requirements.txt
    # -E forces sphinx to rebuild its environment instead of reusing a stale one.
    - sphinx-build -E -b html docs dist/docs
    # GitLab Pages only publishes an artifact directory literally named "public".
    - mv dist/docs/ public/
  artifacts:
    paths:
      - public
  only:
    - master

| 84 | + |
| 85 | +test: |
| 86 | + tags: |
| 87 | + - docker |
| 88 | + stage: test |
| 89 | + script: |
| 90 | + # apk add any needed packages not included in the image. |
| 91 | + # check-manifest, used in tox -e check, requires git, |
| 92 | + # so we need to either use an image that includes git or |
| 93 | + # apk add git here. |
| 94 | + - pip install --upgrade pip |
| 95 | + # If using an image that does not include tox, we will |
| 96 | + # need to pip install tox here. |
| 97 | + # With --sitepackages, we can save time by installing once |
| 98 | + # for both regular tests and documentation checks. |
| 99 | + - pip install . |
| 100 | + - git --version |
| 101 | + - python --version |
| 102 | + - python2 --version || echo "python2 is not installed." |
| 103 | + - virtualenv --version |
| 104 | + - pip --version |
| 105 | + - tox --version |
| 106 | + - uname --all |
| 107 | + - lsb_release --all || echo "lsb_release is not supported on this host." |
| 108 | + - start_tox=$(date +%s) |
| 109 | + # When testing locally, we might not want to set sitepackages=true, |
| 110 | + # because the local machine might have all kinds of weird things in the |
| 111 | + # environment. But for continuous integration, we do want sitepackages=true, |
| 112 | + # because it allows us to use a Docker image with some packages already |
| 113 | + # installed to accelerate testing. |
| 114 | + - tox --sitepackages |
| 115 | + - echo "tox tests took $(( $(date +%s) - start_tox)) seconds" |
| 116 | + - echo "Everything after pulling the Docker image took $(( $(date +%s) - right_after_pull_docker_image)) seconds total" |
| 117 | + |