Use non-Docker approach to CI

* The current CI takes a long time for Docker to prepare itself. It's best to take advantage of the caching features of Travis and GitLab CI to speed this up
* Based on my changes from a while ago on Travis bionic, it's a 1.5 minute decrease
This commit is contained in:
Andrew-Chen-Wang 2020-06-21 13:56:14 -04:00
parent a63278fb4e
commit 922bbae182
3 changed files with 38 additions and 54 deletions

View File

@ -172,12 +172,8 @@ def test_black_passes(cookies, context_override):
pytest.fail(e.stdout.decode()) pytest.fail(e.stdout.decode())
@pytest.mark.parametrize( def test_travis_invokes_pytest(cookies, context):
["use_docker", "expected_test_script"], context.update({"ci_tool": "Travis"})
[("n", "pytest"), ("y", "docker-compose -f local.yml run django pytest"),],
)
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
context.update({"ci_tool": "Travis", "use_docker": use_docker})
result = cookies.bake(extra_context=context) result = cookies.bake(extra_context=context)
assert result.exit_code == 0 assert result.exit_code == 0
@ -189,19 +185,15 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
try: try:
yml = yaml.load(travis_yml, Loader=yaml.FullLoader)["jobs"]["include"] yml = yaml.load(travis_yml, Loader=yaml.FullLoader)["jobs"]["include"]
assert yml[0]["script"] == ["flake8"] assert yml[0]["script"] == ["flake8"]
assert yml[1]["script"] == [expected_test_script] assert yml[1]["script"] == ["pytest"]
except yaml.YAMLError as e: except yaml.YAMLError as e:
pytest.fail(str(e)) pytest.fail(str(e))
@pytest.mark.parametrize(
["use_docker", "expected_test_script"],
[("n", "pytest"), ("y", "docker-compose -f local.yml run django pytest"),],
)
def test_gitlab_invokes_flake8_and_pytest( def test_gitlab_invokes_flake8_and_pytest(
cookies, context, use_docker, expected_test_script cookies, context
): ):
context.update({"ci_tool": "Gitlab", "use_docker": use_docker}) context.update({"ci_tool": "Gitlab"})
result = cookies.bake(extra_context=context) result = cookies.bake(extra_context=context)
assert result.exit_code == 0 assert result.exit_code == 0
@ -213,7 +205,7 @@ def test_gitlab_invokes_flake8_and_pytest(
try: try:
gitlab_config = yaml.load(gitlab_yml, Loader=yaml.FullLoader) gitlab_config = yaml.load(gitlab_yml, Loader=yaml.FullLoader)
assert gitlab_config["flake8"]["script"] == ["flake8"] assert gitlab_config["flake8"]["script"] == ["flake8"]
assert gitlab_config["pytest"]["script"] == [expected_test_script] assert gitlab_config["pytest"]["script"] == ["pytest"]
except yaml.YAMLError as e: except yaml.YAMLError as e:
pytest.fail(e) pytest.fail(e)

View File

@ -3,6 +3,7 @@ stages:
- test - test
variables: variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
POSTGRES_USER: '{{ cookiecutter.project_slug }}' POSTGRES_USER: '{{ cookiecutter.project_slug }}'
POSTGRES_PASSWORD: '' POSTGRES_PASSWORD: ''
POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}' POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}'
@ -11,6 +12,11 @@ variables:
CELERY_BROKER_URL: 'redis://redis:6379/0' CELERY_BROKER_URL: 'redis://redis:6379/0'
{%- endif %} {%- endif %}
cache:
paths:
- .cache/pip
- venv/
flake8: flake8:
stage: lint stage: lint
image: python:3.7-alpine image: python:3.7-alpine
@ -22,30 +28,26 @@ flake8:
pytest: pytest:
stage: test stage: test
image: python:3.7 image: python:3.7
{% if cookiecutter.use_docker == 'y' -%}
tags:
- docker
services:
- docker
before_script:
- docker-compose -f local.yml build
# Ensure celerybeat does not crash due to non-existent tables
- docker-compose -f local.yml run --rm django python manage.py migrate
- docker-compose -f local.yml up -d
script:
- docker-compose -f local.yml run django pytest
{%- else %}
tags: tags:
- python - python
services: services:
- postgres:11 - postgres:11
{% if cookiecutter.use_celery == 'y' -%}
- redis:latest
{%- endif %}
variables: variables:
DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
USE_DOCKER: 'no'
Host: redis
# Caching involves creating a virtual environment
before_script: before_script:
- pip install virtualenv
- virtualenv venv
- source venv/bin/activate
- pip install -r requirements/local.txt - pip install -r requirements/local.txt
- celery multi start worker beat -A config.celery_app --pool=solo
script: script:
- pytest - pytest
{%- endif %}

View File

@ -1,11 +1,11 @@
os: linux
dist: xenial dist: xenial
language: python language: python
cache: pip
python: python:
- "3.8" - "3.8"
services:
- {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
jobs: jobs:
include: include:
- name: "Linter" - name: "Linter"
@ -15,31 +15,21 @@ jobs:
- "flake8" - "flake8"
- name: "Django Test" - name: "Django Test"
{%- if cookiecutter.use_docker == 'y' %} env:
before_script: global:
- docker-compose -v - USE_DOCKER="no"
- docker -v - DATABASE_URL="postgres:///{{ cookiecutter.project_slug }}"
- docker-compose -f local.yml build {% if cookiecutter.use_celery == 'y' -%}
# Ensure celerybeat does not crash due to non-existent tables - CELERY_BROKER_URL="redis://localhost:6379/0"
- docker-compose -f local.yml run --rm django python manage.py migrate {%- endif %}
- docker-compose -f local.yml up -d services:
script: - postgresql
- "docker-compose -f local.yml run django pytest" {% if cookiecutter.use_celery == 'y' -%}
after_failure: - redis
- docker-compose -f local.yml logs {%- endif %}
{%- else %}
before_install:
- sudo apt-get update -qq
- sudo apt-get install -qq build-essential gettext python-dev zlib1g-dev libpq-dev xvfb
- sudo apt-get install -qq libjpeg8-dev libfreetype6-dev libwebp-dev
- sudo apt-get install -qq graphviz-dev python-setuptools python3-dev python-virtualenv python-pip
- sudo apt-get install -qq firefox automake libtool libreadline6 libreadline6-dev libreadline-dev
- sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
language: python
python:
- "3.8"
install: install:
- pip install -r requirements/local.txt - pip install -r requirements/local.txt
before_script:
- celery multi start worker beat -A config.celery_app --pool=solo
script: script:
- "pytest" - "pytest"
{%- endif %}