Mirror of https://github.com/cookiecutter/cookiecutter-django.git
Commit da9d14ca2b
@@ -57,6 +57,7 @@ Listed in alphabetical order.
 Andrew Mikhnevich `@zcho`_
 Andy Rose
 Anna Callahan `@jazztpt`_
+Anna Sidwell `@takkaria`_
 Antonia Blair `@antoniablair`_ @antoniablairart
 Anuj Bansal `@ahhda`_
 Arcuri Davide `@dadokkio`_
@@ -70,6 +71,7 @@ Listed in alphabetical order.
 Bo Lopker `@blopker`_
 Bouke Haarsma
 Brent Payne `@brentpayne`_ @brentpayne
+Bartek `@btknu`
 Burhan Khalid `@burhan`_ @burhan
 Carl Johnson `@carlmjohnson`_ @carlmjohnson
 Catherine Devlin `@catherinedevlin`_
@@ -179,6 +181,7 @@ Listed in alphabetical order.
 Denis Bobrov `@delneg`_
 Philipp Matthies `@canonnervio`_
 Vadim Iskuchekov `@Egregors`_ @egregors
+Keith Bailey `@keithjeb`_
 ========================== ============================ ==============
 
 .. _@a7p: https://github.com/a7p
@@ -271,6 +274,7 @@ Listed in alphabetical order.
 .. _@ssteinerX: https://github.com/ssteinerx
 .. _@stepmr: https://github.com/stepmr
 .. _@suledev: https://github.com/suledev
+.. _@takkaria: https://github.com/takkaria
 .. _@timfreund: https://github.com/timfreund
 .. _@Travistock: https://github.com/Tavistock
 .. _@trungdong: https://github.com/trungdong
@@ -295,6 +299,8 @@ Listed in alphabetical order.
 .. _@purplediane: https://github.com/purplediane
 .. _@umrashrf: https://github.com/umrashrf
 .. _@ahhda: https://github.com/ahhda
+.. _@keithjeb: https://github.com/keithjeb
+.. _@btknu: https://github.com/btknu
 
 Special Thanks
 ~~~~~~~~~~~~~~
 
@@ -279,9 +279,9 @@ experience better.
 Articles
 ---------
 
+* `Using cookiecutter-django with Google Cloud Storage`_ - Mar. 12, 2019
 * `cookiecutter-django with Nginx, Route 53 and ELB`_ - Feb. 12, 2018
 * `cookiecutter-django and Amazon RDS`_ - Feb. 7, 2018
-* `Deploying Cookiecutter-Django with Docker-Compose`_ - Oct. 19, 2017
 * `Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm`_ - May 19, 2017
 * `Exploring with Cookiecutter`_ - Dec. 3, 2016
 * `Introduction to Cookiecutter-Django`_ - Feb. 19, 2016
@@ -292,9 +292,9 @@ Articles
 
 Have a blog or online publication? Write about your cookiecutter-django tips and tricks, then send us a pull request with the link.
 
+.. _`Using cookiecutter-django with Google Cloud Storage`: https://ahhda.github.io/cloud/gce/django/2019/03/12/using-django-cookiecutter-cloud-storage.html
 .. _`cookiecutter-django with Nginx, Route 53 and ELB`: https://msaizar.com/blog/cookiecutter-django-nginx-route-53-and-elb/
 .. _`cookiecutter-django and Amazon RDS`: https://msaizar.com/blog/cookiecutter-django-and-amazon-rds/
-.. _`Deploying Cookiecutter-Django with Docker-Compose`: http://adamantine.me/2017/10/19/deploying-cookiecutter-django-with-docker-compose/
 .. _`Exploring with Cookiecutter`: http://www.snowboardingcoder.com/django/2016/12/03/exploring-with-cookiecutter/
 .. _`Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm`: https://joshuahunter.com/posts/using-cookiecutter-to-jumpstart-a-django-project-on-windows-with-pycharm/
 
@@ -47,9 +47,7 @@ Run these commands to deploy the project to Heroku:
 
     git push heroku master
 
-    heroku run python manage.py migrate
     heroku run python manage.py createsuperuser
-    heroku run python manage.py collectstatic --no-input
 
     heroku run python manage.py check --deploy
 
@@ -171,6 +171,16 @@ When developing locally you can go with MailHog_ for email testing provided ``us
 
 .. _Mailhog: https://github.com/mailhog/MailHog/
 
+.. _`CeleryTasks`:
+
+Celery tasks in local development
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When not using docker Celery tasks are set to run in Eager mode, so that a full stack is not needed. When using docker the task
+scheduler will be used by default.
+
+If you need tasks to be executed on the main thread during development set CELERY_TASK_ALWAYS_EAGER = True in config/settings/local.py.
+
+Possible uses could be for testing, or ease of profiling with DJDT.
+
 .. _`CeleryFlower`:
 
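The documentation added above refers to Celery's eager mode. As a sketch only (the settings module path is the one named in the docs above; nothing here is part of this commit), the override it points to could look like this in a generated project's config/settings/local.py:

.. code-block:: python

    # Sketch, not part of this diff: force Celery tasks to run synchronously
    # in the calling process, so no broker or worker container is required.
    CELERY_TASK_ALWAYS_EAGER = True
    # Re-raise task exceptions in the caller so failures are visible immediately.
    CELERY_TASK_EAGER_PROPAGATES = True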
@@ -118,6 +118,16 @@ In production, we have Mailgun_ configured to have your back!
 .. _Mailgun: https://www.mailgun.com/
 
 
+Celery
+------
+
+If the project is configured to use Celery as a task scheduler then by default tasks are set to run on the main thread
+when developing locally. If you have the appropriate setup on your local machine then set
+
+CELERY_TASK_ALWAYS_EAGER = False
+
+in /config/settings/local.py
+
 Sass Compilation & Live Reloading
 ---------------------------------
 
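The Celery section added above says tasks run on the main thread during local development unless CELERY_TASK_ALWAYS_EAGER is set to False. A small self-contained sketch of what eager execution means in practice (hypothetical app and task names, not taken from this repository):

.. code-block:: python

    # Sketch: Celery eager mode demonstrated outside of any project code.
    from celery import Celery

    app = Celery("sketch")
    app.conf.task_always_eager = True        # run tasks inline, no broker/worker
    app.conf.task_eager_propagates = True    # surface task exceptions in the caller

    @app.task
    def add(x, y):
        return x + y

    result = add.delay(2, 3)  # executes synchronously in this process
    assert result.get() == 5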
@@ -9,5 +9,6 @@ flake8==3.7.6
 # Testing
 # ------------------------------------------------------------------------------
 tox==3.6.1
-pytest==4.3.0
+pytest==4.3.1
 pytest-cookies==0.3.0
+pyyaml==5.1
@@ -1,6 +1,7 @@
 import os
 import re
 import sh
+import yaml
 
 import pytest
 from binaryornot.check import is_binary
@@ -85,3 +86,19 @@ def test_flake8_compliance(cookies):
         sh.flake8(str(result.project))
     except sh.ErrorReturnCode as e:
         pytest.fail(e)
+
+
+def test_travis_invokes_pytest(cookies, context):
+    context.update({"use_travisci": "y"})
+    result = cookies.bake(extra_context=context)
+
+    assert result.exit_code == 0
+    assert result.exception is None
+    assert result.project.basename == context["project_slug"]
+    assert result.project.isdir()
+
+    with open(f'{result.project}/.travis.yml', 'r') as travis_yml:
+        try:
+            assert yaml.load(travis_yml)['script'] == ['pytest']
+        except yaml.YAMLError as e:
+            pytest.fail(e)
@@ -3,10 +3,11 @@
 USE_DOCKER=yes
 IPYTHONDIR=/app/.ipython
 
+{%- if cookiecutter.use_celery == 'y' %}
 # Redis
 # ------------------------------------------------------------------------------
 REDIS_URL=redis://redis:6379/0
-{% if cookiecutter.use_celery == 'y' %}
+
 # Celery
 # ------------------------------------------------------------------------------
 
@@ -9,3 +9,7 @@ before_install:
 language: python
 python:
   - "3.6"
+install:
+  - pip install -r requirements/local.txt
+script:
+  - "pytest"
@@ -1,3 +1,4 @@
+release: python manage.py migrate
 web: gunicorn config.wsgi:application
 {% if cookiecutter.use_celery == "y" -%}
 worker: celery worker --app={{cookiecutter.project_slug}}.taskapp --loglevel=info
@@ -5,8 +5,10 @@ set -o pipefail
 set -o nounset
 
 
+{% if cookiecutter.use_celery == 'y' %}
 # N.B. If only .env files supported variable expansion...
 export CELERY_BROKER_URL="${REDIS_URL}"
+{% endif %}
 
 if [ -z "${POSTGRES_USER}" ]; then
     base_postgres_image_default_user='postgres'
@@ -76,8 +76,10 @@ INSTALLED_APPS += ['django_extensions']  # noqa F405
 
 # Celery
 # ------------------------------------------------------------------------------
+{% if cookiecutter.use_docker == 'n' -%}
 # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-always-eager
 CELERY_TASK_ALWAYS_EAGER = True
+{%- endif %}
 # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-eager-propagates
 CELERY_TASK_EAGER_PROPAGATES = True
 
@ -1,4 +0,0 @@
|
||||||
Deploy
|
|
||||||
========
|
|
||||||
|
|
||||||
This is where you describe how the project is deployed in production.
|
|
|
@@ -1,186 +0,0 @@
-Developing with Docker
-======================
-
-You can develop your application in a `Docker`_ container for simpler deployment onto bare Linux machines later. This instruction assumes an `Amazon Web Services`_ EC2 instance, but it should work on any machine with Docker > 1.3 and `Docker compose`_ installed.
-
-.. _Docker: https://www.docker.com/
-.. _Amazon Web Services: http://aws.amazon.com/
-.. _Docker compose: https://docs.docker.com/compose/
-
-Setting up
-^^^^^^^^^^
-
-Docker encourages running one container for each process. This might mean one container for your web server, one for Django application and a third for your database. Once you're happy composing containers in this way you can easily add more, such as a `Redis`_ cache.
-
-.. _Redis: http://redis.io/
-
-The Docker compose tool (previously known as `fig`_) makes linking these containers easy. An example set up for your Cookiecutter Django project might look like this:
-
-.. _fig: http://www.fig.sh/
-
-::
-
-    webapp/ # Your cookiecutter project would be in here
-        Dockerfile
-        ...
-    database/
-        Dockerfile
-        ...
-    webserver/
-        Dockerfile
-        ...
-    production.yml
-
-Each component of your application would get its own `Dockerfile`_. The rest of this example assumes you are using the `base postgres image`_ for your database. Your database settings in `config/base.py` might then look something like:
-
-.. _Dockerfile: https://docs.docker.com/reference/builder/
-.. _base postgres image: https://registry.hub.docker.com/_/postgres/
-
-.. code-block:: python
-
-    DATABASES = {
-        'default': {
-            'ENGINE': 'django.db.backends.postgresql_psycopg2',
-            'NAME': 'postgres',
-            'USER': 'postgres',
-            'HOST': 'database',
-            'PORT': 5432,
-        }
-    }
-
-The `Docker compose documentation`_ explains in detail what you can accomplish in the `production.yml` file, but an example configuration might look like this:
-
-.. _Docker compose documentation: https://docs.docker.com/compose/#compose-documentation
-
-.. code-block:: yaml
-
-    database:
-        build: database
-    webapp:
-        build: webapp:
-        command: /usr/bin/python3.6 manage.py runserver 0.0.0.0:8000 # dev setting
-        # command: gunicorn -b 0.0.0.0:8000 wsgi:application # production setting
-        volumes:
-            - webapp/your_project_name:/path/to/container/workdir/
-        links:
-            - database
-    webserver:
-        build: webserver
-        ports:
-            - "80:80"
-            - "443:443"
-        links:
-            - webapp
-
-We'll ignore the webserver for now (you'll want to comment that part out while we do). A working Dockerfile to run your cookiecutter application might look like this:
-
-::
-
-    FROM ubuntu:14.04
-    ENV REFRESHED_AT 2015-01-13
-
-    # update packages and prepare to build software
-    RUN ["apt-get", "update"]
-    RUN ["apt-get", "-y", "install", "build-essential", "vim", "git", "curl"]
-    RUN ["locale-gen", "en_GB.UTF-8"]
-
-    # install latest python
-    RUN ["apt-get", "-y", "build-dep", "python3-dev", "python3-imaging"]
-    RUN ["apt-get", "-y", "install", "python3-dev", "python3-imaging", "python3-pip"]
-
-    # prepare postgreSQL support
-    RUN ["apt-get", "-y", "build-dep", "python3-psycopg2"]
-
-    # move into our working directory
-    # ADD must be after chown see http://stackoverflow.com/a/26145444/1281947
-    RUN ["groupadd", "python"]
-    RUN ["useradd", "python", "-s", "/bin/bash", "-m", "-g", "python", "-G", "python"]
-    ENV HOME /home/python
-    WORKDIR /home/python
-    RUN ["chown", "-R", "python:python", "/home/python"]
-    ADD ./ /home/python
-
-    # manage requirements
-    ENV REQUIREMENTS_REFRESHED_AT 2015-02-25
-    RUN ["pip3", "install", "-r", "requirements.txt"]
-
-    # uncomment the line below to use container as a non-root user
-    USER python:python
-
-Running `sudo docker-compose -f production.yml build` will follow the instructions in your `production.yml` file and build the database container, then your webapp, before mounting your cookiecutter project files as a volume in the webapp container and linking to the database. Our example yaml file runs in development mode but changing it to production mode is as simple as commenting out the line using `runserver` and uncommenting the line using `gunicorn`.
-
-Both are set to run on port `0.0.0.0:8000`, which is where the Docker daemon will discover it. You can now run `sudo docker-compose -f production.yml up` and browse to `localhost:8000` to see your application running.
-
-Deployment
-^^^^^^^^^^
-
-You'll need a webserver container for deployment. An example setup for `Nginx`_ might look like this:
-
-.. _Nginx: http://wiki.nginx.org/Main
-
-::
-
-    FROM ubuntu:14.04
-    ENV REFRESHED_AT 2015-02-11
-
-    # get the nginx package and set it up
-    RUN ["apt-get", "update"]
-    RUN ["apt-get", "-y", "install", "nginx"]
-
-    # forward request and error logs to docker log collector
-    RUN ln -sf /dev/stdout /var/log/nginx/access.log
-    RUN ln -sf /dev/stderr /var/log/nginx/error.log
-    VOLUME ["/var/cache/nginx"]
-    EXPOSE 80 443
-
-    # load nginx conf
-    ADD ./site.conf /etc/nginx/sites-available/your_cookiecutter_project
-    RUN ["ln", "-s", "/etc/nginx/sites-available/your_cookiecutter_project", "/etc/nginx/sites-enabled/your_cookiecutter_project"]
-    RUN ["rm", "-rf", "/etc/nginx/sites-available/default"]
-
-    #start the server
-    CMD ["nginx", "-g", "daemon off;"]
-
-That Dockerfile assumes you have an Nginx conf file named `site.conf` in the same directory as the webserver Dockerfile. A very basic example, which forwards traffic onto the development server or gunicorn for processing, would look like this:
-
-::
-
-    # see http://serverfault.com/questions/577370/how-can-i-use-environment-variables-in-nginx-conf#comment730384_577370
-    upstream localhost {
-        server webapp_1:8000;
-    }
-    server {
-        location / {
-            proxy_pass http://localhost;
-        }
-    }
-
-Running `sudo docker-compose -f production.yml build webserver` will build your server container. Running `sudo docker-compose -f production.yml up` will now expose your application directly on `localhost` (no need to specify the port number).
-
-Building and running your app on EC2
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-All you now need to do to run your app in production is:
-
-* Create an empty EC2 Linux instance (any Linux machine should do).
-
-* Install your preferred source control solution, Docker and Docker compose on the news instance.
-
-* Pull in your code from source control. The root directory should be the one with your `production.yml` file in it.
-
-* Run `sudo docker-compose -f production.yml build` and `sudo docker-compose -f production.yml up`.
-
-* Assign an `Elastic IP address`_ to your new machine.
-
-.. _Elastic IP address: https://aws.amazon.com/articles/1346
-
-* Point your domain name to the elastic IP.
-
-**Be careful with Elastic IPs** because, on the AWS free tier, if you assign one and then stop the machine you will incur charges while the machine is down (presumably because you're preventing them allocating the IP to someone else).
-
-Security advisory
-^^^^^^^^^^^^^^^^^
-
-The setup described in this instruction will get you up-and-running but it hasn't been audited for security. If you are running your own setup like this it is always advisable to, at a minimum, examine your application with a tool like `OWASP ZAP`_ to see what security holes you might be leaving open.
-
-.. _OWASP ZAP: https://www.owasp.org/index.php/OWASP_Zed_Attack_Proxy_Project
@@ -3,23 +3,17 @@
    You can adapt this file completely to your liking, but it should at least
    contain the root `toctree` directive.
 
-Welcome to {{ cookiecutter.project_name }}'s documentation!
+{{ cookiecutter.project_name }} Project Documentation
 ====================================================================
 
-Contents:
+Table of Contents:
 
 .. toctree::
    :maxdepth: 2
 
-   install
-   deploy
-   docker_ec2
-   tests
 
-Indices and tables
-==================
+Indices & Tables
+================
 
 * :ref:`genindex`
 * :ref:`modindex`
 
@@ -1,4 +0,0 @@
-Install
-=========
-
-This is where you write how to get a new laptop to run this project.
@@ -1,63 +1,70 @@
 ////////////////////////////////
-//Setup//
+// Setup
 ////////////////////////////////
 
+// Gulp and package
+const { src, dest, parallel, series, watch } = require('gulp')
+const pjson = require('./package.json')
+
 // Plugins
-var gulp = require('gulp'),
-    pjson = require('./package.json'),
-    gutil = require('gulp-util'),
-    sass = require('gulp-sass'),
-    autoprefixer = require('gulp-autoprefixer'),
-    cssnano = require('gulp-cssnano'),
-    {% if cookiecutter.custom_bootstrap_compilation == 'y' %}
-    concat = require('gulp-concat'),
-    {% endif %}
-    rename = require('gulp-rename'),
-    del = require('del'),
-    plumber = require('gulp-plumber'),
-    pixrem = require('gulp-pixrem'),
-    uglify = require('gulp-uglify'),
-    imagemin = require('gulp-imagemin'),
-    spawn = require('child_process').spawn,
-    runSequence = require('run-sequence'),
-    browserSync = require('browser-sync').create(),
-    reload = browserSync.reload;
+const autoprefixer = require('autoprefixer')
+const browserSync = require('browser-sync').create()
+{% if cookiecutter.custom_bootstrap_compilation == 'y' %}
+const concat = require('gulp-concat')
+{% endif %}
+const cssnano = require ('cssnano')
+const imagemin = require('gulp-imagemin')
+const pixrem = require('pixrem')
+const plumber = require('gulp-plumber')
+const postcss = require('gulp-postcss')
+const reload = browserSync.reload
+const rename = require('gulp-rename')
+const sass = require('gulp-sass')
+const spawn = require('child_process').spawn
+const uglify = require('gulp-uglify-es').default
 
 
 // Relative paths function
-var pathsConfig = function (appName) {
-  this.app = "./" + (appName || pjson.name);
-  var vendorsRoot = 'node_modules/';
+function pathsConfig(appName) {
+  this.app = `./${pjson.name}`
+  const vendorsRoot = 'node_modules'
 
   return {
     {% if cookiecutter.custom_bootstrap_compilation == 'y' %}
-    bootstrapSass: vendorsRoot + '/bootstrap/scss',
+    bootstrapSass: `${vendorsRoot}/bootstrap/scss`,
     vendorsJs: [
-      vendorsRoot + 'jquery/dist/jquery.slim.js',
-      vendorsRoot + 'popper.js/dist/umd/popper.js',
-      vendorsRoot + 'bootstrap/dist/js/bootstrap.js'
+      `${vendorsRoot}/jquery/dist/jquery.slim.js`,
+      `${vendorsRoot}/popper.js/dist/umd/popper.js`,
+      `${vendorsRoot}/bootstrap/dist/js/bootstrap.js`,
     ],
     {% endif %}
     app: this.app,
-    templates: this.app + '/templates',
-    css: this.app + '/static/css',
-    sass: this.app + '/static/sass',
-    fonts: this.app + '/static/fonts',
-    images: this.app + '/static/images',
-    js: this.app + '/static/js'
+    templates: `${this.app}/templates`,
+    css: `${this.app}/static/css`,
+    sass: `${this.app}/static/sass`,
+    fonts: `${this.app}/static/fonts`,
+    images: `${this.app}/static/images`,
+    js: `${this.app}/static/js`,
   }
-};
+}
 
-var paths = pathsConfig();
+var paths = pathsConfig()
 
 ////////////////////////////////
-//Tasks//
+// Tasks
 ////////////////////////////////
 
 // Styles autoprefixing and minification
-gulp.task('styles', function() {
-  return gulp.src(paths.sass + '/project.scss')
+function styles() {
+  var processCss = [
+    autoprefixer(), // adds vendor prefixes
+    pixrem(), // add fallbacks for rem units
+  ]
+
+  var minifyCss = [
+    cssnano({ preset: 'default' }) // minify result
+  ]
+
+  return src(`${paths.sass}/project.scss`)
     .pipe(sass({
       includePaths: [
         {% if cookiecutter.custom_bootstrap_compilation == 'y' %}
@@ -67,72 +74,86 @@ gulp.task('styles', function() {
       ]
     }).on('error', sass.logError))
     .pipe(plumber()) // Checks for errors
-    .pipe(autoprefixer({browsers: ['last 2 versions']})) // Adds vendor prefixes
-    .pipe(pixrem()) // add fallbacks for rem units
-    .pipe(gulp.dest(paths.css))
+    .pipe(postcss(processCss))
+    .pipe(dest(paths.css))
     .pipe(rename({ suffix: '.min' }))
-    .pipe(cssnano()) // Minifies the result
-    .pipe(gulp.dest(paths.css));
-});
+    .pipe(postcss(minifyCss)) // Minifies the result
+    .pipe(dest(paths.css))
+}
 
 // Javascript minification
-gulp.task('scripts', function() {
-  return gulp.src(paths.js + '/project.js')
+function scripts() {
+  return src(`${paths.js}/project.js`)
     .pipe(plumber()) // Checks for errors
     .pipe(uglify()) // Minifies the js
    .pipe(rename({ suffix: '.min' }))
-    .pipe(gulp.dest(paths.js));
-});
+    .pipe(dest(paths.js))
+}
 
 
 {% if cookiecutter.custom_bootstrap_compilation == 'y' %}
 // Vendor Javascript minification
-gulp.task('vendor-scripts', function() {
-  return gulp.src(paths.vendorsJs)
+function vendorScripts() {
+  return src(paths.vendorsJs)
     .pipe(concat('vendors.js'))
-    .pipe(gulp.dest(paths.js))
+    .pipe(dest(paths.js))
     .pipe(plumber()) // Checks for errors
     .pipe(uglify()) // Minifies the js
     .pipe(rename({ suffix: '.min' }))
-    .pipe(gulp.dest(paths.js));
-});
+    .pipe(dest(paths.js))
+}
 {% endif %}
 
 // Image compression
-gulp.task('imgCompression', function(){
-  return gulp.src(paths.images + '/*')
+function imgCompression() {
+  return src(`${paths.images}/*`)
     .pipe(imagemin()) // Compresses PNG, JPEG, GIF and SVG images
-    .pipe(gulp.dest(paths.images))
-});
+    .pipe(dest(paths.images))
+}
 
 // Run django server
-gulp.task('runServer', function(cb) {
-  var cmd = spawn('python', ['manage.py', 'runserver'], {stdio: 'inherit'});
+function runServer(cb) {
+  var cmd = spawn('python', ['manage.py', 'runserver'], {stdio: 'inherit'})
   cmd.on('close', function(code) {
-    console.log('runServer exited with code ' + code);
-    cb(code);
-  });
-});
+    console.log('runServer exited with code ' + code)
+    cb(code)
+  })
+}
 
 // Browser sync server for live reload
-gulp.task('browserSync', function() {
+function initBrowserSync() {
   browserSync.init(
-    [paths.css + "/*.css", paths.js + "*.js", paths.templates + '*.html'], {
-      proxy: "localhost:8000"
-  });
-});
+    [
+      `${paths.css}/*.css`,
+      `${paths.js}/*.js`,
+      `${paths.templates}/*.html`
+    ], {
+      proxy: "localhost:8000"
+    }
+  )
+}
 
 // Watch
-gulp.task('watch', function() {
+function watchPaths() {
+  watch(`${paths.sass}/*.scss`, styles)
+  watch(`${paths.templates}/**/*.html`).on("change", reload)
+  watch([`${paths.js}/*.js`, `!${paths.js}/*.min.js`], scripts).on("change", reload)
+}
 
-  gulp.watch(paths.sass + '/*.scss', ['styles']);
-  gulp.watch(paths.js + '/*.js', ['scripts']).on("change", reload);
-  gulp.watch(paths.images + '/*', ['imgCompression']);
-  gulp.watch(paths.templates + '/**/*.html').on("change", reload);
+// Generate all assets
+const generateAssets = parallel(
+  styles,
+  scripts,
+  {% if cookiecutter.custom_bootstrap_compilation == 'y' %}vendorScripts,{% endif %}
+  imgCompression
+)
 
-});
+// Set up dev environment
+const dev = parallel(
+  runServer,
+  initBrowserSync,
+  watchPaths
+)
 
-// Default task
-gulp.task('default', function() {
-    runSequence(['styles', 'scripts', {% if cookiecutter.custom_bootstrap_compilation == 'y' %}'vendor-scripts', {% endif %}'imgCompression'], ['runServer', 'browserSync', 'watch']);
-});
+// Default task
+exports.default = series(generateAssets, dev)
+exports["generate-assets"] = generateAssets
+exports["dev"] = dev
@@ -6,32 +6,29 @@
     {% if cookiecutter.js_task_runner == 'Gulp' -%}
     {% if cookiecutter.custom_bootstrap_compilation == 'y' -%}
     "bootstrap": "4.1.1",
-    {% endif -%}
-    "browser-sync": "^2.14.0",
-    "del": "^2.2.2",
-    "gulp": "^3.9.1",
-    "gulp-autoprefixer": "^5.0.0",
-    {% if cookiecutter.custom_bootstrap_compilation == 'y' -%}
     "gulp-concat": "^2.6.1",
-    {% endif -%}
-    "gulp-cssnano": "^2.1.2",
-    "gulp-imagemin": "^4.1.0",
-    "gulp-pixrem": "^1.0.0",
-    "gulp-plumber": "^1.1.0",
-    "gulp-rename": "^1.2.2",
-    "gulp-sass": "^3.1.0",
-    "gulp-uglify": "^3.0.0",
-    "gulp-util": "^3.0.7",
-    {% if cookiecutter.custom_bootstrap_compilation == 'y' -%}
     "jquery": "3.3.1",
     "popper.js": "1.14.3",
     {% endif -%}
-    "run-sequence": "^2.1.1"
+    "autoprefixer": "^9.4.7",
+    "browser-sync": "^2.14.0",
+    "cssnano": "^4.1.10",
+    "gulp": "^4.0.0",
+    "gulp-imagemin": "^5.0.3",
+    "gulp-plumber": "^1.2.1",
+    "gulp-postcss": "^8.0.0",
+    "gulp-rename": "^1.2.2",
+    "gulp-sass": "^4.0.2",
+    "gulp-uglify-es": "^1.0.4",
+    "pixrem": "^5.0.0"
     {%- endif %}
   },
   "engines": {
-    "node": ">=0.8.0"
+    "node": ">=8"
   },
+  "browserslist": [
+    "last 2 versions"
+  ],
   "scripts": {
     {% if cookiecutter.js_task_runner == 'Gulp' -%}
     "dev": "gulp"
@@ -1 +1 @@
-python-3.6.6
+python-3.6.8