Merge 5f03505db7
into 3af5333bbb
.vscode/settings.json (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@

{
    "python.pythonPath": "${workspaceFolder}/.venv/bin/python",
    "python.formatting.provider": "yapf",
    "python.linting.flake8Enabled": true,
    "python.linting.pylintEnabled": false,
    "python.linting.enabled": true,
}
README.rst (20 lines changed)
@@ -159,23 +159,15 @@ Answer the prompts with your own desired options_. For example::

    domain_name [example.com]: myreddit.com
    version [0.1.0]: 0.0.1
    timezone [UTC]: America/Los_Angeles
    use_whitenoise [n]: n
    use_celery [n]: y
    use_whitenoise [y]: y
    use_celery [y]: y
    use_mailhog [n]: n
    use_sentry [n]: y
    use_sentry [y]: y
    use_pycharm [n]: y
    windows [n]: n
    use_docker [n]: n
    use_heroku [n]: y
    use_compressor [n]: y
    Select postgresql_version:
    1 - 10.3
    2 - 10.2
    3 - 10.1
    4 - 9.6
    5 - 9.5
    6 - 9.4
    7 - 9.3
    use_newrelic [y]: y
    use_prometheus [y]: y
    Choose from 1, 2, 3, 4 [1]: 1
    Select js_task_runner:
    1 - None
@@ -190,7 +182,7 @@ Answer the prompts with your own desired options_. For example::

    5 - Not open source
    Choose from 1, 2, 3, 4, 5 [1]: 1
    keep_local_envs_in_vcs [y]: y
    debug[n]: n
    debug[y]: y

Enter the project and take a look around::
cookiecutter.json
@@ -16,30 +16,19 @@

    "timezone": "UTC",
    "windows": "n",
    "use_pycharm": "n",
    "use_docker": "n",
    "postgresql_version": [
        "10.4",
        "10.3",
        "10.2",
        "10.1",
        "9.6",
        "9.5",
        "9.4",
        "9.3"
    ],
    "use_newrelic": "y",
    "use_prometheus": "y",
    "js_task_runner": [
        "None",
        "Gulp"
    ],
    "custom_bootstrap_compilation": "n",
    "use_compressor": "n",
    "use_celery": "n",
    "use_celery": "y",
    "use_mailhog": "n",
    "use_sentry": "n",
    "use_whitenoise": "n",
    "use_heroku": "n",
    "use_sentry": "y",
    "use_whitenoise": "y",
    "use_travisci": "n",
    "keep_local_envs_in_vcs": "y",
    "debug": "n"
    "debug": "y"
}
docs/deployment-on-heroku.rst (deleted)
@@ -1,54 +0,0 @@

Deployment on Heroku
====================

.. index:: Heroku

Run these commands to deploy the project to Heroku:

.. code-block:: bash

    heroku create --buildpack https://github.com/heroku/heroku-buildpack-python

    heroku addons:create heroku-postgresql:hobby-dev
    heroku pg:backups schedule --at '02:00 America/Los_Angeles' DATABASE_URL
    heroku pg:promote DATABASE_URL

    heroku addons:create heroku-redis:hobby-dev

    # If using mailgun:
    heroku addons:create mailgun:starter

    heroku addons:create sentry:f1

    heroku config:set PYTHONHASHSEED=random

    heroku config:set WEB_CONCURRENCY=4

    heroku config:set DJANGO_DEBUG=False
    heroku config:set DJANGO_SETTINGS_MODULE=config.settings.production
    heroku config:set DJANGO_SECRET_KEY="$(openssl rand -base64 64)"

    # Generating a 32-character-long random string without any of the visually similar characters "IOl01":
    heroku config:set DJANGO_ADMIN_URL="$(openssl rand -base64 4096 | tr -dc 'A-HJ-NP-Za-km-z2-9' | head -c 32)/"

    # Set this to your Heroku app url, e.g. 'bionic-beaver-28392.herokuapp.com'
    heroku config:set DJANGO_ALLOWED_HOSTS=

    # Assign with AWS_ACCESS_KEY_ID
    heroku config:set DJANGO_AWS_ACCESS_KEY_ID=

    # Assign with AWS_SECRET_ACCESS_KEY
    heroku config:set DJANGO_AWS_SECRET_ACCESS_KEY=

    # Assign with AWS_STORAGE_BUCKET_NAME
    heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME=

    git push heroku master

    heroku run python manage.py migrate
    heroku run python manage.py createsuperuser
    heroku run python manage.py collectstatic --no-input

    heroku run python manage.py check --deploy

    heroku open
docs/deployment-on-pythonanywhere.rst (deleted)
@@ -1,181 +0,0 @@

Deployment on PythonAnywhere
============================

.. index:: PythonAnywhere


Overview
--------

Full instructions follow, but here's a high-level view.

**First-time config**:

1. Pull your code down to PythonAnywhere using a *Bash console* and set up a virtualenv

2. Set your config variables in the *postactivate* script

3. Run the *manage.py* ``migrate`` and ``collectstatic`` commands

4. Add an entry to the PythonAnywhere *Web tab*

5. Set your config variables in the PythonAnywhere *WSGI config file*


Once you've been through this one-off config, future deployments are much simpler: just ``git pull`` and then hit the "Reload" button :)


Getting your code and dependencies installed on PythonAnywhere
--------------------------------------------------------------

Make sure your project is fully committed and pushed up to Bitbucket or GitHub or wherever it may be. Then, log into your PythonAnywhere account, open up a **Bash** console, clone your repo, and create a virtualenv:

.. code-block:: bash

    git clone <my-repo-url>  # you can also use hg
    cd my-project-name
    mkvirtualenv --python=/usr/bin/python3.6 my-project-name
    pip install -r requirements/production.txt  # may take a few minutes


Setting environment variables in the console
--------------------------------------------

Generate a secret key for yourself, e.g. like this:

.. code-block:: bash

    python -c 'import random;import string; print("".join(random.SystemRandom().choice(string.digits + string.ascii_letters + string.punctuation) for _ in range(50)))'

Make a note of it, since we'll need it here in the console and later on in the web app config tab.

Set environment variables via the virtualenv "postactivate" script (this will set them every time you use the virtualenv in a console):

.. code-block:: bash

    vi $VIRTUAL_ENV/bin/postactivate

**TIP:** *If you don't like vi, you can also edit this file via the PythonAnywhere "Files" menu; look in the ".virtualenvs" folder*.

Add these exports:

.. code-block:: bash

    export WEB_CONCURRENCY=4
    export DJANGO_SETTINGS_MODULE='config.settings.production'
    export DJANGO_SECRET_KEY='<secret key goes here>'
    export DJANGO_ALLOWED_HOSTS='<www.your-domain.com>'
    export DJANGO_ADMIN_URL='<not admin/>'
    export MAILGUN_API_KEY='<mailgun key>'
    export MAILGUN_DOMAIN='<mailgun sender domain (e.g. mg.yourdomain.com)>'
    export DJANGO_AWS_ACCESS_KEY_ID=
    export DJANGO_AWS_SECRET_ACCESS_KEY=
    export DJANGO_AWS_STORAGE_BUCKET_NAME=
    export DATABASE_URL='<see below>'

**NOTE:** *The AWS details are not required if you're using WhiteNoise or the built-in PythonAnywhere static files service, but you do need to set them to blank, as above.*


Database setup
--------------

Go to the PythonAnywhere **Databases tab** and configure your database.

* For Postgres, set up your superuser password, then open a Postgres console and run ``CREATE DATABASE my-db-name``. You should probably also set up a specific role and permissions for your app, rather than using the superuser credentials, as sketched below. Make a note of the address and port of your Postgres server.

* For MySQL, set the password and create a database. More info here: https://help.pythonanywhere.com/pages/UsingMySQL

* You can also use SQLite if you like! Not recommended for anything beyond toy projects though.
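
As a sketch of that role setup, runnable from a Bash console (the user, database, and password names below are illustrative placeholders, not part of the original instructions):

.. code-block:: bash

    # connect to your PythonAnywhere Postgres server as the superuser
    psql -h <postgres-address> -p <postgres-port> -U <superuser-name> \
        -c "CREATE USER my_app_user WITH PASSWORD 'change-me'"
    psql -h <postgres-address> -p <postgres-port> -U <superuser-name> \
        -c "CREATE DATABASE my_db_name OWNER my_app_user"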

Now go back to the *postactivate* script and set the ``DATABASE_URL`` environment variable:

.. code-block:: bash

    export DATABASE_URL='postgres://<postgres-username>:<postgres-password>@<postgres-address>:<postgres-port>/<database-name>'
    # or
    export DATABASE_URL='mysql://<pythonanywhere-username>:<mysql-password>@<mysql-address>/<database-name>'
    # or
    export DATABASE_URL='sqlite:////home/yourusername/path/to/db.sqlite'

If you're using MySQL, you may need to run ``pip install mysqlclient``, and maybe add ``mysqlclient`` to *requirements/production.txt* too.

Now run the migration and collectstatic:

.. code-block:: bash

    source $VIRTUAL_ENV/bin/postactivate
    python manage.py migrate
    python manage.py collectstatic
    # and, optionally
    python manage.py createsuperuser


Configure the PythonAnywhere Web Tab
------------------------------------

Go to the PythonAnywhere **Web tab**, hit **Add new web app**, choose **Manual Config**, and then the version of Python you used for your virtualenv.

**NOTE:** *If you're using a custom domain (not on \*.pythonanywhere.com), then you'll need to set up a CNAME with your domain registrar.*

When you're redirected back to the web app config screen, set the **path to your virtualenv**. If you used virtualenvwrapper as above, you can just enter its name.

Click through to the **WSGI configuration file** link (near the top) and edit the WSGI file. Make it look something like this, repeating the environment variables you used earlier:

.. code-block:: python

    import os
    import sys

    path = '/home/<your-username>/<your-project-directory>'
    if path not in sys.path:
        sys.path.append(path)

    os.environ['DJANGO_SETTINGS_MODULE'] = 'config.settings.production'
    os.environ['DJANGO_SECRET_KEY'] = '<as above>'
    os.environ['DJANGO_ALLOWED_HOSTS'] = '<as above>'
    os.environ['DJANGO_ADMIN_URL'] = '<as above>'
    os.environ['MAILGUN_API_KEY'] = '<as above>'
    os.environ['MAILGUN_DOMAIN'] = '<as above>'
    os.environ['DJANGO_AWS_ACCESS_KEY_ID'] = ''
    os.environ['DJANGO_AWS_SECRET_ACCESS_KEY'] = ''
    os.environ['DJANGO_AWS_STORAGE_BUCKET_NAME'] = ''
    os.environ['DATABASE_URL'] = '<as above>'

    from django.core.wsgi import get_wsgi_application
    application = get_wsgi_application()


Back on the Web tab, hit **Reload**, and your app should be live!

**NOTE:** *You may see security warnings until you set up your SSL certificates. If you want to suppress them temporarily, set DJANGO_SECURE_SSL_REDIRECT to blank. Follow the instructions here to get SSL set up: https://help.pythonanywhere.com/pages/SSLOwnDomains/*


Optional: static files
----------------------

If you want to use the PythonAnywhere static files service instead of using WhiteNoise or S3, you'll find its configuration section on the Web tab. Essentially you'll need an entry to match your ``STATIC_URL`` and ``STATIC_ROOT`` settings. There's more info here: https://help.pythonanywhere.com/pages/DjangoStaticFiles


Future deployments
------------------

For subsequent deployments, the procedure is much simpler. In a Bash console:

.. code-block:: bash

    workon my-virtualenv-name
    cd project-directory
    git pull
    python manage.py migrate
    python manage.py collectstatic

And then go to the Web tab and hit **Reload**.

**TIP:** *If you're really keen, you can set up git-push-based deployments: https://blog.pythonanywhere.com/87/*
docs/deployment-with-docker.rst (deleted)
@@ -1,146 +0,0 @@

Deployment with Docker
======================

.. index:: deployment, docker, docker-compose, compose


Prerequisites
-------------

* Docker 1.10+
* Docker Compose 1.6+


Understanding the Docker Compose Setup
--------------------------------------

Before you begin, check out the ``production.yml`` file in the root of this project. Note how it provides configuration for the following services:

* ``django``: your application running behind ``Gunicorn``;
* ``postgres``: PostgreSQL database with the application's relational data;
* ``redis``: Redis instance for caching;
* ``caddy``: Caddy web server with HTTPS on by default.

Provided you have opted for Celery (by setting ``use_celery`` to ``y``), there are three more services:

* ``celeryworker`` running a Celery worker process;
* ``celerybeat`` running a Celery beat process;
* ``flower`` running Flower_ (for more info, check out the :ref:`CeleryFlower` instructions for the local environment).

.. _`Flower`: https://github.com/mher/flower


Configuring the Stack
---------------------

The majority of services above are configured through environment variables. Just check out :ref:`envs` and you will know the drill.

To obtain logs and information about crashes in a production setup, make sure that you have access to an external Sentry instance (e.g. by creating an account with `sentry.io`_), and set the ``SENTRY_DSN`` variable.

You will probably also need to set up the mail backend, for example by adding a `Mailgun`_ API key and a `Mailgun`_ sender domain; otherwise the account creation view will crash and result in a 500 error when the backend attempts to send an email to the account owner.
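
For instance, the relevant entries of the production Django env file might look like this (the values shown are placeholders to replace with your own):

.. code-block:: bash

    # Sentry DSN, from your sentry.io project settings
    SENTRY_DSN=https://<your-public-key>@sentry.io/<your-project-id>
    # Mailgun credentials, from your Mailgun dashboard
    MAILGUN_API_KEY=<your-mailgun-api-key>
    MAILGUN_DOMAIN=mg.yourdomain.com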

.. _sentry.io: https://sentry.io/welcome
.. _Mailgun: https://mailgun.com

Optional: Use AWS IAM Role for EC2 instance
-------------------------------------------

If you are deploying to AWS, you can use an IAM role to substitute for AWS credentials, after which it's safe to remove ``AWS_ACCESS_KEY_ID`` and ``AWS_SECRET_ACCESS_KEY`` from ``.envs/.production/.django``. To do it, create an `IAM role`_ and `attach`_ it to the existing EC2 instance, or create a new EC2 instance with that role. The role should assume, at minimum, the ``AmazonS3FullAccess`` permission.

.. _IAM role: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
.. _attach: https://aws.amazon.com/blogs/security/easily-replace-or-attach-an-iam-role-to-an-existing-ec2-instance-by-using-the-ec2-console/
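
As a rough sketch of the AWS CLI steps involved (the role and profile names are placeholders, and ``ec2-trust-policy.json`` is assumed to hold the standard EC2 trust relationship):

.. code-block:: bash

    aws iam create-role --role-name my-app-s3 \
        --assume-role-policy-document file://ec2-trust-policy.json
    aws iam attach-role-policy --role-name my-app-s3 \
        --policy-arn arn:aws:iam::aws:policy/AmazonS3FullAccess
    aws iam create-instance-profile --instance-profile-name my-app-s3
    aws iam add-role-to-instance-profile \
        --instance-profile-name my-app-s3 --role-name my-app-s3
    aws ec2 associate-iam-instance-profile --instance-id <your-instance-id> \
        --iam-instance-profile Name=my-app-s3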

HTTPS is On by Default
----------------------

SSL (Secure Sockets Layer) is a standard security technology for establishing an encrypted link between a server and a client, typically in this case a web server (website) and a browser. Not having HTTPS means that malicious network users can sniff authentication credentials between your website and end users' browsers.

It is always better to deploy a site behind HTTPS, and this will only become more crucial as web services extend to the IoT (Internet of Things). For this reason, we have set up a number of security defaults to help make your website secure:

* If you are not using a subdomain of the domain name set in the project, then remember to put your staging/production IP address in the ``DJANGO_ALLOWED_HOSTS`` environment variable (see :ref:`settings`) before you deploy your website. Failure to do this will mean you will not have access to your website through the HTTP protocol.

* Access to the Django admin is set up by default to require HTTPS in production or once *live*.

The Caddy web server used in the default configuration will get you a valid certificate from Let's Encrypt and update it automatically. All you need to do to enable this is to make sure that your DNS records are pointing to the server Caddy runs on.

You can read more about this at `Automatic HTTPS`_ in the Caddy docs.

.. _Automatic HTTPS: https://caddyserver.com/docs/automatic-https


(Optional) Postgres Data Volume Modifications
---------------------------------------------

Postgres saves its database files to the ``production_postgres_data`` volume by default. Change that if you want something else, and make sure to make backups, since this is not done automatically.


Building & Running Production Stack
-----------------------------------

You will need to build the stack first. To do that, run::

    docker-compose -f production.yml build

Once this is ready, you can run it with::

    docker-compose -f production.yml up

To run the stack and detach the containers, run::

    docker-compose -f production.yml up -d

To run a migration, open up a second terminal and run::

    docker-compose -f production.yml run --rm django python manage.py migrate

To create a superuser, run::

    docker-compose -f production.yml run --rm django python manage.py createsuperuser

If you need a shell, run::

    docker-compose -f production.yml run --rm django python manage.py shell

To check the logs out, run::

    docker-compose -f production.yml logs

If you want to scale your application, run::

    docker-compose -f production.yml scale django=4
    docker-compose -f production.yml scale celeryworker=2

.. warning:: Don't try to scale ``postgres``, ``celerybeat``, or ``caddy``.

To see how your containers are doing, run::

    docker-compose -f production.yml ps


Example: Supervisor
-------------------

Once you are ready with your initial setup, you want to make sure that your application is run by a process manager, to survive reboots and auto-restart in case of an error. You can use the process manager you are most familiar with. All it needs to do is run ``docker-compose -f production.yml up`` in your project's root directory.

If you are using ``supervisor``, you can use this file as a starting point::

    [program:{{cookiecutter.project_slug}}]
    command=docker-compose -f production.yml up
    directory=/path/to/{{cookiecutter.project_slug}}
    redirect_stderr=true
    autostart=true
    autorestart=true
    priority=10

Move it to ``/etc/supervisor/conf.d/{{cookiecutter.project_slug}}.conf`` and run::

    supervisorctl reread
    supervisorctl start {{cookiecutter.project_slug}}

For a status check, run::

    supervisorctl status
docs/developing-locally-docker.rst
@@ -181,7 +181,6 @@ Celery Flower

Prerequisites:

* ``use_docker`` was set to ``y`` on project initialization;
* ``use_celery`` was set to ``y`` on project initialization.

By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials, specified as the ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables in the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` files. Check out ``localhost:5555`` and see for yourself.
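
A minimal sketch of those two entries (the values here are illustrative, not defaults shipped with the template):

.. code-block:: bash

    # .envs/.local/.django (and likewise in .envs/.production/.django)
    CELERY_FLOWER_USER=flower_admin
    CELERY_FLOWER_PASSWORD=change-me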
docs/docker-postgres-backups.rst (deleted)
@@ -1,87 +0,0 @@

PostgreSQL Backups with Docker
==============================

.. note:: For brevity it is assumed that you will be running the below commands against the local environment; however, this is by no means mandatory, so feel free to switch to ``production.yml`` when needed.


Prerequisites
-------------

#. the project was generated with ``use_docker`` set to ``y``;
#. the stack is up and running: ``docker-compose -f local.yml up -d postgres``.


Creating a Backup
-----------------

To create a backup, run::

    $ docker-compose -f local.yml exec postgres backup

Assuming your project's database is named ``my_project``, here is what you will see::

    Backing up the 'my_project' database...
    SUCCESS: 'my_project' database backup 'backup_2018_03_13T09_05_07.sql.gz' has been created and placed in '/backups'.

Keep in mind that ``/backups`` is a directory inside the ``postgres`` container.


Viewing the Existing Backups
----------------------------

To list existing backups, ::

    $ docker-compose -f local.yml exec postgres backups

These are the sample contents of ``/backups``::

    These are the backups you have got:
    total 24K
    -rw-r--r-- 1 root root 5.2K Mar 13 09:05 backup_2018_03_13T09_05_07.sql.gz
    -rw-r--r-- 1 root root 5.2K Mar 12 21:13 backup_2018_03_12T21_13_03.sql.gz
    -rw-r--r-- 1 root root 5.2K Mar 12 21:12 backup_2018_03_12T21_12_58.sql.gz


Copying Backups Locally
-----------------------

If you want to copy backups from your ``postgres`` container locally, the ``docker cp`` command_ will help you with that.

For example, given that ``9c5c3f055843`` is the container ID, copying all the backups over to a local directory is as simple as ::

    $ docker cp 9c5c3f055843:/backups ./backups

With a single backup file copied to ``.`` that would be ::

    $ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .

.. _`command`: https://docs.docker.com/engine/reference/commandline/cp/


Restoring from the Existing Backup
----------------------------------

To restore from one of the backups you have already got (take ``backup_2018_03_13T09_05_07.sql.gz`` for example), ::

    $ docker-compose -f local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz

You will see something like ::

    Restoring the 'my_project' database from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup...
    INFO: Dropping the database...
    INFO: Creating a new database...
    INFO: Applying the backup to the new database...
    SET
    SET
    SET
    SET
    SET
    set_config
    ------------

    (1 row)

    SET
    # ...
    ALTER TABLE
    SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
@@ -1,17 +0,0 @@

PostgreSQL Installation Basics
==============================

.. index:: pip, virtualenv, PostgreSQL

The steps below will get you up and running with PostgreSQL. This assumes you have pip and virtualenv_ installed.

.. _virtualenv: http://docs.python-guide.org/en/latest/dev/virtualenvs/

On Mac
------

Install PostgreSQLapp_ from the browser and move Postgres.app into your Applications folder. Alternatively, install PostgreSQL with Homebrew_::

    $ brew install postgres
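
Once installed, you can start the Homebrew-managed server and create a database for your project (a sketch assuming a default Homebrew setup; the database name is a placeholder)::

    $ brew services start postgresql
    $ createdb my_project_db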

.. _PostgreSQLapp: http://postgresapp.com/
.. _Homebrew: http://brew.sh/
@@ -1,22 +0,0 @@

Sass Compilation & Live Reloading
=================================

If you'd like to take advantage of `live reload`_ and Sass compilation:

- Make sure that nodejs_ is installed. Then, in the project root, run::

    $ npm install

.. _nodejs: http://nodejs.org/download/

- Now you just need::

    $ npm run dev

The base app will now run as it would with the usual ``manage.py runserver``, but with live reloading and Sass compilation enabled.
When you change your Sass files, they will be automatically recompiled and the changes will be reflected in your browser without refreshing.

To get live reloading to work, you'll probably need to install an `appropriate browser extension`_.

.. _live reload: http://livereload.com/
.. _appropriate browser extension: http://livereload.com/extensions/
docs/project-generation-options.rst
@@ -43,19 +43,11 @@ windows:

use_pycharm:
    Indicates whether the project should be configured for development with PyCharm_.

use_docker:
    Indicates whether the project should be configured to use Docker_ and `Docker Compose`_.

use_newrelic:
    Indicates whether the project should be configured to use New Relic.

postgresql_version:
    Select a PostgreSQL_ version to use. The choices are:

    1. 10.3
    2. 10.2
    3. 10.1
    4. 9.6
    5. 9.5
    6. 9.4
    7. 9.3

use_prometheus:
    Indicates whether the project should be configured to use Prometheus.

js_task_runner:
    Select a JavaScript task runner. The choices are:
@@ -83,10 +75,6 @@ use_sentry:

use_whitenoise:
    Indicates whether the project should be configured to use WhiteNoise_.

use_heroku:
    Indicates whether the project should be configured so as to be deployable
    to Heroku_.

use_travisci:
    Indicates whether the project should be configured to use `Travis CI`_.
docs/settings.rst
@@ -41,9 +41,6 @@ The following table lists settings and their defaults for third-party applications:

======================================= =========================== ============================================== ======================================================================
Environment Variable                    Django Setting              Development Default                            Production Default
======================================= =========================== ============================================== ======================================================================
DJANGO_AWS_ACCESS_KEY_ID                AWS_ACCESS_KEY_ID           n/a                                            raises error
DJANGO_AWS_SECRET_ACCESS_KEY            AWS_SECRET_ACCESS_KEY       n/a                                            raises error
DJANGO_AWS_STORAGE_BUCKET_NAME          AWS_STORAGE_BUCKET_NAME     n/a                                            raises error
SENTRY_DSN                              SENTRY_DSN                  n/a                                            raises error
DJANGO_SENTRY_CLIENT                    SENTRY_CLIENT               n/a                                            raven.contrib.django.raven_compat.DjangoClient
DJANGO_SENTRY_LOG_LEVEL                 SENTRY_LOG_LEVEL            n/a                                            logging.INFO
|
@ -55,15 +55,6 @@ def remove_pycharm_files():
|
|||
if os.path.exists(docs_dir_path):
|
||||
shutil.rmtree(docs_dir_path)
|
||||
|
||||
|
||||
def remove_docker_files():
|
||||
shutil.rmtree("compose")
|
||||
|
||||
file_names = ["local.yml", "production.yml", ".dockerignore"]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
|
||||
|
||||
def remove_utility_files():
|
||||
shutil.rmtree("utility")
|
||||
|
||||
|
@@ -240,11 +231,6 @@ def set_flags_in_envs(

    set_django_secret_key(production_django_envs_path)
    set_django_admin_url(production_django_envs_path)

    set_postgres_user(local_postgres_envs_path, value=postgres_user)
    set_postgres_password(local_postgres_envs_path, value=DEBUG_VALUE if debug else None)
    set_postgres_user(production_postgres_envs_path, value=postgres_user)
    set_postgres_password(production_postgres_envs_path, value=DEBUG_VALUE if debug else None)

    set_celery_flower_user(local_django_envs_path, value=celery_flower_user)
    set_celery_flower_password(local_django_envs_path, value=DEBUG_VALUE if debug else None)
    set_celery_flower_user(production_django_envs_path, value=celery_flower_user)
@@ -284,18 +270,9 @@ def main():

    if "{{ cookiecutter.use_pycharm }}".lower() == "n":
        remove_pycharm_files()

    if "{{ cookiecutter.use_docker }}".lower() == "y":
        remove_utility_files()
    else:
        remove_docker_files()

    if "{{ cookiecutter.use_heroku }}".lower() == "n":
        remove_heroku_files()

    if (
        "{{ cookiecutter.use_docker }}".lower() == "n"
        and "{{ cookiecutter.use_heroku }}".lower() == "n"
    ):

        if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
            print(
                INFO + ".env(s) are only utilized when Docker Compose and/or "
@@ -303,35 +280,13 @@ def main():

                "make sense given your current setup." + TERMINATOR
            )
        remove_envs_and_associated_files()
    else:
        append_to_gitignore_file(".env")
        append_to_gitignore_file(".envs/*")
        if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
            append_to_gitignore_file("!.envs/.local/")

    if "{{ cookiecutter.js_task_runner}}".lower() == "none":
        remove_gulp_files()
        remove_packagejson_file()
    if (
        "{{ cookiecutter.js_task_runner }}".lower() != "none"
        and "{{ cookiecutter.use_docker }}".lower() == "y"
    ):
        print(
            WARNING
            + "Docker and {} JS task runner ".format(
                "{{ cookiecutter.js_task_runner }}".lower().capitalize()
            )
            + "working together not supported yet. "
            "You can continue using the generated project like you "
            "normally would, however you would need to add a JS "
            "task runner service to your Docker Compose configuration "
            "manually." + TERMINATOR
        )

    if "{{ cookiecutter.use_celery }}".lower() == "n":
        remove_celery_app()
        if "{{ cookiecutter.use_docker }}".lower() == "y":
            remove_celery_compose_dirs()

    if "{{ cookiecutter.use_travisci }}".lower() == "n":
        remove_dottravisyml_file()
|
@ -24,9 +24,8 @@ if hasattr(project_slug, "isidentifier"):
|
|||
|
||||
assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."
|
||||
|
||||
if "{{ cookiecutter.use_docker }}".lower() == "n":
|
||||
python_major_version = sys.version_info[0]
|
||||
if python_major_version == 2:
|
||||
python_major_version = sys.version_info[0]
|
||||
if python_major_version == 2:
|
||||
print(
|
||||
WARNING + "Cookiecutter Django does not support Python 2. "
|
||||
"Stability is guaranteed with Python 3.6+ only, "
|
||||
|
|
|
@ -1,27 +0,0 @@
|
|||
#!/bin/sh
|
||||
# this is a very simple script that tests the docker configuration for cookiecutter-django
|
||||
# it is meant to be run from the root directory of the repository, eg:
|
||||
# sh tests/test_docker.sh
|
||||
|
||||
# install test requirements
|
||||
pip install -r requirements.txt
|
||||
|
||||
# create a cache directory
|
||||
mkdir -p .cache/docker
|
||||
cd .cache/docker
|
||||
|
||||
# create the project using the default settings in cookiecutter.json
|
||||
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y
|
||||
cd my_awesome_project
|
||||
|
||||
# run the project's type checks
|
||||
docker-compose -f local.yml run django mypy my_awesome_project
|
||||
|
||||
# run the project's tests
|
||||
docker-compose -f local.yml run django pytest
|
||||
|
||||
# return non-zero status code if there are migrations that have not been created
|
||||
docker-compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
||||
|
||||
# Test support for translations
|
||||
docker-compose -f local.yml run django python manage.py makemessages
|
|
{{cookiecutter.project_slug}}/.dockerignore
@@ -1,4 +1,10 @@

.*
!.coveragerc
!.env
!.pylintrc
Dockerfile*

# dot files and dot directories
**/.*

.venv

# Add ref files from .git, needed by sentry (logs)
!.git/HEAD
!.git/refs/
{{cookiecutter.project_slug}}/.env-example (new file, 15 lines)
@@ -0,0 +1,15 @@

# DB
export DB_NAME=
export DB_USER=
export DB_PASSWORD=
export DB_HOST=
export DB_PORT=
export DB_NAME_TESTS=

# CELERY
export CELERY_BROKER_URL=
export CELERY_RESULT_BACKEND=
export CELERY_ACCEPT_CONTENT=
export CELERY_TASK_SERIALIZER=
export CELERY_RESULT_SERIALIZER=
export CELERY_IGNORE_RESULT=
{{cookiecutter.project_slug}}/.envs/.local/.postgres (deleted)
@@ -1,7 +0,0 @@

# PostgreSQL
# ------------------------------------------------------------------------------
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_DB={{ cookiecutter.project_slug }}
POSTGRES_USER=!!!SET POSTGRES_USER!!!
POSTGRES_PASSWORD=!!!SET POSTGRES_PASSWORD!!!
{{cookiecutter.project_slug}}/.envs/.production/.django
@@ -17,12 +17,6 @@ MAILGUN_API_KEY=

DJANGO_SERVER_EMAIL=
MAILGUN_DOMAIN=

# AWS
# ------------------------------------------------------------------------------
DJANGO_AWS_ACCESS_KEY_ID=
DJANGO_AWS_SECRET_ACCESS_KEY=
DJANGO_AWS_STORAGE_BUCKET_NAME=

# django-allauth
# ------------------------------------------------------------------------------
DJANGO_ACCOUNT_ALLOW_REGISTRATION=True
{{cookiecutter.project_slug}}/.envs/.production/.postgres (deleted)
@@ -1,7 +0,0 @@

# PostgreSQL
# ------------------------------------------------------------------------------
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_DB={{ cookiecutter.project_slug }}
POSTGRES_USER=!!!SET POSTGRES_USER!!!
POSTGRES_PASSWORD=!!!SET POSTGRES_PASSWORD!!!
{{cookiecutter.project_slug}}/.gitignore (vendored, 4 lines changed)
@@ -321,7 +321,6 @@ Session.vim

# Auto-generated tag files
tags
{% if cookiecutter.use_docker == 'n' %}

### VirtualEnv template
# Virtualenv

@@ -333,10 +332,9 @@ tags

[Ss]cripts
pyvenv.cfg
pip-selfcheck.json
{% endif %}

### Project template
{% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' %}
{% if cookiecutter.use_mailhog == 'y'%}
MailHog
{%- endif %}
{{ cookiecutter.project_slug }}/media/
{{cookiecutter.project_slug}}/Dockerfile (new file, 21 lines)
@@ -0,0 +1,21 @@

FROM python:3.6

EXPOSE 8000

WORKDIR /mc/{{ cookiecutter.project_slug }}

ADD requirements requirements

RUN pip install -r requirements/production.txt

RUN apt-get update && apt-get install -y unzip

RUN wget https://releases.hashicorp.com/envconsul/0.6.1/envconsul_0.6.1_linux_amd64.zip && unzip envconsul_0.6.1_linux_amd64.zip -d /usr/local/bin

ADD . /mc/{{ cookiecutter.project_slug }}

RUN chmod +x run.sh

ENTRYPOINT ["./run.sh"]

CMD ["{{ cookiecutter.project_slug }}"]
{{cookiecutter.project_slug}}/README.rst
@@ -85,14 +85,7 @@ Please note: For Celery's import magic to work, it is important *where* the celery commands are run.

Email Server
^^^^^^^^^^^^
{% if cookiecutter.use_docker == 'y' %}
In development, it is often nice to be able to see emails that are being sent from your application. For that reason, a local SMTP server, `MailHog`_, with a web interface, is available as a Docker container.

The MailHog container will start automatically when you run all Docker containers.
Please check the `cookiecutter-django Docker documentation`_ for more details on how to start all containers.

With MailHog running, to view messages that are sent by your application, open your browser and go to ``http://127.0.0.1:8025``
{% else %}
In development, it is often nice to be able to see emails that are being sent from your application. If you choose to use `MailHog`_ when generating the project, a local SMTP server with a web interface will be available.

#. `Download the latest MailHog release`_ for your OS.

@@ -114,7 +107,6 @@ In development, it is often nice to be able to see emails that are being sent from your application.

Now you have your own mail server running locally, ready to receive whatever you send it.

.. _`Download the latest MailHog release`: https://github.com/mailhog/MailHog/releases
{% endif %}
.. _mailhog: https://github.com/mailhog/MailHog
{% endif %}
{% if cookiecutter.use_sentry == "y" %}

@@ -132,24 +124,6 @@ Deployment
----------

The following details how to deploy this application.
{% if cookiecutter.use_heroku.lower() == "y" %}

Heroku
^^^^^^

See detailed `cookiecutter-django Heroku documentation`_.

.. _`cookiecutter-django Heroku documentation`: http://cookiecutter-django.readthedocs.io/en/latest/deployment-on-heroku.html
{% endif %}
{% if cookiecutter.use_docker.lower() == "y" %}

Docker
^^^^^^

See detailed `cookiecutter-django Docker documentation`_.

.. _`cookiecutter-django Docker documentation`: http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html
{% endif %}

{% if cookiecutter.custom_bootstrap_compilation == "y" %}
Custom Bootstrap Compilation
compose/local/django/Dockerfile (deleted)
@@ -1,44 +0,0 @@

FROM python:3.6-alpine

ENV PYTHONUNBUFFERED 1

RUN apk update \
  # psycopg2 dependencies
  && apk add --virtual build-deps gcc python3-dev musl-dev \
  && apk add postgresql-dev \
  # Pillow dependencies
  && apk add jpeg-dev zlib-dev freetype-dev lcms2-dev openjpeg-dev tiff-dev tk-dev tcl-dev \
  # CFFI dependencies
  && apk add libffi-dev py-cffi \
  # Translations dependencies
  && apk add gettext \
  # https://docs.djangoproject.com/en/dev/ref/django-admin/#dbshell
  && apk add postgresql-client

# Requirements are installed here to ensure they will be cached.
COPY ./requirements /requirements
RUN pip install -r /requirements/local.txt

COPY ./compose/production/django/entrypoint /entrypoint
RUN sed -i 's/\r//' /entrypoint
RUN chmod +x /entrypoint

COPY ./compose/local/django/start /start
RUN sed -i 's/\r//' /start
RUN chmod +x /start
{% if cookiecutter.use_celery == "y" %}
COPY ./compose/local/django/celery/worker/start /start-celeryworker
RUN sed -i 's/\r//' /start-celeryworker
RUN chmod +x /start-celeryworker

COPY ./compose/local/django/celery/beat/start /start-celerybeat
RUN sed -i 's/\r//' /start-celerybeat
RUN chmod +x /start-celerybeat

COPY ./compose/local/django/celery/flower/start /start-flower
RUN sed -i 's/\r//' /start-flower
RUN chmod +x /start-flower
{% endif %}
WORKDIR /app

ENTRYPOINT ["/entrypoint"]
compose/local/django/celery/beat/start (deleted)
@@ -1,8 +0,0 @@

#!/bin/sh

set -o errexit
set -o nounset


rm -f './celerybeat.pid'
celery -A {{cookiecutter.project_slug}}.taskapp beat -l INFO
compose/local/django/celery/flower/start (deleted)
@@ -1,10 +0,0 @@

#!/bin/sh

set -o errexit
set -o nounset


celery flower \
    --app={{cookiecutter.project_slug}}.taskapp \
    --broker="${CELERY_BROKER_URL}" \
    --basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
compose/local/django/celery/worker/start (deleted)
@@ -1,7 +0,0 @@

#!/bin/sh

set -o errexit
set -o nounset


celery -A {{cookiecutter.project_slug}}.taskapp worker -l INFO
compose/local/django/start (deleted)
@@ -1,9 +0,0 @@

#!/bin/sh

set -o errexit
set -o pipefail
set -o nounset


python manage.py migrate
python manage.py runserver_plus 0.0.0.0:8000
compose/production/caddy/Caddyfile (deleted)
@@ -1,14 +0,0 @@

www.{% raw %}{$DOMAIN_NAME}{% endraw %} {
    redir https://{% raw %}{$DOMAIN_NAME}{% endraw %}
}

{% raw %}{$DOMAIN_NAME}{% endraw %} {
    proxy / django:5000 {
        header_upstream Host {host}
        header_upstream X-Real-IP {remote}
        header_upstream X-Forwarded-Proto {scheme}
    }
    log stdout
    errors stdout
    gzip
}
compose/production/caddy/Dockerfile (deleted)
@@ -1,3 +0,0 @@

FROM abiosoft/caddy:0.11.0

COPY ./compose/production/caddy/Caddyfile /etc/Caddyfile
compose/production/django/Dockerfile (deleted)
@@ -1,54 +0,0 @@

FROM python:3.6-alpine

ENV PYTHONUNBUFFERED 1

RUN apk update \
  # psycopg2 dependencies
  && apk add --virtual build-deps gcc python3-dev musl-dev \
  && apk add postgresql-dev \
  # Pillow dependencies
  && apk add jpeg-dev zlib-dev freetype-dev lcms2-dev openjpeg-dev tiff-dev tk-dev tcl-dev \
  # CFFI dependencies
  && apk add libffi-dev py-cffi

RUN addgroup -S django \
    && adduser -S -G django django

# Requirements are installed here to ensure they will be cached.
COPY ./requirements /requirements
RUN pip install --no-cache-dir -r /requirements/production.txt \
    && rm -rf /requirements

COPY ./compose/production/django/entrypoint /entrypoint
RUN sed -i 's/\r//' /entrypoint
RUN chmod +x /entrypoint
RUN chown django /entrypoint

COPY ./compose/production/django/start /start
RUN sed -i 's/\r//' /start
RUN chmod +x /start
RUN chown django /start
{% if cookiecutter.use_celery == "y" %}
COPY ./compose/production/django/celery/worker/start /start-celeryworker
RUN sed -i 's/\r//' /start-celeryworker
RUN chmod +x /start-celeryworker
RUN chown django /start-celeryworker

COPY ./compose/production/django/celery/beat/start /start-celerybeat
RUN sed -i 's/\r//' /start-celerybeat
RUN chmod +x /start-celerybeat
RUN chown django /start-celerybeat

COPY ./compose/production/django/celery/flower/start /start-flower
RUN sed -i 's/\r//' /start-flower
RUN chmod +x /start-flower
{% endif %}
COPY . /app

RUN chown -R django /app

USER django

WORKDIR /app

ENTRYPOINT ["/entrypoint"]
compose/production/django/celery/beat/start (deleted)
@@ -1,8 +0,0 @@

#!/bin/sh

set -o errexit
set -o pipefail
set -o nounset


celery -A {{cookiecutter.project_slug}}.taskapp beat -l INFO
compose/production/django/celery/flower/start (deleted)
@@ -1,10 +0,0 @@

#!/bin/sh

set -o errexit
set -o nounset


celery flower \
    --app={{cookiecutter.project_slug}}.taskapp \
    --broker="${CELERY_BROKER_URL}" \
    --basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
compose/production/django/celery/worker/start (deleted)
@@ -1,8 +0,0 @@

#!/bin/sh

set -o errexit
set -o pipefail
set -o nounset


celery -A {{cookiecutter.project_slug}}.taskapp worker -l INFO
compose/production/django/entrypoint (deleted)
@@ -1,43 +0,0 @@

#!/bin/sh

set -o errexit
set -o pipefail
set -o nounset


# N.B. If only .env files supported variable expansion...
export CELERY_BROKER_URL="${REDIS_URL}"

if [ -z "${POSTGRES_USER}" ]; then
    base_postgres_image_default_user='postgres'
    export POSTGRES_USER="${base_postgres_image_default_user}"
fi
export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"

postgres_ready() {
python << END
import sys

import psycopg2

try:
    psycopg2.connect(
        dbname="${POSTGRES_DB}",
        user="${POSTGRES_USER}",
        password="${POSTGRES_PASSWORD}",
        host="${POSTGRES_HOST}",
        port="${POSTGRES_PORT}",
    )
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)

END
}
until postgres_ready; do
  >&2 echo 'Waiting for PostgreSQL to become available...'
  sleep 1
done
>&2 echo 'PostgreSQL is available'

exec "$@"
compose/production/django/start (deleted)
@@ -1,9 +0,0 @@

#!/bin/sh

set -o errexit
set -o pipefail
set -o nounset


python /app/manage.py collectstatic --noinput
/usr/local/bin/gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app
compose/production/postgres/Dockerfile (deleted)
@@ -1,6 +0,0 @@

FROM postgres:{{ cookiecutter.postgresql_version }}

COPY ./compose/production/postgres/maintenance /usr/local/bin/maintenance
RUN chmod +x /usr/local/bin/maintenance/*
RUN mv /usr/local/bin/maintenance/* /usr/local/bin \
    && rmdir /usr/local/bin/maintenance
compose/production/postgres/maintenance/_sourced/constants.sh (deleted)
@@ -1,5 +0,0 @@

#!/usr/bin/env bash


BACKUP_DIR_PATH='/backups'
BACKUP_FILE_PREFIX='backup'
compose/production/postgres/maintenance/_sourced/countdown.sh (deleted)
@@ -1,12 +0,0 @@

#!/usr/bin/env bash


countdown() {
    declare desc="A simple countdown. Source: https://superuser.com/a/611582"
    local seconds="${1}"
    local d=$(($(date +%s) + "${seconds}"))
    while [ "$d" -ge `date +%s` ]; do
        echo -ne "$(date -u --date @$(($d - `date +%s`)) +%H:%M:%S)\r";
        sleep 0.1
    done
}
compose/production/postgres/maintenance/_sourced/messages.sh (deleted)
@@ -1,41 +0,0 @@

#!/usr/bin/env bash


message_newline() {
    echo
}

message_debug()
{
    echo -e "DEBUG: ${@}"
}

message_welcome()
{
    echo -e "\e[1m${@}\e[0m"
}

message_warning()
{
    echo -e "\e[33mWARNING\e[0m: ${@}"
}

message_error()
{
    echo -e "\e[31mERROR\e[0m: ${@}"
}

message_info()
{
    echo -e "\e[37mINFO\e[0m: ${@}"
}

message_suggestion()
{
    echo -e "\e[33mSUGGESTION\e[0m: ${@}"
}

message_success()
{
    echo -e "\e[32mSUCCESS\e[0m: ${@}"
}
compose/production/postgres/maintenance/_sourced/yes_no.sh (deleted)
@@ -1,16 +0,0 @@

#!/usr/bin/env bash


yes_no() {
    declare desc="Prompt for confirmation. \$\"\{1\}\": confirmation message."
    local arg1="${1}"

    local response=
    read -r -p "${arg1} (y/[n])? " response
    if [[ "${response}" =~ ^[Yy]$ ]]
    then
        exit 0
    else
        exit 1
    fi
}
compose/production/postgres/maintenance/backup (deleted)
@@ -1,38 +0,0 @@

#!/usr/bin/env bash


### Create a database backup.
###
### Usage:
###     $ docker-compose -f <environment>.yml (exec |run --rm) postgres backup


set -o errexit
set -o pipefail
set -o nounset


working_dir="$(dirname ${0})"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"


message_welcome "Backing up the '${POSTGRES_DB}' database..."


if [[ "${POSTGRES_USER}" == "postgres" ]]; then
    message_error "Backing up as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
    exit 1
fi

export PGHOST="${POSTGRES_HOST}"
export PGPORT="${POSTGRES_PORT}"
export PGUSER="${POSTGRES_USER}"
export PGPASSWORD="${POSTGRES_PASSWORD}"
export PGDATABASE="${POSTGRES_DB}"

backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz"
pg_dump | gzip > "${BACKUP_DIR_PATH}/${backup_filename}"


message_success "'${POSTGRES_DB}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'."
compose/production/postgres/maintenance/backups (deleted)
@@ -1,22 +0,0 @@

#!/usr/bin/env bash


### View backups.
###
### Usage:
###     $ docker-compose -f <environment>.yml (exec |run --rm) postgres backups


set -o errexit
set -o pipefail
set -o nounset


working_dir="$(dirname ${0})"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"


message_welcome "These are the backups you have got:"

ls -lht "${BACKUP_DIR_PATH}"
compose/production/postgres/maintenance/restore (deleted)
@@ -1,55 +0,0 @@

#!/usr/bin/env bash


### Restore database from a backup.
###
### Parameters:
###     <1> filename of an existing backup.
###
### Usage:
###     $ docker-compose -f <environment>.yml (exec |run --rm) postgres restore <1>


set -o errexit
set -o pipefail
set -o nounset


working_dir="$(dirname ${0})"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"


if [[ -z ${1+x} ]]; then
    message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
    exit 1
fi
backup_filename="${BACKUP_DIR_PATH}/${1}"
if [[ ! -f "${backup_filename}" ]]; then
    message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
    exit 1
fi

message_welcome "Restoring the '${POSTGRES_DB}' database from the '${backup_filename}' backup..."

if [[ "${POSTGRES_USER}" == "postgres" ]]; then
    message_error "Restoring as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
    exit 1
fi

export PGHOST="${POSTGRES_HOST}"
export PGPORT="${POSTGRES_PORT}"
export PGUSER="${POSTGRES_USER}"
export PGPASSWORD="${POSTGRES_PASSWORD}"
export PGDATABASE="${POSTGRES_DB}"

message_info "Dropping the database..."
dropdb "${PGDATABASE}"

message_info "Creating a new database..."
createdb --owner="${POSTGRES_USER}"

message_info "Applying the backup to the new database..."
gunzip -c "${backup_filename}" | psql "${POSTGRES_DB}"

message_success "The '${POSTGRES_DB}' database has been restored from the '${backup_filename}' backup."
{{cookiecutter.project_slug}}/config/settings/base.py
@@ -3,21 +3,29 @@ Base settings to build other settings files upon.

"""

import environ
from os import getenv
import dj_database_url

ROOT_DIR = environ.Path(__file__) - 3  # ({{ cookiecutter.project_slug }}/config/settings/base.py - 3 = {{ cookiecutter.project_slug }}/)
APPS_DIR = ROOT_DIR.path('{{ cookiecutter.project_slug }}')

env = environ.Env()

def eval_bool(env_value, default=None):
    return {'true': True, 'false': False}.get(str(env_value).lower(), default)
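# Illustrative behaviour of eval_bool (example values, not from the original file):
#   eval_bool('True')               -> True   ('true'/'false' are matched case-insensitively)
#   eval_bool('off', default=False) -> False  (unrecognised values fall back to the default)
#   eval_bool(None, default=True)   -> True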

READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
    # OS environment variables take precedence over variables from .env
    env.read_env(str(ROOT_DIR.path('.env')))

ENV = getenv('ENV', 'development').lower()

# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
DEBUG = eval_bool(getenv('DEBUG'), True)

# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
@@ -37,16 +45,27 @@ USE_TZ = True

# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
{% if cookiecutter.use_docker == 'y' -%}

DB_NAME = getenv('DB_NAME', '{{ cookiecutter.project_slug }}')
DB_USER = getenv('DB_USER', 'root')
DB_PASSWORD = getenv('DB_PASSWORD', '')
DB_HOST = getenv('DB_HOST', '127.0.0.1')
DB_PORT = getenv('DB_PORT', '3306')
DB_URL = getenv('DB_URL',
                'mysql://' + DB_USER + ':' + DB_PASSWORD + '@' +
                DB_HOST + ':' + DB_PORT + '/' + DB_NAME)

DATABASES = {
    'default': env.db('DATABASE_URL'),
}
{%- else %}
DATABASES = {
    'default': env.db('DATABASE_URL', default='postgres://{% if cookiecutter.windows == 'y' %}localhost{% endif %}/{{cookiecutter.project_slug}}'),
    'default': dj_database_url.config(
        default=DB_URL,
        conn_max_age=int(getenv('DB_CONN_MAX_AGE', 600)),
    )
}
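# For reference (an assumed example, not part of the original file): with the
# defaults above, DB_URL comes out as 'mysql://root:@127.0.0.1:3306/<project_slug>',
# which dj_database_url parses into the usual ENGINE/NAME/USER/PASSWORD/HOST/PORT
# dictionary; the conn_max_age argument populates CONN_MAX_AGE.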

{% if cookiecutter.use_prometheus == 'y' -%}
DB_ENGINE = getenv('DB_ENGINE', 'django_prometheus.db.backends.mysql')
DATABASES['default']['ENGINE'] = DB_ENGINE
{%- endif %}
DATABASES['default']['ATOMIC_REQUESTS'] = True

# URLS
# ------------------------------------------------------------------------------
@@ -133,6 +152,9 @@ AUTH_PASSWORD_VALIDATORS = [

# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
    {% if cookiecutter.use_prometheus == 'y' -%}
    'django_prometheus.middleware.PrometheusBeforeMiddleware',
    {% endif %}
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',

@@ -140,6 +162,9 @@ MIDDLEWARE = [

    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    {% if cookiecutter.use_prometheus == 'y' -%}
    'django_prometheus.middleware.PrometheusAfterMiddleware',
    {% endif %}
]

# STATIC
@@ -210,9 +235,22 @@ FIXTURE_DIRS = (

)

# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# To test the SMTP server in development, comment out the console backend line
# and start a local SMTP mail server using the following command:
# `python -m smtpd -n -c DebuggingServer localhost:1025`
EMAIL_BACKEND = 'naomi.mail.backends.naomi.NaomiBackend'

(
    EMAIL_HOST,
    EMAIL_PORT,
    EMAIL_HOST_USER,
    EMAIL_HOST_PASSWORD,
) = (
    'localhost',
    '1025',
    '',
    '',
)

# ADMIN
# ------------------------------------------------------------------------------
@@ -232,23 +270,34 @@ INSTALLED_APPS += ['{{cookiecutter.project_slug}}.taskapp.celery.CeleryAppConfig']

if USE_TZ:
    # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-timezone
    CELERY_TIMEZONE = TIME_ZONE
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url
CELERY_BROKER_URL = env('CELERY_BROKER_URL')
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content
CELERY_ACCEPT_CONTENT = ['json']
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer
CELERY_TASK_SERIALIZER = 'json'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
CELERY_RESULT_SERIALIZER = 'json'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit
# TODO: set to whatever value is adequate in your circumstances

# Celery Broker
(
    CELERY_BROKER_URL,
    CELERY_RESULT_BACKEND,
    CELERY_ACCEPT_CONTENT,
    CELERY_TASK_SERIALIZER,
    CELERY_RESULT_SERIALIZER,
    CELERY_IGNORE_RESULT,
) = (
    getenv('CELERY_BROKER_URL', 'redis://localhost:6379/1'),
    getenv('CELERY_RESULT_BACKEND', 'django-db'),
    getenv('CELERY_ACCEPT_CONTENT', ['json']),
    getenv('CELERY_TASK_SERIALIZER', 'json'),
    getenv('CELERY_RESULT_SERIALIZER', 'json'),
    getenv('CELERY_IGNORE_RESULT', False),
)
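# Caveat: os.getenv always returns strings, so any of these overridden via the
# environment arrive as str; only the defaults above keep their Python types
# (the ['json'] list and the False boolean).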
|
||||
|
||||
CELERYD_TASK_TIME_LIMIT = 5 * 60
|
||||
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
|
||||
# TODO: set to whatever value is adequate in your circumstances
|
||||
CELERYD_TASK_SOFT_TIME_LIMIT = 60
|
||||
|
||||
{%- endif %}
|
||||
|
||||
{% if cookiecutter.use_prometheus == 'y' -%}
|
||||
INSTALLED_APPS+= ['django_prometheus']
|
||||
|
||||
{%- endif %}
|
||||
# django-allauth
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
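For Prometheus to have anything to scrape, the metrics endpoint still has to be routed. The diff does not show a URLconf change, so the following is a sketch of the usual django-prometheus hookup on Django 2.0+, an assumption rather than something this PR contains:

.. code-block:: python

    # config/urls.py (sketch): expose /metrics for Prometheus scraping
    from django.urls import include, path

    urlpatterns = [
        # ... existing routes ...
        path('', include('django_prometheus.urls')),
    ]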
@ -31,10 +31,7 @@ TEMPLATES[0]['OPTIONS']['debug'] = DEBUG  # noqa F405

# EMAIL
# ------------------------------------------------------------------------------
{% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'y' -%}
# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = env('EMAIL_HOST', default='mailhog')
{%- elif cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' -%}
{%- if cookiecutter.use_mailhog == 'y' -%}
# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = 'localhost'
{%- else -%}

@ -61,12 +58,6 @@ DEBUG_TOOLBAR_CONFIG = {
}
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2']
{% if cookiecutter.use_docker == 'y' -%}
if env('USE_DOCKER') == 'yes':
    import socket
    hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
    INTERNAL_IPS += [ip[:-1] + '1' for ip in ips]
{%- endif %}

# django-extensions
# ------------------------------------------------------------------------------
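The ``ip[:-1] + '1'`` line above is worth unpacking: it rewrites the container's own address (e.g. ``172.18.0.3``) into the bridge gateway (``172.18.0.1``), which is where requests from the Docker host appear to originate, so the debug toolbar renders. Note it only works while the final octet is a single digit; a more robust sketch:

.. code-block:: python

    # Sketch: derive the Docker bridge gateway from the container's own IPs.
    import socket

    hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
    # Replace the whole final octet instead of just its last character.
    gateways = ['.'.join(ip.split('.')[:-1] + ['1']) for ip in ips]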
@ -4,6 +4,11 @@ import logging
{% endif -%}
from .base import *  # noqa
from .base import env
from os import getenv


def eval_bool(env_value, default=None):
    return {'true': True, 'false': False}.get(str(env_value).lower(), default)


# GENERAL
# ------------------------------------------------------------------------------
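``eval_bool`` normalizes whatever string lands in the environment into a real boolean, falling back to ``default`` for anything unrecognized. A few illustrative calls (hypothetical values, not from the diff):

.. code-block:: python

    eval_bool('TRUE')       # True  -- case-insensitive match
    eval_bool('0')          # None  -- unrecognized, returns the default
    eval_bool('0', False)   # False
    eval_bool(None, True)   # True  -- unset env vars come back as None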
@ -12,11 +17,7 @@ SECRET_KEY = env('DJANGO_SECRET_KEY')
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['{{ cookiecutter.domain_name }}'])

# DATABASES
# ------------------------------------------------------------------------------
DATABASES['default'] = env.db('DATABASE_URL')  # noqa F405
DATABASES['default']['ATOMIC_REQUESTS'] = True  # noqa F405
DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60)  # noqa F405
DEBUG = eval_bool(getenv('DEBUG'), True)

# CACHES
# ------------------------------------------------------------------------------
@ -66,20 +67,6 @@ X_FRAME_OPTIONS = 'DENY'
# ------------------------------------------------------------------------------
# https://django-storages.readthedocs.io/en/latest/#installation
INSTALLED_APPS += ['storages']  # noqa F405
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_QUERYSTRING_AUTH = False
# DO NOT change these unless you know what you're doing.
_AWS_EXPIRY = 60 * 60 * 24 * 7
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': f'max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate',
}

# STATIC
# ------------------------------------------------------------------------------
@ -87,14 +74,12 @@ AWS_S3_OBJECT_PARAMETERS = {
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
{%- else %}
STATICFILES_STORAGE = 'config.settings.production.StaticRootS3Boto3Storage'
STATIC_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/'
{%- endif %}

# MEDIA
# ------------------------------------------------------------------------------
{% if cookiecutter.use_whitenoise == 'y' -%}
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
MEDIA_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/'
{%- else %}
# region http://stackoverflow.com/questions/10390244/
# Full-fledged class: https://stackoverflow.com/a/18046120/104731

@ -112,7 +97,6 @@ class MediaRootS3Boto3Storage(S3Boto3Storage):

# endregion
DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3Boto3Storage'
MEDIA_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/'
{%- endif %}

# TEMPLATES
@ -140,6 +124,20 @@ SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[{{cookiecutter.project_name}}]')

EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'

(
    EMAIL_HOST,
    EMAIL_PORT,
    EMAIL_HOST_USER,
    EMAIL_HOST_PASSWORD,
) = (
    getenv('EMAIL_HOST'),
    getenv('EMAIL_PORT'),
    getenv('EMAIL_HOST_USER'),
    getenv('EMAIL_HOST_PASSWORD'),
)

# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL regex.
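One caveat with reading these straight from ``os.getenv``: every value comes back as a string (or ``None`` when the variable is unset), while Django documents ``EMAIL_PORT`` as an integer. A defensive sketch, assuming the same variable name; the ``587`` fallback is an illustrative choice, not from the diff:

.. code-block:: python

    # Sketch: coerce EMAIL_PORT to int, with a common submission-port default.
    from os import getenv

    EMAIL_PORT = int(getenv('EMAIL_PORT', '587'))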
@ -183,7 +181,6 @@ COMPRESS_URL = STATIC_URL
# ------------------------------------------------------------------------------
# https://github.com/antonagestam/collectfast#installation
INSTALLED_APPS = ['collectfast'] + INSTALLED_APPS  # noqa F405
AWS_PRELOAD_METADATA = True

{% endif %}
{%- if cookiecutter.use_sentry == 'y' -%}
130
{{cookiecutter.project_slug}}/deployment/deploy.yml
Normal file
@ -0,0 +1,130 @@
---
apiVersion: v1
kind: Service
metadata:
  name: {{cookiecutter.project_slug}}
  labels:
    app: {{cookiecutter.project_slug}}
spec:
  ports:
    - port: 80
      targetPort: 8000
  selector:
    app: {{cookiecutter.project_slug}}

---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: {{cookiecutter.project_slug}}
spec:
  replicas: 2
  template:
    metadata:
      labels:
        app: {{cookiecutter.project_slug}}
        fleet: production

    spec:
      affinity:
        podAntiAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            - labelSelector:
                matchExpressions:
                  - key: app
                    operator: In
                    values:
                      - {{cookiecutter.project_slug}}
              topologyKey: kubernetes.io/hostname
      imagePullSecrets:
        - name: myregistrykey
      containers:
        - name: {{cookiecutter.project_slug}}
          image: menacommere-docker-registry.bintray.io/{{cookiecutter.project_slug}}
          imagePullPolicy: Always
          env:
            - name: "DEPLOY_ENV"
              value: "{{cookiecutter.project_slug}}"
            - name: "CONSUL_TOKEN"
              valueFrom:
                secretKeyRef:
                  key: accesstoken
                  name: consul-token
            - name: DB_HOST
              valueFrom:
                secretKeyRef:
                  key: hostname
                  name: mysql-prod
            - name: DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  key: password
                  name: mysql-prod
            - name: DB_USER
              valueFrom:
                secretKeyRef:
                  key: username
                  name: mysql-prod
          volumeMounts:
            - name: tz-config
              mountPath: /etc/localtime
      volumes:
        - name: tz-config
          hostPath:
            path: /usr/share/zoneinfo/Africa/Cairo
      nodeSelector:
        environment: production

---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: {{cookiecutter.project_slug}}-worker
spec:
  replicas: 1
  template:
    metadata:
      labels:
        app: {{cookiecutter.project_slug}}-worker
        fleet: production

    spec:
      imagePullSecrets:
        - name: myregistrykey
      containers:
        - name: {{cookiecutter.project_slug}}
          image: menacommere-docker-registry.bintray.io/{{cookiecutter.project_slug}}
          args: ["./run.sh", "worker"]
          imagePullPolicy: Always
          env:
            - name: "DEPLOY_ENV"
              value: "{{cookiecutter.project_slug}}"
            - name: "CONSUL_TOKEN"
              valueFrom:
                secretKeyRef:
                  key: accesstoken
                  name: consul-token
            - name: DB_HOST
              valueFrom:
                secretKeyRef:
                  key: hostname
                  name: mysql-prod
            - name: DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  key: password
                  name: mysql-prod
            - name: DB_USER
              valueFrom:
                secretKeyRef:
                  key: username
                  name: mysql-prod
          volumeMounts:
            - name: tz-config
              mountPath: /etc/localtime
      volumes:
        - name: tz-config
          hostPath:
            path: /usr/share/zoneinfo/Africa/Cairo
      nodeSelector:
        environment: production
14
{{cookiecutter.project_slug}}/deployment/ingress.yml
Normal file
@ -0,0 +1,14 @@
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: {{cookiecutter.project_slug}}
spec:
  rules:
    - host: {{cookiecutter.domain_name}}
      http:
        paths:
          - path: /
            backend:
              serviceName: {{cookiecutter.project_slug}}
              servicePort: 80
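Because these manifests are Jinja templates, a rendered copy can be sanity-checked before it reaches the cluster. A minimal sketch, assuming PyYAML is installed and cookiecutter has already rendered the template (the raw ``{{...}}`` placeholders would not parse as YAML):

.. code-block:: python

    # Sketch: parse every document in deploy.yml and list kind/name pairs.
    import yaml

    with open('deployment/deploy.yml') as f:
        for doc in yaml.safe_load_all(f):
            if doc:
                print(doc['kind'], doc['metadata']['name'])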
@ -1,153 +0,0 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	-rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhc"

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
@ -1 +0,0 @@
# Included so that Django's startproject command runs against the docs directory
@ -1,255 +0,0 @@
# {{ cookiecutter.project_name }} documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import os
import sys

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "{{ cookiecutter.project_name }}"
copyright = """{% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}"""

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.1"
# The full version, including alpha/beta/rc tags.
release = "0.1"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "default"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = "{{ cookiecutter.project_slug }}doc"


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    (
        "index",
        "{{ cookiecutter.project_slug }}.tex",
        "{{ cookiecutter.project_name }} Documentation",
        """{{ cookiecutter.author_name }}""",
        "manual",
    )
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (
        "index",
        "{{ cookiecutter.project_slug }}",
        "{{ cookiecutter.project_name }} Documentation",
        ["""{{ cookiecutter.author_name }}"""],
        1,
    )
]

# If true, show URL addresses after external links.
# man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        "index",
        "{{ cookiecutter.project_slug }}",
        "{{ cookiecutter.project_name }} Documentation",
        """{{ cookiecutter.author_name }}""",
        "{{ cookiecutter.project_name }}",
        """{{ cookiecutter.description }}""",
        "Miscellaneous",
    )
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
@ -1,4 +0,0 @@
Deploy
========

This is where you describe how the project is deployed in production.
@ -1,186 +0,0 @@
Developing with Docker
======================

You can develop your application in a `Docker`_ container for simpler deployment onto bare Linux machines later. These instructions assume an `Amazon Web Services`_ EC2 instance, but they should work on any machine with Docker > 1.3 and `Docker compose`_ installed.

.. _Docker: https://www.docker.com/
.. _Amazon Web Services: http://aws.amazon.com/
.. _Docker compose: https://docs.docker.com/compose/

Setting up
^^^^^^^^^^

Docker encourages running one container for each process. This might mean one container for your web server, one for the Django application and a third for your database. Once you're happy composing containers in this way you can easily add more, such as a `Redis`_ cache.

.. _Redis: http://redis.io/

The Docker compose tool (previously known as `fig`_) makes linking these containers easy. An example set up for your Cookiecutter Django project might look like this:

.. _fig: http://www.fig.sh/

::

    webapp/ # Your cookiecutter project would be in here
        Dockerfile
        ...
    database/
        Dockerfile
        ...
    webserver/
        Dockerfile
        ...
    production.yml

Each component of your application would get its own `Dockerfile`_. The rest of this example assumes you are using the `base postgres image`_ for your database. Your database settings in `config/base.py` might then look something like:

.. _Dockerfile: https://docs.docker.com/reference/builder/
.. _base postgres image: https://registry.hub.docker.com/_/postgres/

.. code-block:: python

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'postgres',
            'USER': 'postgres',
            'HOST': 'database',
            'PORT': 5432,
        }
    }

The `Docker compose documentation`_ explains in detail what you can accomplish in the `production.yml` file, but an example configuration might look like this:

.. _Docker compose documentation: https://docs.docker.com/compose/#compose-documentation

.. code-block:: yaml

    database:
        build: database
    webapp:
        build: webapp
        command: /usr/bin/python3.6 manage.py runserver 0.0.0.0:8000 # dev setting
        # command: gunicorn -b 0.0.0.0:8000 wsgi:application # production setting
        volumes:
            - webapp/your_project_name:/path/to/container/workdir/
        links:
            - database
    webserver:
        build: webserver
        ports:
            - "80:80"
            - "443:443"
        links:
            - webapp

We'll ignore the webserver for now (you'll want to comment that part out while we do). A working Dockerfile to run your cookiecutter application might look like this:

::

    FROM ubuntu:14.04
    ENV REFRESHED_AT 2015-01-13

    # update packages and prepare to build software
    RUN ["apt-get", "update"]
    RUN ["apt-get", "-y", "install", "build-essential", "vim", "git", "curl"]
    RUN ["locale-gen", "en_GB.UTF-8"]

    # install latest python
    RUN ["apt-get", "-y", "build-dep", "python3-dev", "python3-imaging"]
    RUN ["apt-get", "-y", "install", "python3-dev", "python3-imaging", "python3-pip"]

    # prepare postgreSQL support
    RUN ["apt-get", "-y", "build-dep", "python3-psycopg2"]

    # move into our working directory
    # ADD must be after chown see http://stackoverflow.com/a/26145444/1281947
    RUN ["groupadd", "python"]
    RUN ["useradd", "python", "-s", "/bin/bash", "-m", "-g", "python", "-G", "python"]
    ENV HOME /home/python
    WORKDIR /home/python
    RUN ["chown", "-R", "python:python", "/home/python"]
    ADD ./ /home/python

    # manage requirements
    ENV REQUIREMENTS_REFRESHED_AT 2015-02-25
    RUN ["pip3", "install", "-r", "requirements.txt"]

    # run the container as a non-root user
    USER python:python

Running `sudo docker-compose -f production.yml build` will follow the instructions in your `production.yml` file and build the database container, then your webapp, before mounting your cookiecutter project files as a volume in the webapp container and linking to the database. Our example yaml file runs in development mode but changing it to production mode is as simple as commenting out the line using `runserver` and uncommenting the line using `gunicorn`.

Both are set to run on port `0.0.0.0:8000`, which is where the Docker daemon will discover it. You can now run `sudo docker-compose -f production.yml up` and browse to `localhost:8000` to see your application running.

Deployment
^^^^^^^^^^

You'll need a webserver container for deployment. An example setup for `Nginx`_ might look like this:

.. _Nginx: http://wiki.nginx.org/Main

::

    FROM ubuntu:14.04
    ENV REFRESHED_AT 2015-02-11

    # get the nginx package and set it up
    RUN ["apt-get", "update"]
    RUN ["apt-get", "-y", "install", "nginx"]

    # forward request and error logs to docker log collector
    RUN ln -sf /dev/stdout /var/log/nginx/access.log
    RUN ln -sf /dev/stderr /var/log/nginx/error.log
    VOLUME ["/var/cache/nginx"]
    EXPOSE 80 443

    # load nginx conf
    ADD ./site.conf /etc/nginx/sites-available/your_cookiecutter_project
    RUN ["ln", "-s", "/etc/nginx/sites-available/your_cookiecutter_project", "/etc/nginx/sites-enabled/your_cookiecutter_project"]
    RUN ["rm", "-rf", "/etc/nginx/sites-available/default"]

    # start the server
    CMD ["nginx", "-g", "daemon off;"]

That Dockerfile assumes you have an Nginx conf file named `site.conf` in the same directory as the webserver Dockerfile. A very basic example, which forwards traffic onto the development server or gunicorn for processing, would look like this:

::

    # see http://serverfault.com/questions/577370/how-can-i-use-environment-variables-in-nginx-conf#comment730384_577370
    upstream localhost {
        server webapp_1:8000;
    }
    server {
        location / {
            proxy_pass http://localhost;
        }
    }

Running `sudo docker-compose -f production.yml build webserver` will build your server container. Running `sudo docker-compose -f production.yml up` will now expose your application directly on `localhost` (no need to specify the port number).

Building and running your app on EC2
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

All you now need to do to run your app in production is:

* Create an empty EC2 Linux instance (any Linux machine should do).

* Install your preferred source control solution, Docker and Docker compose on the new instance.

* Pull in your code from source control. The root directory should be the one with your `production.yml` file in it.

* Run `sudo docker-compose -f production.yml build` and `sudo docker-compose -f production.yml up`.

* Assign an `Elastic IP address`_ to your new machine.

.. _Elastic IP address: https://aws.amazon.com/articles/1346

* Point your domain name to the elastic IP.

**Be careful with Elastic IPs** because, on the AWS free tier, if you assign one and then stop the machine you will incur charges while the machine is down (presumably because you're preventing them allocating the IP to someone else).

Security advisory
^^^^^^^^^^^^^^^^^

The setup described in these instructions will get you up and running, but it hasn't been audited for security. If you are running your own setup like this it is always advisable to, at a minimum, examine your application with a tool like `OWASP ZAP`_ to see what security holes you might be leaving open.

.. _OWASP ZAP: https://www.owasp.org/index.php/OWASP_Zed_Attack_Proxy_Project
@ -1,26 +0,0 @@
.. {{ cookiecutter.project_name }} documentation master file, created by
   sphinx-quickstart.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to {{ cookiecutter.project_name }}'s documentation!
====================================================================

Contents:

.. toctree::
   :maxdepth: 2

   install
   deploy
   docker_ec2
   tests



Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@ -1,4 +0,0 @@
Install
=========

This is where you write how to get a new laptop to run this project.
@ -1,190 +0,0 @@
@ECHO OFF

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)

if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.qhc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)

if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

:end
@ -1,64 +0,0 @@
Docker Remote Debugging
=======================

To connect to the remote Python interpreter inside Docker, you first have to make sure that PyCharm is aware of your Docker setup.

Go to *Settings > Build, Execution, Deployment > Docker*. If you are on Linux, you can use Docker directly through its socket `unix:///var/run/docker.sock`; if you are on Windows or Mac, make sure that you have docker-machine installed, then you can simply *Import credentials from Docker Machine*.

.. image:: images/1.png

Configure Remote Python Interpreter
-----------------------------------

This repository comes with already-prepared "Run/Debug Configurations" for Docker.

.. image:: images/2.png

But as you can see, at the beginning there is something wrong with them: they have a red X on the Django icon and cannot be used without configuring a remote Python interpreter. To do that, you first have to go to *Settings > Build, Execution, Deployment*.

Next, you have to add a new remote Python interpreter, based on the already-tested deployment settings. Go to *Settings > Project > Project Interpreter*. Click on the cog icon, and click *Add Remote*.

.. image:: images/3.png

Switch to *Docker Compose*, select the `local.yml` file from the directory of your project, then set *Service name* to `django`.

.. image:: images/4.png

Having done that, click *OK*. Close the *Settings* panel and wait a few seconds...

.. image:: images/7.png

After a few seconds, all *Run/Debug Configurations* should be ready to use.

.. image:: images/8.png

**Things you can do with the provided configuration**:

* run and debug python code
  .. image:: images/f1.png
* run and debug tests
  .. image:: images/f2.png
  .. image:: images/f3.png
* run and debug migrations or different django management commands
  .. image:: images/f4.png
* and many others...

Known issues
------------

* PyCharm hangs on "Connecting to Debugger"

.. image:: images/issue1.png

This might be the fault of your firewall. Take a look at this ticket - https://youtrack.jetbrains.com/issue/PY-18913

* Modified files in the `.idea` directory

Most of the files from `.idea/` were added to `.gitignore`, with a few exceptions made to provide a "ready to go" configuration. After adding the remote interpreter, some of these files are altered by PyCharm:

.. image:: images/issue2.png

In theory you can remove them from the repository, but then other people will lose the ability to initialize the project from the provided configurations as you did. To get rid of this annoying state, you can run the command::

    $ git update-index --assume-unchanged {{cookiecutter.project_slug}}.iml
@ -1,81 +0,0 @@
version: '3'

volumes:
  local_postgres_data: {}
  local_postgres_data_backups: {}

services:
  django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
    build:
      context: .
      dockerfile: ./compose/local/django/Dockerfile
    image: {{ cookiecutter.project_slug }}_local_django
    depends_on:
      - postgres
{%- if cookiecutter.use_mailhog == 'y' %}
      - mailhog
{%- endif %}
    volumes:
      - .:/app
    env_file:
      - ./.envs/.local/.django
      - ./.envs/.local/.postgres
    ports:
      - "8000:8000"
    command: /start

  postgres:
    build:
      context: .
      dockerfile: ./compose/production/postgres/Dockerfile
    image: {{ cookiecutter.project_slug }}_production_postgres
    volumes:
      - local_postgres_data:/var/lib/postgresql/data
      - local_postgres_data_backups:/backups
    env_file:
      - ./.envs/.local/.postgres
{%- if cookiecutter.use_mailhog == 'y' %}

  mailhog:
    image: mailhog/mailhog:v1.0.0
    ports:
      - "8025:8025"

{%- endif %}
{%- if cookiecutter.use_celery == 'y' %}

  redis:
    image: redis:3.2

  celeryworker:
    <<: *django
    image: {{ cookiecutter.project_slug }}_local_celeryworker
    depends_on:
      - redis
      - postgres
      {% if cookiecutter.use_mailhog == 'y' -%}
      - mailhog
      {%- endif %}
    ports: []
    command: /start-celeryworker

  celerybeat:
    <<: *django
    image: {{ cookiecutter.project_slug }}_local_celerybeat
    depends_on:
      - redis
      - postgres
      {% if cookiecutter.use_mailhog == 'y' -%}
      - mailhog
      {%- endif %}
    ports: []
    command: /start-celerybeat

  flower:
    <<: *django
    image: {{ cookiecutter.project_slug }}_local_flower
    ports:
      - "5555:5555"
    command: /start-flower

{%- endif %}
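The ``&django`` / ``<<: *django`` pair is standard YAML anchor-and-merge: the worker, beat, and flower services inherit the full ``django`` service definition and override only what differs. A tiny sketch of the same mechanism, assuming PyYAML is installed:

.. code-block:: python

    # Sketch: a YAML merge key reuses a mapping the way the compose file does.
    import yaml

    doc = """
    base: &base
      image: app
      command: /start
    worker:
      <<: *base
      command: /start-worker
    """
    # -> worker keeps image 'app' but overrides command with '/start-worker'
    print(yaml.safe_load(doc)['worker'])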
@ -3,6 +3,7 @@ import os
import sys

if __name__ == "__main__":
    # must be overridden in the env for production to "config.settings.production"
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")

    try:
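``os.environ.setdefault`` only writes the key when it is absent, which is what makes the comment above work: an externally exported value wins over the local default. A quick illustration (the exported value is hypothetical):

.. code-block:: python

    # Sketch: an exported DJANGO_SETTINGS_MODULE takes precedence.
    import os

    os.environ['DJANGO_SETTINGS_MODULE'] = 'config.settings.production'  # e.g. set by the deploy env
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')
    print(os.environ['DJANGO_SETTINGS_MODULE'])  # config.settings.production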
25
{{cookiecutter.project_slug}}/newrelic-customizer.py
Normal file
@ -0,0 +1,25 @@
from configparser import ConfigParser  # SafeConfigParser is a deprecated alias for ConfigParser
from os import getenv
import argparse


def main(options):
    postfix = options.get("postfix", "")

    parser = ConfigParser()
    parser.read('newrelic.ini')

    app_name = getenv("NEW_RELIC_APP_NAME", "{{ cookiecutter.project_slug }}")
    if postfix:
        app_name = app_name + "-" + postfix
    parser.set('newrelic', 'app_name', app_name)

    with open('newrelic.ini', 'w') as configfile:
        parser.write(configfile)


if __name__ == '__main__':
    arg_p = argparse.ArgumentParser()
    arg_p.add_argument("-p", "--postfix", help="postfix for newrelic app name")
    args = vars(arg_p.parse_args())
    main(args)
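For example, running ``python newrelic-customizer.py --postfix worker`` before starting a worker process rewrites ``app_name`` in ``newrelic.ini`` to ``{{ cookiecutter.project_slug }}-worker`` (or to ``$NEW_RELIC_APP_NAME-worker`` when that variable is set), so web and worker processes report to New Relic as separate applications. The ``worker`` postfix here is only an illustration; any label works.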
207
{{cookiecutter.project_slug}}/newrelic.ini
Normal file
|
@ -0,0 +1,207 @@
|
|||
# ---------------------------------------------------------------------------
|
||||
|
||||
#
|
||||
# This file configures the New Relic Python Agent.
|
||||
#
|
||||
# The path to the configuration file should be supplied to the function
|
||||
# newrelic.agent.initialize() when the agent is being initialized.
|
||||
#
|
||||
# The configuration file follows a structure similar to what you would
|
||||
# find for Microsoft Windows INI files. For further information on the
|
||||
# configuration file format see the Python ConfigParser documentation at:
|
||||
#
|
||||
# http://docs.python.org/library/configparser.html
|
||||
#
|
||||
# For further discussion on the behaviour of the Python agent that can
|
||||
# be configured via this configuration file see:
|
||||
#
|
||||
# http://newrelic.com/docs/python/python-agent-configuration
|
||||
#
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Here are the settings that are common to all environments.
|
||||
|
||||
[newrelic]
|
||||
|
||||
# You must specify the license key associated with your New
|
||||
# Relic account. This key binds the Python Agent's data to your
|
||||
# account in the New Relic service.
|
||||
license_key =
|
||||
|
||||
# The application name. Set this to be the name of your
|
||||
# application as you would like it to show up in New Relic UI.
|
||||
# The UI will then auto-map instances of your application into a
|
||||
# entry on your home dashboard page.
|
||||
app_name = {{ cookiecutter.project_slug }}
|
||||
|
||||
# When "true", the agent collects performance data about your
|
||||
# application and reports this data to the New Relic UI at
|
||||
# newrelic.com. This global switch is normally overridden for
|
||||
# each environment below.
|
||||
monitor_mode = true
|
||||
|
||||
# Sets the name of a file to log agent messages to. Useful for
|
||||
# debugging any issues with the agent. This is not set by
|
||||
# default as it is not known in advance what user your web
|
||||
# application processes will run as and where they have
|
||||
# permission to write to. Whatever you set this to you must
|
||||
# ensure that the permissions for the containing directory and
|
||||
# the file itself are correct, and that the user that your web
|
||||
# application runs as can write to the file. If not able to
|
||||
# write out a log file, it is also possible to say "stderr" and
|
||||
# output to standard error output. This would normally result in
|
||||
# output appearing in your web server log.
|
||||
#log_file = /tmp/newrelic-python-agent.log
|
||||
|
||||
# Sets the level of detail of messages sent to the log file, if
|
||||
# a log file location has been provided. Possible values, in
|
||||
# increasing order of detail, are: "critical", "error", "warning",
|
||||
# "info" and "debug". When reporting any agent issues to New
|
||||
# Relic technical support, the most useful setting for the
|
||||
# support engineers is "debug". However, this can generate a lot
|
||||
# of information very quickly, so it is best not to keep the
|
||||
# agent at this level for longer than it takes to reproduce the
|
||||
# problem you are experiencing.
|
||||
log_level = info
|
||||
|
||||
# High Security Mode enforces certain security settings, and prevents
|
||||
# them from being overridden, so that no sensitive data is sent to New
|
||||
# Relic. Enabling High Security Mode means that request parameters are
|
||||
# not collected and SQL can not be sent to New Relic in its raw form.
|
||||
# To activate High Security Mode, it must be set to 'true' in this
|
||||
# local .ini configuration file AND be set to 'true' in the
|
||||
# server-side configuration in the New Relic user interface. For
|
||||
# details, see
|
||||
# https://docs.newrelic.com/docs/subscriptions/high-security
|
||||
high_security = false
|
||||
|
||||
# The Python Agent will attempt to connect directly to the New
|
||||
# Relic service. If there is an intermediate firewall between
|
||||
# your host and the New Relic service that requires you to use a
|
||||
# HTTP proxy, then you should set both the "proxy_host" and
|
||||
# "proxy_port" settings to the required values for the HTTP
|
||||
# proxy. The "proxy_user" and "proxy_pass" settings should
|
||||
# additionally be set if proxy authentication is implemented by
|
||||
# the HTTP proxy. The "proxy_scheme" setting dictates what
|
||||
# protocol scheme is used in talking to the HTTP proxy. This
|
||||
# would normally always be set as "http" which will result in the
|
||||
# agent then using a SSL tunnel through the HTTP proxy for end to
|
||||
# end encryption.
|
||||
# proxy_scheme = http
|
||||
# proxy_host = hostname
|
||||
# proxy_port = 8080
|
||||
# proxy_user =
|
||||
# proxy_pass =
|
||||
|
||||
# Capturing request parameters is off by default. To enable the
|
||||
# capturing of request parameters, first ensure that the setting
|
||||
# "attributes.enabled" is set to "true" (the default value), and
|
||||
# then add "request.parameters.*" to the "attributes.include"
|
||||
# setting. For details about attributes configuration, please
|
||||
# consult the documentation.
|
||||
# attributes.include = request.parameters.*
|
||||
|
||||
# The transaction tracer captures deep information about slow
|
||||
# transactions and sends this to the UI on a periodic basis. The
|
||||
# transaction tracer is enabled by default. Set this to "false"
|
||||
# to turn it off.
|
||||
transaction_tracer.enabled = true
|
||||
|
||||
# Threshold in seconds for when to collect a transaction trace.
|
||||
# When the response time of a controller action exceeds this
|
||||
# threshold, a transaction trace will be recorded and sent to
|
||||
# the UI. Valid values are any positive float value, or (default)
|
||||
# "apdex_f", which will use the threshold for a dissatisfying
|
||||
# Apdex controller action - four times the Apdex T value.
|
||||
transaction_tracer.transaction_threshold = apdex_f
|
||||
|
||||
# When the transaction tracer is on, SQL statements can
|
||||
# optionally be recorded. The recorder has three modes, "off"
|
||||
# which sends no SQL, "raw" which sends the SQL statement in its
|
||||
# original form, and "obfuscated", which strips out numeric and
|
||||
# string literals.
|
||||
transaction_tracer.record_sql = obfuscated
|
||||
|
||||
# Threshold in seconds for when to collect stack trace for a SQL
|
||||
# call. In other words, when SQL statements exceed this
|
||||
# threshold, then capture and send to the UI the current stack
|
||||
# trace. This is helpful for pinpointing where long SQL calls
|
||||
# originate from in an application.
|
||||
transaction_tracer.stack_trace_threshold = 0.5
|
||||
|
||||
# Determines whether the agent will capture query plans for slow
|
||||
# SQL queries. Only supported in MySQL and PostgreSQL. Set this
|
||||
# to "false" to turn it off.
|
||||
transaction_tracer.explain_enabled = true
|
||||
|
||||
# Threshold for query execution time below which query plans
|
||||
# will not not be captured. Relevant only when "explain_enabled"
|
||||
# is true.
|
||||
transaction_tracer.explain_threshold = 0.5
|
||||
|
||||
# Space separated list of function or method names in form
|
||||
# 'module:function' or 'module:class.function' for which
|
||||
# additional function timing instrumentation will be added.
|
||||
transaction_tracer.function_trace =
|
||||
|
||||
# The error collector captures information about uncaught
|
||||
# exceptions or logged exceptions and sends them to UI for
|
||||
# viewing. The error collector is enabled by default. Set this
|
||||
# to "false" to turn it off.
|
||||
error_collector.enabled = true
|
||||
|
||||
# To stop specific errors from reporting to the UI, set this to
|
||||
# a space separated list of the Python exception type names to
|
||||
# ignore. The exception name should be of the form 'module:class'.
|
||||
error_collector.ignore_errors =

# Browser monitoring is the Real User Monitoring feature of the UI.
# For those Python web frameworks that are supported, this
# setting enables the auto-insertion of the browser monitoring
# JavaScript fragments.
browser_monitoring.auto_instrument = true

# A thread profiling session can be scheduled via the UI when
# this option is enabled. The thread profiler will periodically
# capture a snapshot of the call stack for each active thread in
# the application to construct a statistically representative
# call tree.
thread_profiler.enabled = true

# Your application deployments can be recorded through the
# New Relic REST API. To use this feature provide your API key
# below then use the `newrelic-admin record-deploy` command.
# api_key =
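# (For example, once api_key is set, a deploy might be recorded with:
# newrelic-admin record-deploy newrelic.ini "deployed v0.1.0")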

# Distributed tracing lets you see the path that a request takes
# through your distributed system. Enabling distributed tracing
# changes the behavior of some New Relic features, so carefully
# consult the transition guide before you enable this feature:
# https://docs.newrelic.com/docs/transition-guide-distributed-tracing
distributed_tracing.enabled = false

# ---------------------------------------------------------------------------

#
# The application environments. These are specific settings which
# override the common environment settings. The settings related to a
# specific environment will be used when the environment argument to the
# newrelic.agent.initialize() function has been defined to be either
# "development", "test", "staging" or "production".
#

[newrelic:development]
monitor_mode = false

[newrelic:test]
monitor_mode = false

[newrelic:staging]
app_name = Python Application (Staging)
monitor_mode = true

[newrelic:production]
monitor_mode = true

# ---------------------------------------------------------------------------
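
The environment sections above are selected by the second argument to newrelic.agent.initialize(). A minimal sketch of how the generated project might pick one, assuming initialization happens in the WSGI module and the environment name comes from a DJANGO_ENV variable (both assumptions, not part of this diff):

# Hypothetical excerpt from config/wsgi.py -- not part of this diff.
import os

import newrelic.agent

# Selects the matching [newrelic:<environment>] override section from
# the config file above; DJANGO_ENV is an assumed variable name.
newrelic.agent.initialize('newrelic.ini', os.environ.get('DJANGO_ENV', 'development'))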

@ -1,69 +0,0 @@
version: '3'

volumes:
  production_postgres_data: {}
  production_postgres_data_backups: {}
  production_caddy: {}

services:
  django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
    build:
      context: .
      dockerfile: ./compose/production/django/Dockerfile
    image: {{ cookiecutter.project_slug }}_production_django
    depends_on:
      - postgres
      - redis
    env_file:
      - ./.envs/.production/.django
      - ./.envs/.production/.postgres
    command: /start

  postgres:
    build:
      context: .
      dockerfile: ./compose/production/postgres/Dockerfile
    image: {{ cookiecutter.project_slug }}_production_postgres
    volumes:
      - production_postgres_data:/var/lib/postgresql/data
      - production_postgres_data_backups:/backups
    env_file:
      - ./.envs/.production/.postgres

  caddy:
    build:
      context: .
      dockerfile: ./compose/production/caddy/Dockerfile
    image: {{ cookiecutter.project_slug }}_production_caddy
    depends_on:
      - django
    volumes:
      - production_caddy:/root/.caddy
    env_file:
      - ./.envs/.production/.caddy
    ports:
      - "0.0.0.0:80:80"
      - "0.0.0.0:443:443"

  redis:
    image: redis:3.2
{%- if cookiecutter.use_celery == 'y' %}

  celeryworker:
    <<: *django
    image: {{ cookiecutter.project_slug }}_production_celeryworker
    command: /start-celeryworker

  celerybeat:
    <<: *django
    image: {{ cookiecutter.project_slug }}_production_celerybeat
    command: /start-celerybeat

  flower:
    <<: *django
    image: {{ cookiecutter.project_slug }}_production_flower
    ports:
      - "5555:5555"
    command: /start-flower

{%- endif %}

@ -11,9 +11,7 @@ whitenoise==4.1 # https://github.com/evansd/whitenoise
redis>=2.10.5 # https://github.com/antirez/redis
{%- if cookiecutter.use_celery == "y" %}
celery==4.2.1 # pyup: <5.0 # https://github.com/celery/celery
{%- if cookiecutter.use_docker == 'y' %}
flower==0.9.2 # https://github.com/mher/flower
{%- endif %}

{%- endif %}

# Django

@ -27,7 +25,27 @@ django-crispy-forms==1.7.2 # https://github.com/django-crispy-forms/django-crispy-forms
django-compressor==2.2 # https://github.com/django-compressor/django-compressor
{%- endif %}
django-redis==4.9.0 # https://github.com/niwinz/django-redis
django-debug-toolbar==1.10.1 # https://github.com/jazzband/django-debug-toolbar
django-extensions==2.1.0 # https://github.com/django-extensions/django-extensions

# Django REST Framework
djangorestframework==3.8.2 # https://github.com/encode/django-rest-framework
coreapi==2.3.3 # https://github.com/core-api/python-client

# Customized

# MySQL
mysqlclient==1.3.10
dj-database-url==0.5.0

# monitoring service
{%- if cookiecutter.use_newrelic == "y" %}
newrelic==4.4.0.103
{%- endif %}

{%- if cookiecutter.use_prometheus == "y" %}
django-prometheus==1.0.15
{%- endif %}

# flake8 reports
flake8-html==0.4.0
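
The "# Customized" block above swaps the database driver to MySQL (mysqlclient) and adds dj-database-url for URL-based configuration. A minimal sketch of how a settings module might consume it -- the DATABASE_URL variable name and the settings path are assumptions, not shown in this diff:

# Hypothetical excerpt from config/settings/base.py -- not part of this diff.
import os

import dj_database_url

# Parse a URL such as mysql://user:password@host:3306/dbname into the
# dict Django expects; DATABASE_URL is an assumed convention here.
DATABASES = {
    'default': dj_database_url.parse(os.environ['DATABASE_URL'])
}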

@ -3,11 +3,6 @@
Werkzeug==0.14.1 # https://github.com/pallets/werkzeug
ipdb==0.11 # https://github.com/gotcha/ipdb
Sphinx==1.8.1 # https://github.com/sphinx-doc/sphinx
{%- if cookiecutter.use_docker == 'y' %}
psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
{%- else %}
psycopg2-binary==2.7.5 # https://github.com/psycopg/psycopg2
{%- endif %}

# Testing
# ------------------------------------------------------------------------------

@ -25,6 +20,6 @@ coverage==4.5.1 # https://github.com/nedbat/coveragepy
factory-boy==2.11.1 # https://github.com/FactoryBoy/factory_boy

django-debug-toolbar==1.10.1 # https://github.com/jazzband/django-debug-toolbar
django-extensions==2.1.3 # https://github.com/django-extensions/django-extensions

django-coverage-plugin==1.6.0 # https://github.com/nedbat/django_coverage_plugin
pytest-django==3.4.3 # https://github.com/pytest-dev/pytest-django

@ -3,7 +3,6 @@
-r ./base.txt

gunicorn==19.8.1 # https://github.com/benoitc/gunicorn
psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
{%- if cookiecutter.use_whitenoise == 'n' %}
Collectfast==0.6.2 # https://github.com/antonagestam/collectfast
{%- endif %}
37
{{cookiecutter.project_slug}}/run.sh
Normal file
@ -0,0 +1,37 @@
#!/bin/bash
set -e

# Resolve the New Relic config file path from Consul; envconsul injects
# the $DEPLOY_ENV environment stored in Consul into the wrapped command.
export NEW_RELIC_CONFIG_FILE=$(/usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ env | grep ^NEW_RELIC_CONFIG_FILE= | cut -d = -f2 | awk '{print $1}')

if [ "$1" = "{{ cookiecutter.project_slug }}" ]; then
    echo "Running {{ cookiecutter.project_slug }}"
    /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
        python manage.py migrate

    /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
        python manage.py loaddata seed_data.json

    /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
        python manage.py collectstatic --noinput

    # Run under the New Relic agent only when a config file was found.
    if [ -z "$NEW_RELIC_CONFIG_FILE" ]; then
        /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
            python manage.py runserver 0.0.0.0:8000
    else
        /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ python newrelic-customizer.py
        /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
            newrelic-admin run-program python manage.py runserver 0.0.0.0:8000
    fi

elif [ "$1" = "worker" ]; then
    echo "Running celery worker"

    if [ -z "$NEW_RELIC_CONFIG_FILE" ]; then
        /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
            celery worker -A walla --loglevel=INFO -Q {{ cookiecutter.project_slug }}
    else
        /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ python newrelic-customizer.py --postfix "worker"
        /usr/local/bin/envconsul -consul consul.mcagrid.com -token $CONSUL_TOKEN -prefix $DEPLOY_ENV/ \
            newrelic-admin run-program celery worker -A walla --loglevel=INFO -Q {{ cookiecutter.project_slug }}
    fi
fi
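
run.sh calls newrelic-customizer.py, which is referenced but not included in this diff. As a rough guess at its role -- rewriting the agent's app_name so worker processes report under a separate name -- here is a minimal sketch; only the script name and the --postfix option come from run.sh, everything else is assumption:

# Hypothetical sketch of newrelic-customizer.py -- this file is not part
# of the diff; the config layout and naming scheme are assumptions.
import argparse
import configparser
import os

parser = argparse.ArgumentParser()
parser.add_argument("--postfix", default="", help="suffix appended to app_name")
args = parser.parse_args()

# NEW_RELIC_CONFIG_FILE is exported by run.sh before this script runs.
config_path = os.environ["NEW_RELIC_CONFIG_FILE"]
config = configparser.ConfigParser(interpolation=None)
config.read(config_path)

if args.postfix:
    # e.g. "Python Application" -> "Python Application (worker)"
    app_name = config["newrelic"]["app_name"]
    config["newrelic"]["app_name"] = "%s (%s)" % (app_name, args.postfix)

with open(config_path, "w") as config_file:
    config.write(config_file)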

@ -35,7 +35,4 @@ if [ -z "$VIRTUAL_ENV" ]; then
else

    pip install -r $PROJECT_DIR/requirements/local.txt
{% if cookiecutter.use_heroku == "y" -%}
    pip install -r $PROJECT_DIR/requirements.txt
{%- endif %}
fi