Merge pull request #1 from datalets/master

merging
This commit is contained in:
khashashin 2017-07-18 16:26:55 +02:00 committed by GitHub
commit 1ae0ebdf76
162 changed files with 1702 additions and 14666 deletions

2
.gitignore vendored
View file

@ -4,6 +4,7 @@
.DS_Store
*.swp
*.sqlite3
/docker-compose.yml
/env/
/venv/
/static/
@ -15,3 +16,4 @@
/docs/_build/
/node_modules/
/publichealth/static/libs/
/mockup/assets/css/*.css

11
CHANGELOG.md Normal file
View file

@ -0,0 +1,11 @@
# Change Log
## [v0.0.2](https://github.com/datalets/public-health-ch/tree/v0.0.2) (2017-05-05)
[Full Changelog](https://github.com/datalets/public-health-ch/compare/v0.0.1...v0.0.2)
- Social media networks snippet
- Update to Wagtail 1.10, Django 1.11
## [v0.0.1](https://github.com/datalets/public-health-ch/tree/v0.0.1) (2017-05-04)
- Initial release

View file

@ -29,7 +29,7 @@ module.exports = function(grunt) {
sourcemap: 'none'
},
files: { // Dictionary of files
"publichealth/static/mockup/assets/css/main.css": "publichealth/static/css/main.scss"
"./assets/css/main.css": "publichealth/static/css/main.scss"
}
},
dist: {
@ -38,7 +38,7 @@ module.exports = function(grunt) {
sourcemap: 'none'
},
files: {
"publichealth/static/mockup/assets/css/main.min.css": "publichealth/static/css/main.scss"
"./assets/css/main.min.css": "publichealth/static/css/main.scss"
}
}
},
@ -58,8 +58,8 @@ module.exports = function(grunt) {
dev: {
bsFiles: {
src: [
"publichealth/static/mockup/assets/css/*.css",
"publichealth/static/mockup/*.html"
"./assets/css/*.css",
"./*.html"
]
},
options: {

52
LICENSE.md Normal file
View file

@ -0,0 +1,52 @@
# Wagtail
Copyright (C) 2014 Torchbox Ltd and individual contributors
https://github.com/wagtail/wagtail/blob/master/LICENSE
BSD 3-clause "New" or "Revised" License
# Puput
Copyright (c) 2015 Marc Tudurí - APSL
https://github.com/APSL/puput/blob/master/LICENSE
MIT License
# Bootstrap
Copyright (c) 2011-2016 Twitter, Inc.
https://github.com/twbs/bootstrap/blob/v3-dev/LICENSE
MIT License
# Bootswatch
Copyright (c) 2013 Thomas Park
https://github.com/thomaspark/bootswatch/blob/gh-pages/LICENSE
MIT License
# Slick
Copyright (c) 2013-2016 Ken Wheeler
https://github.com/kenwheeler/slick/blob/master/LICENSE
MIT License
# public-health-ch
Copyright (c) 2017 Public Health Schweiz
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -9,14 +9,14 @@ build-cached:
build:
docker-compose build --no-cache
run:
run-here:
docker-compose stop web # for restart cases, when already running
docker-compose up
run-detached:
docker-compose up -d
run:
docker-compose up -d # detach by default
django-restart-detached:
restart:
docker-compose stop web
docker-compose up -d web
@ -27,7 +27,7 @@ migrate:
docker-compose exec web ./manage.py migrate
migrations:
docker-compose exec web ./manage.py makemigrations
docker-compose exec web ./manage.py makemigrations --merge
apply-migrations: migrations migrate
@ -38,13 +38,21 @@ setup:
docker-compose exec web ./manage.py collectstatic
release:
sudo docker-compose build web
docker-compose stop web
docker-compose kill web
docker-compose build web
docker-compose up -d web
docker-compose exec web ./manage.py collectstatic --noinput
docker-compose exec web ./manage.py compress
reindex:
docker-compose exec web ./manage.py update_index
clear_index:
docker-compose exec elasticsearch curl -XDELETE localhost:9200/_all
django-exec-bash:
# execute bash in the currently running container
# execute bash in the currently running container
docker-compose exec web bash
django-run-bash:
@ -57,21 +65,40 @@ django-shell:
logs:
docker-compose logs -f --tail=500
backup:
docker-compose exec web ./manage.py dumpdata --natural-foreign --indent=4 -e contenttypes -e auth.Permission -e sessions -e wagtailcore.pagerevision -e wagtailcore.groupcollectionpermission > ~/publichealth.home.json
zip ~/publichealth.home.json.`date +"%d%m%Y-%H%M"`.zip ~/publichealth.home.json
rm ~/publichealth.home.json
django-loaddata:
gunzip ~/publichealth.home.json.gz
docker-compose exec web ./manage.py loaddata ~/publichealth.home.json
restore: django-loaddata restart
psql:
docker-compose exec postgres psql -U postgres -d postgres
pg-run-detached:
# start pg service
docker-compose up -d pg_database
# start pg service
docker-compose up -d postgres
pg-exec:
docker-compose exec pg_database bash
docker-compose exec postgres bash
pg-dump:
docker-compose exec pg_database bash -c 'pg_dump -U "$$POSTGRES_USER" -d "$$POSTGRES_DB" -f ./dumps/latest.sql'
docker-compose exec postgres bash -c 'pg_dump -U postgres -d postgres -f ./latest.sql'
pg-backup:
docker-compose exec postgres bash -c 'pg_dump -U postgres -d postgres' > ~/pg-backup.sql
zip ~/pg-backup.sql.`date +"%d%m%Y-%H%M"`.zip ~/pg-backup.sql
rm ~/pg-backup.sql
pg-restore:
docker-compose exec pg_database bash -c 'psql -U "$$POSTGRES_USER" -d "$$POSTGRES_DB" -f ./dumps/latest.sql'
docker-compose exec postgres bash -c 'psql -U postgres -d postgres -f ./latest.sql'
pg-surefire-drop-restore-db:
# drop existing database, recreate it, and then restore its content from backup.
-docker-compose exec pg_database bash -c 'dropdb -h localhost -U "$$POSTGRES_USER" "$$POSTGRES_DB"'
docker-compose exec pg_database bash -c 'createdb -h localhost -U "$$POSTGRES_USER" "$$POSTGRES_DB"'
# drop existing database, recreate it, and then restore its content from backup.
-docker-compose exec postgres bash -c 'dropdb -h localhost -U postgres postgres'
docker-compose exec postgres bash -c 'createdb -h localhost -U postgres postgres'
make pg-restore

141
README.md
View file

@ -1,43 +1,27 @@
Public Health Schweiz
=====================
Website of the Swiss Society for Public Health
New website of the [Swiss Society for Public Health](http://public-health.ch), developed by [datalets.ch](http://datalets.ch) using the open source, [Django](https://www.djangoproject.com/)-based [Wagtail CMS](http://wagtail.io). The frontend has been implemented by [moving water](http://www.movingwater.ch/) using [Bootstrap](https://getbootstrap.com) framework.
This project is open source under the [MIT License](LICENSE.md).
[![Dependency Status](https://dependencyci.com/github/datalets/public-health-ch/badge)](https://dependencyci.com/github/datalets/public-health-ch)
## Development environment
The easiest way to set up your machine would be to use [Vagrant](https://vagrantup.com), then in the project folder in the terminal type:
The easiest way to set up your machine would be to use [Vagrant](https://vagrantup.com), then in the project folder in the terminal type: `vagrant up`. Then when it is ready, follow instructions for *Database setup*.
```
vagrant liverun
```
**Backend setup**
After installing Python 3, from the project folder:
```
sudo apt-get install python3-venv python3-dev libjpeg-dev
pyvenv env
. env/bin/activate
pip install -U pip
pip install -r requirements.txt
./manage.py migrate
./manage.py createsuperuser
```
You will be asked a few questions to create an administrator account.
To set up a full development environment, follow all these instructions.
**Frontend setup**
You will need to have Ruby and SASS installed on your system, e.g.:
If not using Vagrant, you will need to have Ruby and SASS installed on your system, e.g.:
```
sudo yum install rubygem-sass
sudo apt-get install ruby-sass
```
Make sure a recent version of node.js, then:
Make sure a recent version of node.js (we recommend using [nave.sh](https://github.com/isaacs/nave)), then:
```
npm install -g bower grunt-cli
@ -45,20 +29,54 @@ npm install
bower install
```
If you have one installed, also start your local redis server (`service redis start`).
The first command (`..install -g..`) may require `sudo` if you installed node.js as a system package.
If you are only working on the frontend, you can start a local webserver and work on frontend assets without the backend setup described below. Mock content is at `mockup`, and there is a `grunt browser-sync` setup for working with frontend assets.
**Backend setup**
If not using Vagrant: after installing Python 3, from the project folder, deploy system packages and create a virtual environment as detailed (for Ubuntu users) below:
```
sudo apt-get install python3-venv python3-dev libjpeg-dev
pyvenv env
. env/bin/activate
pip install -U pip
pip install -r requirements.txt
```
At this point your backend is ready to be deployed.
## Database setup
Once your installation is ready, you can get a blank database set up and add a user to login with.
If you are using Vagrant, enter the shell of your virtual machine now with `vagrant ssh`
Run these commands:
```
./manage.py migrate
./manage.py createsuperuser
```
You will be asked a few questions to create an administrator account.
**Starting up**
Run this after completing setup:
If you have one installed, also start your local redis server (`service redis start`).
After completing setup, you can use:
```
./manage.py runserver &
grunt browser-sync
./manage.py runserver
```
A default browser should open pointing to the default home page.
(In a Vagrant shell, just use `djrun`)
Now access the admin panel with the user account you created earlier: http://localhost:3000/admin/
Now access the admin panel with the user account you created earlier: http://localhost:8000/admin/
## Troubleshooting
@ -66,8 +84,63 @@ Now access the admin panel with the user account you created earlier: http://loc
## Production notes
We use Ansible for automated deployment. You need to obtain an SSH and vault keys and place these in a `.keys` folder, then:
We use [Ansible](https://www.ansible.com) and [Docker Compose](https://docs.docker.com/compose/reference/overview/) for automated deployment.
To use Docker Compose to deploy the site, copy `ansible/roles/web/templates/docker-compose.j2` to `/docker-compose.yml` and fill in all `{{ variables }}`. This is done automatically in Ansible.
To do production deployments, you need to obtain SSH and vault keys from your system administrator (who has followed the Ansible guide to set up a vault..), and place these in a `.keys` folder. To deploy a site:
```
ansible-playbook -s ansible/docker.yaml -i ansible/inventories/production
ansible-playbook -s ansible/<*.yaml> -i ansible/inventories/production
```
For an update release with a specific version, use:
```
ansible-playbook -s ansible/site.yaml -i ansible/inventories/production --tags release -e gitversion=<v*.*.*>
```
We use a StackScript to deploy to Linode, the basic system set up is to have a user in the sudoers and docker group, and a few basic system packages ready.
For example, on Ubuntu:
```
apt-get install -q -y zip git nginx python-virtualenv python-dev
```
The order of deployment is:
- docker.yaml (base system)
- node.yaml
- site.yaml
- harden.yaml
- certbot.yaml
The last line adds support for Let's Encrypt, which you can configure and enable (updating your Nginx setup) with:
```
sudo /opt/certbot/certbot-auto --nginx certonly
```
If you do **not** wish to use SSL, delete the last part of your nginx site configuration (/etc/nginx/sites-enabled/...).
### Production releases
For further deployment and system maintenance we have a `Makefile` which automates Docker Compose tasks. This should be converted to use [Ansible Container](http://docs.ansible.com/ansible-container/getting_started.html). In the meantime, start a release with Ansible, then complete it using `make`, i.e.:
```
ansible-playbook -s ansible/site.yaml -i ansible/inventories/production --tags release
ssh -i .keys/ansible.pem ansible@<server-ip> "cd <release_dir> && make release"
```
### Restoring a data backup
For development, it's handy to have access to a copy of the production data. To delete your local database and restore from a file backup, run:
```
rm publichealth-dev.sqlite3
python manage.py migrate
python manage.py loaddata publichealth.home.json
```
You might want to `createsuperuser` again at this point.

13
ansible/certbot.yaml Normal file
View file

@ -0,0 +1,13 @@
- hosts: webservers
become: true
become_method: 'sudo'
gather_facts: yes
vars:
certbot_auto_renew_user: ansible
certbot_auto_renew_minute: 20
certbot_auto_renew_hour: 5
certbot_dir: /opt/certbot
certbot_install_from_source: yes
certbot_version: v0.14.2
roles:
- geerlingguy.certbot

View file

@ -3,8 +3,6 @@
gather_facts: True
roles:
- role: dev-sec.os-hardening
- role: dev-sec.ssh-hardening
become: yes
- dev-sec.nginx-hardening

View file

@ -1,18 +1,18 @@
$ANSIBLE_VAULT;1.1;AES256
62653363616638643933303063396435316366376466643563316461343162633661346638343836
6232346565306638373931396164303438373261396630610a343536386335336634663239626637
36376137393433343636366137333336303764616536643864353630366535653936663531373439
3966363565633630620a373238363535653936383365656366306231666365656265303638393864
33323363373365613134393261393139656261323036656632366562316264346434393366656165
35363861303036316465326134653163303835376333633132633332303438666363386330303463
66376132396666396163306566663534646261653139343432663332323837666165653235633538
34613037306339623032613939663639663161336539333832333438306466653439306535343134
64363833666138626561363263323338376261343333623839366236343464363737616232633566
62633934323463623834653539613039636138666539666665333434663165636639303532333233
30363437633762366230326231653961373462383330336462313935643761316334666232613261
64613538363763666666303832393632373934343162613439616535663666373434333632633664
64623531383239636464393036346565373564356666626632613437303335653465386639623366
61636231383561336238336334303462643137633465303466633764336630636462626535666633
61386263316437346666633164616162636234623530343038343338373439616131333538343835
39386435373035633837383264346266643433366363653566363863393434356337633366363635
64666465613164353238626165623931633534666438386264633431356332616264
65303737656632326238636231303166643730313665343038366330623866643834656263303936
6365633063373036663532356539366535393339343834650a343930366435653561666666333561
65323333396164323863333262613937313437373232393434643337363638366332663765663037
3432643961653264300a626234333966343963616461366439643138633633393435343863646138
66666632366666326632353865636565646435316339643761623235643235643930636435653433
61363035316462613265616236383531633930393034396661613562306234306261383261666432
37303332633466646537366139653266303532346437623863326635336535643366346236356562
66643033353335653961376365363432363563343461386134306137636636643538353232393237
61333932653134633738326435353564393835353334653732663737323935613837626132396434
39646334313132623039646432373537633938353638376666663238633631373063323636333733
31656563316362333138383232326532323832646639386333376330643535666639323834383363
66383431333834393535353539623730623764373363326431333866636666366637373639666437
36643665343965303131356336656362333564633161306638356166663963643235353166333131
33386566356664313837333565613239636233626462333532656566343661323938373338633264
30376264363566383435363765323864626264646331383162313235633463316532636634356630
30306666313961613839653939613737316339313932636266666530636463633963306635306665
62353966313762376333306564633665613632396339616564303433626265633262

View file

@ -1,7 +1,13 @@
[webservers]
172.104.150.7
[all:vars]
ansible_connection=ssh
ansible_ssh_user=ansible
ansible_ssh_private_key_file=.keys/ansible.pem
$ANSIBLE_VAULT;1.1;AES256
33346132656133323263366630316265383032356264303236306562366336346133303931366363
6166623234393661383738316361653639353863323062660a333966363436333031363936396438
31333262343961646537623534373165363735316566653133626435633538306636616162306535
3336396134363466350a363839663636313862643531633762626563396333646136376430643634
39383738613433643139323263613061353464383864636334313865313162316236343261333237
36303836313661336534613630663161633163646131376238643335303663653338363261633165
62623833626462633563386631643962393936646563663738623530666332353836303062376635
64326237663737316438633063343935383463663937373634303236376635643062666664633132
63633236373462376637363563363564643433356138636233363034306130306139306461373331
39396564303963633135303536383862336562623663336636373435373366353934613964353863
36313638623335356562626135383337656632333530333561623265323930323732333736343532
31366239663161663562

View file

@ -0,0 +1,2 @@
*.retry
tests/test.sh

View file

@ -0,0 +1,23 @@
---
services: docker
env:
- distro: centos7
- distro: centos6
playbook: test-source-install.yml
- distro: ubuntu1604
- distro: ubuntu1404
playbook: test-source-install.yml
- distro: debian8
playbook: test-source-install.yml
script:
# Download test shim.
- wget -O ${PWD}/tests/test.sh https://gist.githubusercontent.com/geerlingguy/73ef1e5ee45d8694570f334be385e181/raw/
- chmod +x ${PWD}/tests/test.sh
# Run tests.
- ${PWD}/tests/test.sh
notifications:
webhooks: https://galaxy.ansible.com/api/v1/notifications/

View file

@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2017 Jeff Geerling
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -0,0 +1,75 @@
# Ansible Role: Certbot (for Let's Encrypt)
[![Build Status](https://travis-ci.org/geerlingguy/ansible-role-certbot.svg?branch=master)](https://travis-ci.org/geerlingguy/ansible-role-certbot)
Installs and configures Certbot (for Let's Encrypt).
## Requirements
If installing from source, Git is required. You can install Git using the `geerlingguy.git` role.
## Role Variables
The variable `certbot_install_from_source` controls whether to install Certbot from Git or package management. The latter is the default, so the variable defaults to `no`.
certbot_auto_renew: true
certbot_auto_renew_user: "{{ ansible_user }}"
certbot_auto_renew_hour: 3
certbot_auto_renew_minute: 30
By default, this role configures a cron job to run under the provided user account at the given hour and minute, every day. The defaults run `certbot renew` (or `certbot-auto renew`) via cron every day at 03:30:00 by the user you use in your Ansible playbook. It's preferred that you set a custom user/hour/minute so the renewal is during a low-traffic period and done by a non-root user account.
### Source Installation from Git
You can install Certbot from its Git source repository if desired. This might be useful in several cases, but especially when older distributions don't have Certbot packages available (e.g. CentOS < 7, Ubuntu < 16.10 and Debian < 8).
certbot_install_from_source: no
certbot_repo: https://github.com/certbot/certbot.git
certbot_version: master
certbot_keep_updated: yes
Certbot Git repository options. To install from source, set `certbot_install_from_source` to `yes`. This clones the configured `certbot_repo`, respecting the `certbot_version` setting. If `certbot_keep_updated` is set to `yes`, the repository is updated every time this role runs.
certbot_dir: /opt/certbot
The directory inside which Certbot will be cloned.
## Dependencies
None.
## Example Playbook
- hosts: servers
vars:
certbot_auto_renew_user: your_username_here
certbot_auto_renew_minute: 20
certbot_auto_renew_hour: 5
roles:
- geerlingguy.certbot
After installation, you can create certificates using the `certbot` (or `certbot-auto`) script, which by default is installed inside the configured `certbot_dir` (when using Git). Here are some example commands to configure certificates with Certbot:
# Automatically add certs for all Apache virtualhosts (use with caution!).
/opt/certbot/certbot-auto --apache
# Generate certs, but don't modify Apache configuration (safer).
/opt/certbot/certbot-auto --apache certonly
By default, this role adds a cron job that will renew all installed certificates once per day at the hour and minute of your choosing.
You can test the auto-renewal (without actually renewing the cert) with the command:
/opt/certbot/certbot-auto renew --dry-run
See full documentation and options on the [Certbot website](https://certbot.eff.org/).
## License
MIT / BSD
## Author Information
This role was created in 2016 by [Jeff Geerling](https://www.jeffgeerling.com/), author of [Ansible for DevOps](https://www.ansiblefordevops.com/).

View file

@ -0,0 +1,16 @@
---
# Certbot auto-renew cron job configuration (for certificate renewals).
certbot_auto_renew: true
certbot_auto_renew_user: "{{ ansible_user }}"
certbot_auto_renew_hour: 3
certbot_auto_renew_minute: 30
# To install from source (on older OSes or if you need a specific or newer
# version of Certbot), set this variable to `yes` and configure other options.
certbot_install_from_source: no
certbot_repo: https://github.com/certbot/certbot.git
certbot_version: master
certbot_keep_updated: yes
# Where to put Certbot when installing from source.
certbot_dir: /opt/certbot

View file

@ -0,0 +1 @@
{install_date: 'Fri Jun 2 09:43:15 2017', version: 2.0.0}

View file

@ -0,0 +1,33 @@
---
dependencies: []
galaxy_info:
author: geerlingguy
description: "Installs and configures Certbot (for Let's Encrypt)."
company: "Midwestern Mac, LLC"
license: "license (BSD, MIT)"
min_ansible_version: 2.0
platforms:
- name: EL
versions:
- 6
- 7
- name: Fedora
versions:
- all
- name: Ubuntu
versions:
- all
- name: Debian
versions:
- all
galaxy_tags:
- networking
- system
- web
- certbot
- letsencrypt
- encryption
- certificates
- ssl
- https

View file

@ -0,0 +1,8 @@
---
- name: Load a variable file based on the OS type, or a default if not found.
include_vars: "{{ item }}"
with_first_found:
- "{{ ansible_distribution }}-{{ ansible_distribution_version }}.yml"
- "{{ ansible_distribution }}.yml"
- "{{ ansible_os_family }}.yml"
- "default.yml"

View file

@ -0,0 +1,17 @@
---
- name: Clone Certbot into configured directory.
git:
repo: "{{ certbot_repo }}"
dest: "{{ certbot_dir }}"
version: "{{ certbot_version }}"
update: "{{ certbot_keep_updated }}"
force: yes
- name: Set Certbot script variable.
set_fact:
certbot_script: "{{ certbot_dir }}/certbot-auto"
- name: Ensure certbot-auto is executable.
file:
path: "{{ certbot_script }}"
mode: 0755

View file

@ -0,0 +1,7 @@
---
- name: Install Certbot.
package: "name={{ certbot_package }} state=present"
- name: Set Certbot script variable.
set_fact:
certbot_script: "{{ certbot_package }}"

View file

@ -0,0 +1,11 @@
---
- include: include-vars.yml
- include: install-with-package.yml
when: not certbot_install_from_source
- include: install-from-source.yml
when: certbot_install_from_source
- include: renew-cron.yml
when: certbot_auto_renew

View file

@ -0,0 +1,8 @@
---
- name: Add cron job for certbot renewal (if configured).
cron:
name: Certbot automatic renewal.
job: "{{ certbot_script }} renew --quiet --no-self-upgrade"
minute: "{{ certbot_auto_renew_minute }}"
hour: "{{ certbot_auto_renew_hour }}"
user: "{{ certbot_auto_renew_user }}"

View file

@ -0,0 +1,11 @@
# Ansible Role tests
To run the test playbook(s) in this directory:
1. Install and start Docker.
1. Download the test shim (see .travis.yml file for the URL) into `tests/test.sh`:
- `wget -O tests/test.sh https://gist.githubusercontent.com/geerlingguy/73ef1e5ee45d8694570f334be385e181/raw/`
1. Make the test shim executable: `chmod +x tests/test.sh`.
1. Run (from the role root directory) `distro=[distro] playbook=[playbook] ./tests/test.sh`
If you don't want the container to be automatically deleted after the test playbook is run, add the following environment variables: `cleanup=false container_id=$(date +%s)`

View file

@ -0,0 +1,2 @@
---
- src: geerlingguy.git

View file

@ -0,0 +1,23 @@
---
- hosts: all
vars:
certbot_install_from_source: yes
pre_tasks:
- name: Update apt cache.
apt: update_cache=yes cache_valid_time=600
when: ansible_os_family == 'Debian'
changed_when: false
- name: Install cron (RedHat).
yum: name=cronie state=present
when: ansible_os_family == 'RedHat'
- name: Install cron (Debian).
apt: name=cron state=present
when: ansible_os_family == 'Debian'
roles:
- geerlingguy.git
- role_under_test

View file

@ -0,0 +1,19 @@
---
- hosts: all
pre_tasks:
- name: Update apt cache.
apt: update_cache=yes cache_valid_time=600
when: ansible_os_family == 'Debian'
changed_when: false
- name: Install cron (RedHat).
yum: name=cronie state=present
when: ansible_os_family == 'RedHat'
- name: Install cron (Debian).
apt: name=cron state=present
when: ansible_os_family == 'Debian'
roles:
- role_under_test

View file

@ -0,0 +1 @@
certbot_package: letsencrypt

View file

@ -0,0 +1,2 @@
---
certbot_package: certbot

View file

@ -16,4 +16,6 @@
dest: "{{ release_dir }}/docker-compose.yml"
- name: Deploy Docker site
shell: docker-compose build
shell: make setup
args:
chdir: "{{ release_dir }}"

View file

@ -17,9 +17,6 @@
- nodejs
- setup
- include: manage.yaml
- include: release.yaml
tags:
- manage
- docker
- setup
- run
- release

View file

@ -1,19 +0,0 @@
---
#- name: Build a current site
# shell: "docker-compose build web"
- name: Migrate the site data
shell: "docker-compose exec web ./manage.py migrate"
- name: Create administrative user
shell: "docker-compose exec web ./manage.py createsuperuser"
- name: Compress site frontend
shell: "docker-compose exec web ./manage.py compress"
- name: Collect static frontend files
shell: "docker-compose exec web ./manage.py collectstatic --noinput"
- name: Start Docker site
shell: "docker-compose up -d"

View file

@ -7,6 +7,12 @@
src: nginx.conf.j2
dest: /etc/nginx/sites-available/{{ domain }}
- name: Copy extra Nginx site config
become: true
template:
src: ph-extra-nginx.conf.j2
dest: /etc/nginx/sites-available/extra-{{ domain }}
- name: Activate Nginx site config
become: true
file:

View file

@ -0,0 +1,17 @@
---
- name: Checkout code branch from git
git:
repo: 'https://github.com/datalets/public-health-ch'
dest: "{{ release_dir }}"
force: yes
version: "{{ gitversion }}"
- name: Copy Docker site configuration
template:
src: docker-compose.j2
dest: "{{ release_dir }}/docker-compose.yml"
- name: Update Docker site
debug:
msg: "Please run 'make release' in {{ release_dir }} to finish updating the site"

View file

@ -31,6 +31,6 @@ postgres:
ports:
- "5432:5432"
elasticsearch:
image: orchardup/elasticsearch
image: elasticsearch:2
ports:
- "9200:9200"

View file

@ -6,7 +6,7 @@ upstream wagtail-site {
server {
listen [::]:80;
listen 80;
server_name {{ domain }} www.{{ domain }};
server_name {{ domain }};
client_max_body_size 16M;
@ -19,26 +19,28 @@ server {
gzip_vary on;
location /static/ {
access_log off;
expires 3600;
access_log off; expires 36000;
alias {{ release_dir }}/static/;
add_header Cache-Control "public";
add_header Access-Control-Allow-Origin https://{{ domain }};
add_header Cache-Control "public";
add_header Access-Control-Allow-Origin https://{{ domain }};
}
# Set a longer expiry for CACHE/, because the filenames are unique.
location /static/CACHE/ {
access_log off;
expires 864000;
access_log off; expires 864000;
alias {{ release_dir }}/static/CACHE/;
}
location /favicon.ico {
access_log off; expires max;
alias {{ release_dir }}/static/images/favicon.ico;
}
# Only serve /media/images by default, not e.g. original_images/.
location /media/images {
alias {{ release_dir }}/media/images;
access_log off;
expires max;
add_header Cache-Control "public";
access_log off; expires max;
add_header Cache-Control "public";
}
location / {
@ -48,23 +50,17 @@ server {
proxy_redirect off;
proxy_pass http://wagtail-site;
}
}
# TODO: parameterize
server {
listen 80; listen 443 ssl;
server_name conference.public-health.ch;
location /fr {
return 301 $scheme://sph17.organizers-congress.org/frontend/index.php?sub=89;
}
location / {
return 301 $scheme://sph17.organizers-congress.org;
}
}
# Enable secure site support
listen [::]:443;
listen 443 ssl;
ssl on;
ssl_certificate /etc/letsencrypt/live/{{ domain }}/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/{{ domain }}/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
if ($scheme != "https") {
return 301 https://$host$request_uri;
}
# TODO: parameterize
server {
listen 80; listen 443 ssl;
server_name sphc.ch;
return 301 $scheme://sph17.organizers-congress.org;
}

View file

@ -0,0 +1,32 @@
#{{ ansible_managed }}
# Web archive and other special configurations for public-health.ch
server {
listen 80;
server_name www-old.{{ domain }};
index index.html index.htm;
root {{ archive_dir }};
location / {
try_files $uri $uri/ =404;
}
}
# TODO: parameterize
server {
listen 80;
server_name conference.{{ domain }};
location /fr {
return 301 $scheme://sph17.organizers-congress.org/frontend/index.php?sub=89;
}
location / {
return 301 $scheme://sph17.organizers-congress.org;
}
}
# TODO: parameterize
server {
listen 80;
server_name sphc.ch;
return 301 $scheme://sph17.organizers-congress.org;
}

View file

@ -3,6 +3,7 @@
gather_facts: True
vars:
release_dir: /opt/publichealth
archive_dir: /opt/www-old
django_log_dir: /var/log/publichealth
ipv4_addresses: "{{ ansible_all_ipv4_addresses }}"
environment:

File diff suppressed because it is too large Load diff

View file

@ -1,57 +0,0 @@
# Public Health CMS stage
upstream wagtail-stage {
server localhost:5000;
}
server {
listen 80;
server_name ph-alpha.nebula1.public-health.ch;
client_max_body_size 64M;
gzip on;
gzip_types text/plain text/css application/x-javascript image/svg+xml;
gzip_comp_level 1;
gzip_disable msie6;
gzip_http_version 1.0;
gzip_proxied any;
gzip_vary on;
location /static/ {
access_log off;
expires 3600;
alias /opt/public-health-ch/static/;
}
# Set a longer expiry for CACHE/, because the filenames are unique.
location /static/CACHE/ {
access_log off;
expires 864000;
alias /opt/public-health-ch/static/CACHE/;
}
# Only serve /media/images by default, not e.g. original_images/.
location /media/images {
expires 864000;
alias /opt/public-health-ch/media/images;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_pass http://wagtail-stage;
}
}
server {
listen 80; listen 443 ssl;
server_name conference.public-health.ch;
location /fr {
return 301 $scheme://sph17.organizers-congress.org/frontend/index.php?sub=89;
}
location / {
return 301 $scheme://sph17.organizers-congress.org;
}
}

20
feedler/__init__.py Normal file
View file

@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Package metadata for the feedler app: a Feedly-backed news feed for Wagtail.
__author__ = 'Oleg Lavrovsky'
__email__ = 'oleg@datalets.ch'
__version__ = '0.1'

# Django apps that feedler depends on; the project settings are expected to
# append these to INSTALLED_APPS.
FEEDLER_APPS = (
    # Wagtail apps
    'wagtail.wagtailcore',
    'wagtail.wagtailadmin',
    'wagtail.contrib.modeladmin',
    'wagtail.contrib.wagtailroutablepage',
    'wagtail.api.v2',
    # Third-party apps
    'rest_framework',
    # My apps
    'feedler',
)

7
feedler/api.py Normal file
View file

@ -0,0 +1,7 @@
from wagtail.api.v2.router import WagtailAPIRouter

from .endpoints import EntriesAPIEndpoint

# Create the router. "wagtailapi" is the URL namespace
api_router = WagtailAPIRouter('wagtailapi')
# Expose feed entries as an API endpoint; mounted in feedler.urls.
api_router.register_endpoint('entries', EntriesAPIEndpoint)

5
feedler/apps.py Normal file
View file

@ -0,0 +1,5 @@
from django.apps import AppConfig
class FeedlerConfig(AppConfig):
    """Django application configuration for the feedler app."""
    name = 'feedler'

26
feedler/endpoints.py Normal file
View file

@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
from wagtail.contrib.wagtailapi.endpoints import BaseAPIEndpoint
from wagtail.contrib.wagtailapi.serializers import BaseSerializer
from wagtail.contrib.wagtailapi.filters import FieldsFilter, OrderingFilter, SearchFilter
from wagtail.contrib.wagtailapi.pagination import WagtailPagination
from .models import Entry
class EntrySerializer(BaseSerializer):
    """Default wagtailapi serializer; exposed fields are declared via
    ``extra_api_fields`` on the endpoint below."""
    pass
class EntriesAPIEndpoint(BaseAPIEndpoint):
    """Read-only API endpoint listing fetched feed entries.

    NOTE(review): this builds on ``wagtail.contrib.wagtailapi``, which was
    deprecated in favour of ``wagtail.api.v2`` — confirm compatibility
    before upgrading Wagtail.
    """
    base_serializer_class = EntrySerializer
    filter_backends = [FieldsFilter, OrderingFilter, SearchFilter]
    # Entry model fields available for ?fields=, ordering and search.
    extra_api_fields = [
        'title',
        'author',
        'link',
        'visual',
        'content',
        'tags',
        'published',
    ]
    name = 'entries'
    model = Entry

60
feedler/feedparser.py Normal file
View file

@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
from datetime import datetime
def parse(obj, raw, stream):
    """
    Parse a raw entry dict from the Feedly streams API into ``obj``.

    :param obj: target object (typically a feedler ``Entry`` model
        instance); its attributes are assigned in place.
    :param raw: decoded JSON dict for one entry as returned by Feedly.
    :param stream: the Stream this entry was fetched from.
    :returns: ``obj``, for chaining.
    """
    obj.raw = raw
    obj.stream = stream
    obj.entry_id = raw['id']
    # Date stamp handling: Feedly timestamps are in milliseconds.
    ts = raw['published'] / 1000
    obj.published = datetime.fromtimestamp(ts)
    # Authorship and title: prefer the entry's own author, fall back to the
    # title of the originating feed.
    # (Bug fix: the original tested `'author' in raw['origin']` but then read
    # raw['author'] — `origin` holds feed metadata, so the entry author was
    # effectively never applied.)
    obj.title = raw['title']
    origin = raw.get('origin', {})
    if 'author' in raw:
        obj.author = raw['author']
    elif 'title' in origin:
        obj.author = origin['title']
    # Parse links and references; all of these keys are optional in Feedly.
    if raw.get('alternate'):
        obj.link = raw['alternate'][0]['href']
    if raw.get('thumbnail'):
        if 'url' in raw['thumbnail'][0]:
            obj.visual = raw['thumbnail'][0]['url']
    elif raw.get('enclosure'):
        if 'href' in raw['enclosure'][0]:
            obj.visual = raw['enclosure'][0]['href']
    elif 'visual' in raw and 'url' in raw['visual']:
        obj.visual = raw['visual']['url']
    # Feedly uses the literal string 'none' for a missing visual; normalise
    # it to an empty string. getattr guards objects with no prior value.
    if getattr(obj, 'visual', '').lower().strip() == 'none':
        obj.visual = ''
    # Collect text in nested JSON content.
    if 'content' in obj.raw:
        obj.content = obj.raw['content']
    elif 'summary' in obj.raw:
        summary = obj.raw['summary']
        if 'content' in summary:
            obj.content = summary['content']
        else:
            obj.content = summary
    else:
        obj.content = ''
    # Collect tags: comma-separated, lower-cased, de-duplicated labels.
    # Commas inside a label are replaced so the joined string stays parseable.
    tags = []
    for tag in obj.raw.get('tags', []):
        if 'label' in tag:
            label = tag['label'].replace(',', '-')
            label = label.strip().lower()
            if len(label) > 3 and label not in tags:
                tags.append(label)
    obj.tags = ','.join(tags)
    return obj

View file

@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-03 13:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial feedler schema: Entry, FeedlySettings and Stream models.

    Auto-generated by Django; edit via new migrations, not in place.
    """

    initial = True

    dependencies = [
        # FeedlySettings carries a OneToOne to wagtailcore.Site.
        ('wagtailcore', '__latest__'),
    ]

    operations = [
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('raw', models.TextField(blank=True, editable=False)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('published', models.DateTimeField(auto_now_add=True)),
                ('entry_id', models.CharField(blank=True, editable=False, max_length=255, unique=True)),
                ('title', models.CharField(max_length=255)),
                ('author', models.CharField(blank=True, max_length=255)),
                ('link', models.URLField()),
                ('visual', models.URLField(blank=True)),
                ('content', models.TextField()),
                ('tags', models.TextField(blank=True)),
            ],
            options={
                'verbose_name_plural': 'Entries',
            },
        ),
        migrations.CreateModel(
            name='FeedlySettings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('feedly_auth', models.TextField(blank=True, help_text='Your developer authorization key')),
                ('feedly_pages', models.IntegerField(blank=True, choices=[(1, '2'), (2, '5'), (3, '10'), (4, '50')], help_text='How many pages to fetch?', null=True)),
            ],
            options={
                'verbose_name': 'Feedly',
            },
        ),
        migrations.CreateModel(
            name='Stream',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('ident', models.CharField(max_length=255)),
            ],
        ),
        migrations.AddField(
            model_name='feedlysettings',
            name='feedly_stream',
            field=models.ManyToManyField(to='feedler.Stream'),
        ),
        migrations.AddField(
            model_name='feedlysettings',
            name='site',
            field=models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site'),
        ),
        migrations.AddField(
            model_name='entry',
            name='stream',
            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='feedler.Stream', verbose_name='Original stream'),
        ),
    ]

View file

@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-03 15:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
    """Add the FeedPage page type (rich-text intro plus optional stream
    filter). Auto-generated by Django; do not edit in place."""

    dependencies = [
        ('feedler', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='FeedPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('intro', wagtail.wagtailcore.fields.RichTextField(blank=True, default='')),
                ('stream', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='feedler.Stream', verbose_name='Filter to stream (optional)')),
            ],
            options={
                'verbose_name': 'Feeds',
            },
            bases=('wagtailcore.page',),
        ),
    ]

View file

View file

@ -0,0 +1,2 @@
from .models import *
from .admin import *

65
feedler/models/admin.py Normal file
View file

@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
import requests, json, codecs
from django.contrib import admin
from django.db import models
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.core.mail import send_mail
from wagtail.contrib.settings.models import BaseSetting, register_setting
from .models import Entry, Stream
import feedler.feedparser as feedparser
import logging
logger = logging.getLogger('feedler')
# Feedly integration module
@register_setting
class FeedlySettings(BaseSetting):
    """Site-wide Wagtail settings panel for the Feedly integration."""

    # OAuth developer token sent with every request to the Feedly API.
    feedly_auth = models.TextField(
        help_text='Your developer authorization key', blank=True)
    # NOTE(review): the database stores the first tuple element (1..4) while
    # editors see the label (2/5/10/50 pages) — confirm the fetch code maps
    # the stored value back to a page count before relying on it.
    feedly_pages = models.IntegerField(
        choices=(
            (1, '2'),
            (2, '5'),
            (3, '10'),
            (4, '50'),
        ), blank=True, null=True,
        help_text='How many pages to fetch?'
    )
    # Streams to fetch whenever these settings are saved (see pre_save hook).
    feedly_stream = models.ManyToManyField(Stream)

    class Meta:
        verbose_name = 'Feedly'
# Base URL of the Feedly streams API; a stream ident is appended to it.
API_BASEURL = 'https://cloud.feedly.com/v3/streams/contents?streamId='


@receiver(pre_save, sender=FeedlySettings)
def handle_save_settings(sender, instance, *args, **kwargs):
    """Whenever the Feedly settings are saved, fetch every configured
    stream from the Feedly API and create or update local Entry rows.

    Raises PermissionError when the API responds with an errorMessage
    (e.g. an expired or invalid authorization token).
    """
    if not instance.feedly_auth:
        return
    auth_headers = {
        'Authorization': 'OAuth ' + instance.feedly_auth
    }
    for stream in instance.feedly_stream.all():
        # Start a request to download the feed
        logger.info("Processing stream %s" % stream.title)
        contents = requests.get(
            API_BASEURL + stream.ident, headers=auth_headers).json()
        if 'errorMessage' in contents:
            raise PermissionError(contents['errorMessage'])
        for raw_entry in contents['items']:
            eid = raw_entry['id']
            # Upsert: reuse the stored entry when its Feedly id is known.
            try:
                entry = Entry.objects.get(entry_id=eid)
                logger.info("Updating entry '%s'" % eid)
            except Entry.DoesNotExist:
                logger.info("Adding entry '%s'" % eid)
                entry = Entry()
            entry = feedparser.parse(entry, raw_entry, stream)
            entry.save()

67
feedler/models/models.py Normal file
View file

@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
from django.db import models
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.fields import RichTextField
class Stream(models.Model):
    """A configured Feedly stream, identified by its Feedly ident string."""
    title = models.CharField(max_length=255)
    # Feedly stream identifier, appended verbatim to the streams API URL.
    ident = models.CharField(max_length=255)

    def __str__(self):
        return self.title
class Entry(models.Model):
    """Implementation of the Entry from the feedly API as generic Django model
    """
    # Original JSON payload from Feedly, kept for debugging / re-parsing.
    raw = models.TextField(blank=True, editable=False)
    updated = models.DateTimeField(auto_now=True, editable=False)
    # NOTE(review): auto_now_add overwrites any value assigned before the
    # first save — feedler.feedparser.parse sets `published` from the Feedly
    # timestamp, which this field option would discard on insert; verify.
    published = models.DateTimeField(auto_now_add=True, editable=False)
    # Feedly's entry id; unique, used to de-duplicate on re-fetch.
    entry_id = models.CharField(max_length=255, unique=True, blank=True, editable=False)
    title = models.CharField(max_length=255)
    author = models.CharField(max_length=255, blank=True)
    link = models.URLField()
    visual = models.URLField(blank=True)
    content = models.TextField()
    # Comma-separated, lower-cased labels (see feedler.feedparser.parse).
    tags = models.TextField(blank=True)
    # NOTE(review): blank=True without null=True on a ForeignKey means forms
    # accept an empty value but the column stays NOT NULL — saving such a
    # form would fail at the database. Confirm whether null=True was meant.
    stream = models.ForeignKey(Stream,
        blank=True, on_delete=models.CASCADE,
        verbose_name='Original stream')

    class Meta:
        verbose_name_plural = 'Entries'
class FeedPage(Page):
    """A Wagtail page that lists the ten most recent feed entries,
    optionally restricted to a single configured stream."""

    intro = RichTextField(default='', blank=True)
    stream = models.ForeignKey(Stream, on_delete=models.PROTECT,
        null=True, blank=True, verbose_name='Filter to stream (optional)')

    content_panels = [
        FieldPanel('title'),
        FieldPanel('intro'),
        FieldPanel('stream'),
    ]

    @property
    def feedentries(self):
        """Ten newest entries, filtered to self.stream when one is set."""
        queryset = (Entry.objects.filter(stream=self.stream)
                    if self.stream else Entry.objects.all())
        # Order by most recent date first, then cap the result.
        return queryset.order_by('-published')[:10]

    def get_context(self, request):
        """Expose the entry list to the page template."""
        context = super(FeedPage, self).get_context(request)
        context['feedentries'] = self.feedentries
        return context

    class Meta:
        verbose_name = "Feeds"

View file

@ -0,0 +1,52 @@
{% extends "base.html" %}
{% load static wagtailcore_tags %}
{% block body_class %}template-{{ self.get_verbose_name|slugify }}{% endblock %}
{% block extra_css %}
{% endblock %}
{% block title %}Feeds{% endblock %}
{% block content %}
{# Page header: title and optional rich-text intro #}
<section id="article-index" class="article-index-page">
<div class="container">
<h2>{{ page.title }}</h2>
{% if page.intro %}
<p class="lead">{{ page.intro|richtext }}</p>
{% endif %}
</div>
</section>
<!-- Page body -->
<section id="news" class="feedpage-body">
<div class="container">
<div class="row">
{% for entry in feedentries %}
<div class="col-md-4 col-sm-6 col-xs-12">
{# The panel div is opened in both branches and closed once below #}
{% if entry.visual %}
<div class="panel panel-default">
<img src="{{ entry.visual }}">
{% else %}
<div class="panel panel-fulltext">
{% endif %}
<div class="panel-body">
<h3><span>{{ entry.title|striptags|truncatewords_html:10 }}</span></h3>
<p>
<em><small><span>{{ entry.author }}</span></small></em><br><br>
{{ entry.content|striptags|truncatewords_html:25 }}
</p>
</div>
<a href="{{ entry.link }}" target="_blank" class="fill"></a>
</div>
</div>
{# Fix: removed `<!-- {{ entry.raw }} -->`, which dumped the complete raw
   Feedly JSON payload into an HTML comment for every rendered entry —
   page bloat and needless data exposure to clients #}
{% empty %}
<!-- No news today -->
{% endfor %}
</div>
</div>
</section>
{% endblock %}

3
feedler/tests.py Normal file
View file

@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.

10
feedler/urls.py Normal file
View file

@ -0,0 +1,10 @@
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.conf.urls.i18n import i18n_patterns
from .api import api_router
# Mount the feedler read-only API (router defined in feedler.api).
urlpatterns = [
    url(r'^api/v2/', api_router.urls),
]

28
feedler/wagtail_hooks.py Normal file
View file

@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
from wagtail.contrib.modeladmin.options import (
ModelAdmin, modeladmin_register)
from .models import Entry, Stream
class EntryModelAdmin(ModelAdmin):
    """Wagtail admin listing for fetched feed entries."""
    model = Entry
    menu_icon = 'date'
    menu_order = 200
    add_to_settings_menu = False
    # Entries are not Wagtail pages; keep them out of the page explorer.
    exclude_from_explorer = True
    list_display = ('published', 'title', 'author', 'tags')
    list_filter = ('author', 'tags')
    search_fields = ('title', 'author', 'content', 'tags')


modeladmin_register(EntryModelAdmin)
class StreamModelAdmin(ModelAdmin):
    """Settings-menu admin for managing the configured Feedly streams."""
    model = Stream
    menu_icon = 'date'
    menu_order = 1000
    add_to_settings_menu = True
    exclude_from_explorer = True
    list_display = ('title', 'ident')


modeladmin_register(StreamModelAdmin)

View file

Before

Width:  |  Height:  |  Size: 4.1 KiB

After

Width:  |  Height:  |  Size: 4.1 KiB

View file

Before

Width:  |  Height:  |  Size: 382 KiB

After

Width:  |  Height:  |  Size: 382 KiB

View file

Before

Width:  |  Height:  |  Size: 2.1 KiB

After

Width:  |  Height:  |  Size: 2.1 KiB

View file

Before

Width:  |  Height:  |  Size: 4.1 KiB

After

Width:  |  Height:  |  Size: 4.1 KiB

View file

Before

Width:  |  Height:  |  Size: 2.1 KiB

After

Width:  |  Height:  |  Size: 2.1 KiB

View file

Before

Width:  |  Height:  |  Size: 55 KiB

After

Width:  |  Height:  |  Size: 55 KiB

View file

Before

Width:  |  Height:  |  Size: 91 KiB

After

Width:  |  Height:  |  Size: 91 KiB

View file

Before

Width:  |  Height:  |  Size: 86 KiB

After

Width:  |  Height:  |  Size: 86 KiB

View file

Before

Width:  |  Height:  |  Size: 70 KiB

After

Width:  |  Height:  |  Size: 70 KiB

View file

Before

Width:  |  Height:  |  Size: 333 KiB

After

Width:  |  Height:  |  Size: 333 KiB

View file

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 12 KiB

View file

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

View file

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 12 KiB

View file

Before

Width:  |  Height:  |  Size: 169 KiB

After

Width:  |  Height:  |  Size: 169 KiB

View file

Before

Width:  |  Height:  |  Size: 42 KiB

After

Width:  |  Height:  |  Size: 42 KiB

View file

Before

Width:  |  Height:  |  Size: 42 KiB

After

Width:  |  Height:  |  Size: 42 KiB

Some files were not shown because too many files have changed in this diff Show more