Skip to content
Commits on Source (662)
[bumpversion]
current_version = 0.9.0
commit = True
[bumpversion:file:web/opdm_service/__init__.py]
/.gitlab
/compose
/config/.dcenv
/config/.env
/docs
/resources
/venv
.dockerignore
.gitignore
.gitlab-ci.yml
.tmuxinator.yml
.python-version
docker-compose.yml
docker-compose.*.yml
Dockerfile
dump*
# http://editorconfig.org
# Top-most EditorConfig file: editors stop searching parent directories.
root = true
# Defaults for every file in the project.
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
# Python, reStructuredText and ini files: 4-space indents.
[*.{py,rst,ini}]
indent_style = space
indent_size = 4
[*.py]
# NOTE(review): the four keys below (line_length, known_first_party,
# multi_line_output, default_section) are isort options, not EditorConfig
# properties -- they look pasted in from an isort configuration; confirm
# and move them to the proper isort config file.
line_length=120
known_first_party=project
multi_line_output=3
default_section=THIRDPARTY
# Web and data formats: 2-space indents.
[*.{html,css,scss,json,yml}]
indent_style = space
indent_size = 2
# Markdown uses trailing whitespace for hard line breaks; keep it.
[*.md]
trim_trailing_whitespace = false
# Makefiles require tab indentation.
[Makefile]
indent_style = tab
[nginx.conf]
indent_style = space
indent_size = 2
......@@ -5,8 +5,9 @@ __pycache__
# Env file
env
*.env
# Lo
# Logs
*.log
pip-log.txt
......@@ -37,11 +38,20 @@ node_modules/
# osx .DS_Store
.DS_Store
.editorconfig
.gitattributes
/venv
dump.rdb
dump.sql
dump*
.ipynb_checkpoints
.python-version
*.zip
resources/data/bdap_anagrafe_enti.csv
resources/data/parsers
resources/data/out
resources/data/items.json
/resources/data/discrepancies.csv
/resources/data/cache
docker-compose.*.yml
# GitLab CI pipeline: global configuration.
# NOTE(review): indentation appears to have been stripped in this view, and
# the duplicate top-level `image:`/`services:` keys below suggest two
# revisions of the file were merged together -- confirm against the
# repository copy before relying on this text.
image: docker:git
services:
- docker:dind
# Pipeline stages, executed in this order.
stages:
- build
- test
- release
- deploy
# NOTE(review): duplicate `image:`/`services:` keys (see above); most YAML
# parsers silently keep only the last occurrence.
image: python:3.6-stretch
services:
- postgres:latest
variables:
CONTAINER_IMAGE: ${CI_REGISTRY}/${CI_PROJECT_PATH}:latest
DOCKER_DRIVER: overlay2
# Postgres service credentials, taken from CI/CD variables.
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASS}
# Keep pip downloads inside the project dir so the CI cache can retain them.
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache"
# NOTE(review): `build:` here looks like a stray key from the merge; the
# cache definition below appears meant to be shared, keyed per project.
build:
cache:
key: "virtualenv-$CI_PROJECT_NAME"
paths:
- .cache/
- venv/
# ---------------------------------------------------------------------------
# build stage
# ---------------------------------------------------------------------------
# Creates the virtualenv (shared via the CI cache), installs the project
# requirements and runs a `safety` security check on them.
# Skipped on scheduled pipelines.
build virtualenv:
stage: build
variables:
# DATABASE_URL: "sqlite://:memory:"
DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASS}@postgres/${POSTGRES_DB}"
script:
- pip install virtualenv
- virtualenv venv
- source venv/bin/activate
# Show which python/pip are in use, for debugging CI issues.
- echo "$(python -V) from $(which python) - $(pip -V)"
- pip install pip-tools safety
- ./install-requirements.sh
# Check installed dependencies for known security vulnerabilities.
- safety check
except:
- schedules
# ---------------------------------------------------------------------------
# test stage
# ---------------------------------------------------------------------------
# Runs the Django test suite with coverage. Skipped on scheduled pipelines.
# NOTE(review): the script mixes venv-based test/coverage steps with docker
# build/push steps -- looks like two revisions of this job merged together;
# confirm which set of steps is current.
django tests:
stage: test
variables:
DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASS}@postgres/${POSTGRES_DB}"
DJANGO_SETTINGS_MODULE: "config.settings"
before_script:
- source venv/bin/activate
- echo "$(python -V) from $(which python) - $(pip -V)"
script:
- docker login -u gitlab-ci-token -p ${CI_JOB_TOKEN} registry.gitlab.depp.it
- docker build -t ${CONTAINER_IMAGE} ./web
- docker push ${CONTAINER_IMAGE}
- coverage run manage.py test && coverage report --omit="*/test*"
except:
- schedules
# NOTE(review): bare `django_test:` key with no body -- probably a leftover
# job name from a previous revision.
django_test:
# Lint job; failures are tolerated (allow_failure: true) so style issues do
# not block the pipeline.
flake8:
stage: test
before_script:
- pip install -r requirements/lint.txt
script:
- docker login -u gitlab-ci-token -p ${CI_JOB_TOKEN} registry.gitlab.depp.it
- docker run -e DATABASE_URL=spatialite:///test --rm ${CONTAINER_IMAGE} python manage.py test
- docker run -e DATABASE_URL=spatialite:///test --rm ${CONTAINER_IMAGE} python manage.py test popolo
# - docker run -e DATABASE_URL=spatialite:///test --rm ${CONTAINER_IMAGE} python manage.py test opdmetl
- flake8
allow_failure: true
except:
- schedules
# NOTE(review): bare `rancher_staging:` key -- leftover job name from a
# previous revision of this file.
rancher_staging:
# ---------------------------------------------------------------------------
# deploy stage
# ---------------------------------------------------------------------------
# Deploys the develop/master build to the staging environment.
# NOTE(review): this job carries duplicate `image:` and `environment:` keys,
# and its script mixes rancher-cli steps with docker-machine/docker-compose
# steps -- two merged revisions; confirm which deploy path is current.
deploy to staging:
image: tmaier/docker-compose:18.06
stage: deploy
image: tagip/rancher-cli
environment:
name: staging
url: http://staging.service.opdm.openpolis.io/admin/
cache: {}
variables:
DEBUG: "on"
# TLS client-certificate auth against the remote docker daemon.
DOCKER_TLS_VERIFY: "1"
DOCKER_HOST: "tcp://${STAGING_HOST_IP}:2376"
DOCKER_CERT_PATH: "certs"
DOMAINS: ${DOMAINS_STAGING}
USE_EMAIL_SMTP: "off"
script:
- echo "Deploy to staging server"
- export SECRET_KEY="${STAGING_SECRET_KEY}"
- export ALLOWED_HOSTS=staging.service.opdm.openpolis.io
# Upgrade the web service in the rancher-managed opdm-service stack.
- rancher
--url ${RANCHER_URL}
--access-key ${RANCHER_STAGING_ACCESS_KEY}
--secret-key ${RANCHER_STAGING_SECRET_KEY}
--env op-staging --debug
up -d --pull --confirm-upgrade --stack opdm-service --force-recreate web
# Restart nginx so it picks up the recreated web containers.
- rancher
--url ${RANCHER_URL}
--access-key ${RANCHER_STAGING_ACCESS_KEY}
--secret-key ${RANCHER_STAGING_SECRET_KEY}
--env op-staging --debug
restart opdm-service/nginx
# Write the docker TLS client certs from CI variables (removed at the end).
- mkdir $DOCKER_CERT_PATH
- echo "$CA" > $DOCKER_CERT_PATH/ca.pem
- echo "$CLIENT_CERT" > $DOCKER_CERT_PATH/cert.pem
- echo "$CLIENT_KEY" > $DOCKER_CERT_PATH/key.pem
- docker build --compress -t openpolis/opdm/opdm-service:latest .
- docker-compose down
- docker-compose up -d --build
- docker exec opdm-service_web_1 python manage.py makemigrations popolo
- docker exec opdm-service_web_1 python manage.py migrate popolo
- docker exec opdm-service_web_1 python manage.py collectstatic --noinput
- docker exec opdm-service_web_1 python manage.py migrate
- rm -rf $DOCKER_CERT_PATH
only:
- master
except:
- schedules
environment:
name: staging
url: https://${DOMAINS_STAGING}
# NOTE(review): bare `rancher_production:` key -- leftover job name from a
# previous revision of this file.
rancher_production:
# Scheduled job: drops and recreates the staging `opdm` database from a
# fixed dump file, after terminating any open connections.
# NOTE(review): duplicate `image:` keys and a `production` environment on a
# job that operates on the STAGING host/database -- merged revisions;
# confirm the intended image and environment.
reset_staging_db:on-schedule:
image: tmaier/docker-compose:18.06
stage: deploy
image: tagip/rancher-cli
environment:
name: production
url: http://service.opdm.openpolis.io/v1
cache: {}
variables:
DEBUG: "off"
DOCKER_TLS_VERIFY: "1"
DOCKER_HOST: "tcp://${STAGING_HOST_IP}:2376"
DOCKER_CERT_PATH: "certs"
DOMAINS: ${DOMAINS_STAGING}
script:
# Write the docker TLS client certs from CI variables.
- mkdir $DOCKER_CERT_PATH
- echo "$CA" > $DOCKER_CERT_PATH/ca.pem
- echo "$CLIENT_CERT" > $DOCKER_CERT_PATH/cert.pem
- echo "$CLIENT_KEY" > $DOCKER_CERT_PATH/key.pem
# Kill open connections, then rebuild the DB from the dump.
- docker exec -u postgres opdm-service_postgres_1 psql -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'opdm';"
- docker exec -u postgres opdm-service_postgres_1 bash -c "cd ~/data && dropdb opdm && createdb opdm && psql opdm < opdm_staging_20181114.sql"
only:
- schedules
# Manual production deploy (when: manual), master branch only.
# NOTE(review): the script mixes rancher-cli and docker-compose steps --
# two merged revisions; confirm which deploy path is current.
deploy to production:
image: tmaier/docker-compose:18.06
stage: deploy
cache: {}
variables:
DEBUG: "off"
DOCKER_TLS_VERIFY: "1"
DOCKER_HOST: "tcp://${PRODUCTION_HOST_IP}:2376"
DOCKER_CERT_PATH: "certs"
DOMAINS: ${DOMAINS_PRODUCTION}
script:
- echo "Deploy to production server"
- export SECRET_KEY="${PRODUCTION_SECRET_KEY}"
# NOTE(review): ALLOWED_HOSTS points at the STAGING hostname in the
# production job -- looks like a copy-paste error; verify.
- export ALLOWED_HOSTS=staging.service.opdm.openpolis.io
# Upgrade the web service in the rancher-managed opdm-service stack.
- rancher
--url ${RANCHER_URL}
--access-key ${RANCHER_PRODUCTION_ACCESS_KEY}
--secret-key ${RANCHER_PRODUCTION_SECRET_KEY}
--env op-production --debug
up -d --pull --confirm-upgrade --stack opdm-service --force-recreate web
# Restart nginx so it picks up the recreated web containers.
- rancher
--url ${RANCHER_URL}
--access-key ${RANCHER_PRODUCTION_ACCESS_KEY}
--secret-key ${RANCHER_PRODUCTION_SECRET_KEY}
--env op-production --debug
restart opdm-service/nginx
# Write the docker TLS client certs from CI variables (removed at the end).
- mkdir $DOCKER_CERT_PATH
- echo "$CA" > $DOCKER_CERT_PATH/ca.pem
- echo "$CLIENT_CERT" > $DOCKER_CERT_PATH/cert.pem
- echo "$CLIENT_KEY" > $DOCKER_CERT_PATH/key.pem
- docker build -t openpolis/opdm/opdm-service:latest .
- docker-compose down
- docker-compose up -d --build --force-recreate
- docker exec opdm-service_web_1 python manage.py makemigrations popolo
- docker exec opdm-service_web_1 python manage.py migrate popolo
- docker exec opdm-service_web_1 python manage.py migrate
- docker exec opdm-service_web_1 python manage.py collectstatic --noinput
- rm -rf $DOCKER_CERT_PATH
only:
- master
except:
- schedules
when: manual
tags:
- docker
environment:
name: production
url: https://${DOMAINS_PRODUCTION}
......@@ -30,8 +30,7 @@ post: pg_ctl -D /usr/local/var/postgres stop
windows:
- shell_plus:
- source venv/bin/activate
- cd web
- python manage.py shell_plus
- python manage.py shell_plus --ipython
- free_shell:
- source venv/bin/activate
- django_popolo:
......@@ -42,3 +41,6 @@ windows:
- git status
- redis_server:
- redis-server
- solr_server:
# start solr in foreground, using a custom path
- solr start -f -s /var/lib/solr/cores/
# Changelog
# OPDM Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
......@@ -6,6 +7,123 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
## [Unreleased]
### Added
- data changelog and main changelog urls and views added and linked to
in about page
- verification of administrative history of an institution from minint
## [1.0.0]
### Added
- import of ownerships from atoka's API
- /keyevents endpoint added in API, to handle KeyEvents
- /organizations API endpoint manages the `key_events` relations
- organizations loader created under `etl/loaders`
- JSONArrayExtractor created under `etl/extractors`
- `project.api_v1.etl.transformations.json2opdm` package added with
ETL classes dedicated to processes reading JSON content into OPDM (parsers)
- memberships/ PUT, POST and PATCH handle parameters from UI (#76)
- import_persons_memberships_from_op task implemented;
accepts parameters to:
- import from different institutions
- handles pagination and limits
- handle updates and full imports
- Op2OpdmETL implemented into etl.transformations;
Handles different transform for persons and memberships,
needs to be used with the low-level interface.
- n_memberships and n_active_memberships computed fields added to organizations list
- classification_id filter added to /organizations endpoint
- Script to remove organs with no memberships in organizations having
more than 2 organs
- AKA api and tests implemented
- akas model implemented
- import_minint_year task to import from Minint yearly upgrades
- role_types endpoint added to API
- import role_types from google docs
- import_orgs_from_bdap management task revised
- new columns from BDAP csv file
- forma_giuridica_op build rules implemented
- akas app added to project (to manage similarities)
- solr added to stack to implement search endpoints
- /memberships endpoint accepts now write operations
### Changed
- solr index changed in order to handle textual fields as single words,
using no unicode (diacritics); text_with_diacritics field type
introduced in schema.xml;
- solr deploy changed, default dir is now
/opt/solr/server/solr/mycores/opdm
- import_op_location_id_from_op has an --api-filters option,
to restrict the field of application of the procedure
- import_orgs_from_bdap refactored;
import utils moved to core.py;
lookup and update strategies are now parametric.
- data structure for get_organs_dict_* core methods get a consiglio_metropolitano key,
disambiguating it from consiglio, which refers to consiglio provinciale, when
the two are present for the same op_id, or istat code, or province id.
- script to add Government and Council organs to local institutions corrected;
it now provides the founding_date and dissolution_date, instead of start_date and end_date.
- update_strategy can now be specified for memberships import operations
- CF is computed and added in a post_save signal handler
- import_persons task now accepts persons-update-strategy arguments
- memberships loader now accepts upload_strategy (keep_old)
- AKAs hi and lo thresholds defined in settings
- core package refactored, all logic shared in imports is here
- org_id and org_scheme added to dataframe for Organizations
- PopoloPDLoader removed and unified into PopoloLoader
- refactoring ETL classes moved into etl.transformations
- deleted old and unreliable management tasks to import from OP; exception trapping in MembershipLoader improved
- PopoloPersonLoader and PopoloMembershipLoader added to separate loading phase;
- OPAPIPagedExtractor added to available extractors
- build_charge_descr_from_opapi_data method added to core.LabelsBuilder class;
- update_or_create_person_from_item method uses DB transactions;
- synch with solr is handled at low level, in a transactional way;
- search_person substituted with strategy-specific methods;
- PopoloPDLoader added to differentiate from PopoloLoader.
PopoloPDLoader uses Pandas as source of the data to load,
while PopoloLoader uses a generic iterator.
- All persons identifiers are added to solr index (as string).
Fields name are extracted from the `scheme` (OP_ID_s, ...).
- HAYSTACK_SIGNAL_PROCESSOR value is read from env.
Defaults to BaseSignalProcessor, so that manager tasks importing data
must handle DB-solr synchronization at low level.
- import of minintakas from OP creates AKA objects
- memberships import management task renamed
- minintakas import management task name changed
- Logging for RoleType.DoesNotExist exception improved
- exact_name and parent_identifier filters added to areas endpoint
- Search accepts birth_location_area as parameter (integer)
- Profession and Educational levels serializers and views added
- About page links to documentation corrected; swagger is now used
- Organization filters corrected and improved
- Django version pin-pointed
- When comparing objects returned from a POST or PUT request,
complex objects (Person, Area, ...) have an id in the original request,
and returns a dictionary.
- Posts and Memberships relations adjusted to reflect popolo standards
- All references to rancher removed from docs
- Posts and Memberships changed in imports
- write API (creation and update), implemented for Person, Organization and Area
- django-popolo and django-etl git urls commented in `requirements.txt`,
as they're installed separately in the docker image build process.
Developers need to install them manually.
- Organization, Person and Area writer serializers create and
update methods collected in a dedicated mixin.
- `detail_route` and `list_route` decorators substituted with `action`,
- `autocompleter` custom action added to `areas`
- `AreaAutocompleterSerializer` serializer implemented;
- `id` is now explicitly shown in Generic Relation Serializers
### Fixed
- HAYSTACK_SIGNAL_PROCESSOR set to disabled default processor, so that tests
do not raise errors generated from missing solr in test environment
- Data-migrations added to correct errors generated by
organization imports;
- typo in Task `post_save` signal corrected
- tabular.html template now handles the case of null `original.ct_id`
## [0.9.0]
### Added
- `former_parents` field added to `/areas/ID/` detail API view
......
Guglielmo Celata
Gabriele Lucci
\ No newline at end of file
# OPDM Changelog
All notable changes to the data in OPDM will be documented here.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
## [Unreleased]
### Added
- public organization's ownerships (and owned organizations) from ATOKA
- administrative history of an organ from MININT:
- memberships' end date and reason
- electoral lists
## [2019-01-09]
### Fixed
- person's other_names synced from
# Application image for the OPDM web service: slim Debian with Python 3.6.
FROM python:3.6-slim

# Update, upgrade and install useful tools and set aliases
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get -qy update \
    && apt-get install -qqy apt-utils \
    && apt-get -qqy upgrade \
    && apt-get install -qqqy --no-install-recommends \
        gcc \
        git \
        libspatialite5 \
        locales \
        python-dev \
        libxml2-dev libxslt-dev \
        tmux \
        less \
        vim \
    && rm -rf /var/lib/apt/lists/*
RUN echo 'alias ll="ls -l"' >> ~/.bashrc \
    && echo 'alias la="ls -la"' >> ~/.bashrc

# add it locale (list of locales to generate is read from locales.txt)
COPY locales.txt /etc/locale.gen
RUN locale-gen

# Create `/app` directory
RUN mkdir -p /app
WORKDIR /app

# Install projects requirements first, so this layer stays cached across
# source-only rebuilds.
COPY requirements/*.txt /app/requirements/
COPY install-requirements.sh /app/
RUN pip3 install --upgrade pip pip-tools && ./install-requirements.sh

# check for new changes in django-popolo and reinstall it if necessary
# (ADD-ing the branch API response busts the build cache whenever the branch
# moves).
ADD https://api.github.com/repos/openpolis/django-popolo/branches/db_reset /dev/null
# The bare git:// URL was not a valid pip VCS URL (pip requires the git+
# prefix) and GitHub no longer serves the git:// protocol; use git+https.
RUN pip3 install --exists-action=w -e git+https://github.com/openpolis/django-popolo@db_reset#egg=django-popolo

# check for new changes in opdm-etl
ADD "https://gitlab.depp.it/openpolis/opdm/opdm-etl/commits/master?format=atom" /dev/null
RUN pip3 install --exists-action=w -e git+https://gitlab.depp.it/openpolis/opdm/opdm-etl.git@master#egg=opdm-etl

# check for new changes in op-task-manager-project
ADD "https://gitlab.depp.it/openpolis/op-task-manager-project/commits/dev-opdm?format=atom" /dev/null
RUN pip3 install --exists-action=w -e git+https://gitlab.depp.it/openpolis/op-task-manager-project.git@dev-opdm#egg=taskmanager

# remove gcc and build dependencies to keep image small
RUN apt-get purge -y --auto-remove gcc python-dev

# copy the project in app
COPY . /app/

# remove locales, as not needed any longer
RUN rm /app/locales.txt

# create directory for the uwsgi spooler
RUN mkdir -p /var/lib/uwsgi
# Openpolis DataManager Service
[![pipeline status](https://gitlab.depp.it/openpolis/opdm/opdm-service/badges/develop/pipeline.svg)](https://gitlab.depp.it/openpolis/opdm/opdm-service/commits/develop)
[![coverage report](https://gitlab.depp.it/openpolis/opdm/opdm-service/badges/develop/coverage.svg)](https://gitlab.depp.it/openpolis/opdm/opdm-service/commits/develop)
The Openpolis DataManager service fetches interesting data around
the world, mixes and matches it and provides it back to all
those in need of it.
See `web/` path for the source code of this project.
See `project/` path for the source code of this project.
See `docs/` for documentation.
## Development
Development is performed on local workstations, without requiring
Docker.
......@@ -23,26 +27,40 @@ and it should be generated starting from `.env.sample`.
Variables in the linux environment at execution time override those
read from `.env`.
A running postgres database is required somewhere,
as well as a running redis-server (to store sessions).
A running postgres database is required somewhere, as well as a running
redis-server (to store sessions).
Connections to these servers should be defined in the `.env` file, as
`DATABASE_URL` and `REDIS_URL` values.
To start developing, clone this repository,
get into the `web` directory, then:
To start developing, clone this repository, get into the `project` directory,
then:
cd web
# generate (and modify) the .env file
cp .env.sample .env
# install and configure pyenv
# install python 3.6.5 version, using pyenv
pyenv install 3.6.5
# define python 3.6.5 as local, in the directory
pyenv local 3.6.5
# create and activate a virtualenv
python3 -m venv venv
python -m venv venv
source venv/bin/activate
# install requirements
pip install pip-tools
pip install --upgrade pip
./install-requirements.sh
# install local packages
# install local packages (overwrite)
pip install -e /Users/gu/Workspace/django-popolo
pip install -e /Users/gu/Workspace/opdm-etl
pip install -e /Users/gu/Workspace/op-task-manager-project
# install other useful packages (docs-writing, ipython, jupyter ...)
pip install Sphinx sphinx_rtd_theme ipython jupyter
......@@ -56,16 +74,17 @@ get into the `web` directory, then:
# now the server can be run
python manage.py runserver
### Restore development session
`tmuxinator` can be used to restore a development session.
The `.tmuxinator.yml` file contains all information needed to start
the session.
## Deploy
The stack is defined in `docker-compose.yml` file.
It uses:
......@@ -75,7 +94,7 @@ It uses:
- a cache system, at least for sessions (redis)
### Simple deploy (docker-compose)
### Deploy on a docker-machine server
Environment variables need to be defined:
......@@ -83,39 +102,35 @@ Environment variables need to be defined:
export POSTGRES_USER=opdm
export POSTGRES_PASS=opdm_pass
The web image must be built
This can be done using the dmctl utility:
dm ls # list of all docker machines
dmctl set op-staging-1 ls # env setup for the op-staging-1 machine
docker build -t registry.gitlab.depp.it/openpolis/opdm/opdm-service:latest web
The web image must be built
in case of a remote deploy (`docker-machine`) it must be pushed
to the gitlab registry:
docker build --compress -t openpolis/opdm/opdm-service:latest # build image
docker login registry.gitlab.depp.it
docker push registry.gitlab.depp.it/openpolis/opdm-service:latest
To deploy on a local or on a remote machine (using `docker-machine`):
dc up -d
dc exec web python manage.py [TASK]
...
dc up -d # start all services as daemon on the remote machine
dc stands for `docker-compose` here, a proper installation should
automatically provide you the shortcut.
To restart all services, after a change in `docker-compose.yml`:
dc up -d
**IMPORTANT**: Images need to be rebuilt whenever there are changes
**IMPORTANT**: The web app image need to be rebuilt whenever there are changes
in the source code.
dc up --build
To enter a shell in the container of the web service:
dc exec web /bin/bash
dc exec web bash
This can be done also to enter the other containers, but it shouldn't
be necessary.
......@@ -137,62 +152,34 @@ The source of inspiration for *dockerizing* this django app was:
https://www.capside.com/labs/deploying-full-django-stack-with-docker-compose/
### Advanced deploy (rancher-compose)
To deploy in the right rancher **environment**, the correct
API KEYs must be set in the rancher UI and then used in the
`rancher-compose` invocation.
`rancher-compose` (rc) uses the `docker-compose.yml` definitions
to deploy the stack to the specified environment of the
rancher-managed infrastructure.
rc --url http://rancher.openpolis.it \
--access-key $ACCESS_KEY \
--secret-key $SECRET_KEY \
up -d
The stack needs to be created manually, before the CI/CD integration
kicks in, so that the environment can be used.
### CI/CD Integration with GitLab
### CI/CD Integration with gitlab
When a push to the gitlab.depp.it repository is performed:
- the `web` service image is built from web/Dockerfile,
- the image is pushed to gitlab.depp.it's own docker registry
- a test is performed on the runner
- the web service is updated (image recreation is forced)
- the nginx service is restarted
- tests[^1] are performed on the runner
- if tests succeed and the branch is `develop`, deploy latest revision to [`staging`][opdm-service environments] using docker compose
- if tests succeed and the branch is `master`, deploy latest revision to [`production`][opdm-service environments] using docker compose
## Testing
To start tests regarding the service itself:
python project/manage.py test
To start tests regarding the models (popolo instance):
python project/manage.py test popolo
## Testing
To extract a ``coverage`` report:
coverage run --source=./,../django-popolo/ manage.py test popolo
coverage report -m
To tests the service:
But this makes no real sense within this project, at the moment.
python manage.py test
## Operations
Management tasks and other operations can be performed within the shell
of deployed web services, executing `/bin/bash`:
To extract a [``coverage``][coverage.py] report:
rancher env ls # list of all environments
rancher --env op-staging ps # processes in given environment
rancher --env op-staging exec -it opdm-service/web /bin/bash # shell into the service
coverage run manage.py test
coverage report
## License and Authors
See LICENSE.txt for the license this software is released under.
See authors in CONTRIBUTORS.txt
[^1]: unittest, [flake8](http://flake8.pycqa.org/) for common errors and PEP8 compliancy, [safety](https://pyup.io/safety/) for security checks by pyup
[opdm-service environments]: https://gitlab.depp.it/openpolis/opdm/opdm-service/environments
[coverage.py]: https://coverage.readthedocs.io
#!/usr/bin/env bash
#
# Simple script to manage project dependencies.
# Requires pip-tools: https://github.com/jazzband/pip-tools/
#
# It uses `pip-compile` to generate up-to-date requirements lists.
# Every command-line argument passed to this script is forwarded to
# `pip-compile` as an option. The `--generate-hashes` option is enforced.
# Use option `--upgrade` (or `-U`) to also update all dependencies.
#
# Run `pip-compile --help` to see a complete list of possible options.

# Abort early with a helpful message when pip-tools is not installed.
if ! [ -x "$(command -v pip-compile)" ]; then
    >&2 echo 'Command `pip-compile` is not installed.' \
        'Please, install `pip-tools` module using `pip install pip-tools` and try again. '
    exit 1
fi

# Forward the caller's arguments, always appending --generate-hashes.
# A bash array (instead of word-splitting an unquoted string) keeps
# arguments that contain spaces intact.
args=("$@" --generate-hashes)

# Make pip-compile write a reproducible "how to regenerate" header.
export CUSTOM_COMPILE_COMMAND="./$(basename "$0") --upgrade"

# Compile each requirements spec with the same options.
for spec in requirements/base.in requirements/test.in requirements/lint.in; do
    pip-compile "${args[@]}" "${spec}"
done

unset CUSTOM_COMPILE_COMMAND
FROM tutum/nginx

# Drop the stock default vhost and add convenience shell aliases in a single
# layer. (The original file ran `rm` on the same path in two consecutive RUN
# instructions; the second would fail the build because the file is already
# gone, so the removal runs exactly once here.)
RUN rm /etc/nginx/sites-enabled/default \
    && echo 'alias ll="ls -l"' >> ~/.bashrc \
    && echo 'alias la="ls -la"' >> ~/.bashrc

# Install the project's server definitions (COPY is preferred over ADD for
# plain local directories).
COPY sites-enabled/ /etc/nginx/sites-enabled
# HTTP virtual host proxying to the `web` application container.
# NOTE(review): this block appears to be two merged revisions (duplicate
# server_name directives, and both proxy_pass and uwsgi_pass present in the
# same location, plus a diff hunk marker below) -- confirm against the
# repository copy before using.
server {
listen 80;
server_name service.opdm.openpolis.io;
server_name localhost service.opdm.openpolis.io staging.service.opdm.openpolis.io;
charset utf-8;
location /static {
......@@ -17,10 +16,16 @@ server {
}
location / {
# NOTE(review): proxy_pass (HTTP upstream) and uwsgi_pass (uwsgi protocol)
# both target web:8000 below; only one can be in effect -- pick one.
proxy_pass http://web:8000;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
# Generous timeouts for long-running management/API requests.
uwsgi_read_timeout 600;
proxy_read_timeout 300;
proxy_connect_timeout 300;
proxy_redirect off;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
uwsgi_pass web:8000;
include /etc/nginx/uwsgi_params;
}
}
FROM solr
# MAINTAINER is deprecated in the Dockerfile reference; use a LABEL instead.
LABEL maintainer="Guglielmo Celata <guglielmo@openpolis.it>"

# Install the opdm core configuration and properties.
COPY config /opt/solr/server/solr/mycores/opdm/conf
COPY core.properties /opt/solr/server/solr/mycores/opdm/

# Become root to fix ownership of the core, create its data dir, and set up
# a home directory with shell aliases for the solr user...
USER root
RUN chown -R solr:solr /opt/solr/server/solr/mycores/opdm \
    && mkdir -p /opt/solr/server/solr/mycores/opdm/data \
    && chown -R solr:solr /opt/solr/server/solr/mycores/opdm/data \
    && mkdir -p /home/solr \
    && echo 'alias ll="ls -l"' >> /home/solr/.bashrc \
    && echo 'alias la="ls -la"' >> /home/solr/.bashrc \
    && chown -R solr:solr /home/solr
# ...then drop back to the unprivileged solr user.
USER solr

# Run solr in the foreground so the container stays attached to it.
CMD ["/opt/solr/bin/solr", "-f"]
# Set of Catalan contractions for ElisionFilter
# TODO: load this as a resource from the analyzer and sync it in build.xml
d
l
m
n
s
t
# Set of French contractions for ElisionFilter
# TODO: load this as a resource from the analyzer and sync it in build.xml
l
m
t
qu
n
s
j
d
c
jusqu
quoiqu
lorsqu
puisqu
# Set of Irish contractions for ElisionFilter
# TODO: load this as a resource from the analyzer and sync it in build.xml
d
m
b
# Set of Italian contractions for ElisionFilter
# TODO: load this as a resource from the analyzer and sync it in build.xml
c
l
all
dall
dell
nell
sull
coll
pell
gl
agl
dagl
degl
negl
sugl
un
m
t
s
v
d