Commits on Source (16)
@@ -63,3 +63,5 @@ docker-compose.*.yml
/resources/data/partecipazioni_mef_amm_centrali.csv
/resources/data/partecipazioni_mef.csv
/resources/data/partecipazioni_mef_amm_centrali_not_in_opdm.csv
/resources/data/partecipazioni_mef_regioni.csv
/resources/data/partecipazioni_mef_regioni_not_in_opdm.csv
@@ -7,7 +7,8 @@ stages:
image: python:3.6-stretch
services:
- postgres:latest
- mdillon/postgis:9.6-alpine
# - postgres:latest
variables:
@@ -18,61 +19,63 @@ variables:
POSTGRES_PASSWORD: ${POSTGRES_PASS}
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache"
cache:
key: "virtualenv-$CI_PROJECT_NAME"
paths:
- .cache/
- venv/
# Anchors
.pull_cache_job: &pull_cache_job
cache: # Pull virtual environment from cache
key: "virtualenv-$CI_PROJECT_NAME"
policy: pull
paths:
- venv/
before_script:
- source venv/bin/activate # Activate virtual environment
- pip freeze # List installed packages in virtualenv
# ---------------------------------------------------------------------------
# build stage
# ---------------------------------------------------------------------------
# Build: build virtualenv job
build virtualenv:
stage: build
variables:
# DATABASE_URL: "sqlite://:memory:"
DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASS}@postgres/${POSTGRES_DB}"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
cache:
key: "virtualenv-$CI_PROJECT_NAME"
paths:
- .cache/pip
- venv/
script:
- pip install virtualenv
- virtualenv venv
- source venv/bin/activate
- echo "$(python -V) from $(which python) - $(pip -V)"
- pip install pip-tools safety
- ./install-requirements.sh
- safety check
- pip install virtualenv safety pip-tools
- virtualenv venv # Create virtual environment
- source venv/bin/activate # Activate virtual environment
- safety check -r requirements.txt # Run a safety check on requirements.txt
- pip-sync requirements.txt # Install project requirements inside venv
- pip freeze # List installed packages in venv
except:
- schedules
# ---------------------------------------------------------------------------
# test stage
# ---------------------------------------------------------------------------
django tests:
# Test: flake8 job
flake8:
stage: test
variables:
DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASS}@postgres/${POSTGRES_DB}"
DJANGO_SETTINGS_MODULE: "config.settings"
before_script:
- source venv/bin/activate
- echo "$(python -V) from $(which python) - $(pip -V)"
<<: *pull_cache_job
script:
- coverage run manage.py test && coverage report --omit="*/test*"
- flake8
allow_failure: true
except:
- schedules
flake8:
# Test: django tests job
django tests:
stage: test
before_script:
- pip install -r requirements/lint.txt
<<: *pull_cache_job
variables:
DATABASE_URL: "postgis://${POSTGRES_USER}:${POSTGRES_PASS}@mdillon__postgis/${POSTGRES_DB}"
DJANGO_SETTINGS_MODULE: "config.settings"
script:
- flake8
allow_failure: true
- apt-get update
- apt-get install -y --allow-unauthenticated --no-install-recommends binutils libproj-dev gdal-bin
- coverage run manage.py test
- coverage report --omit="*/test*"
except:
- schedules
# ---------------------------------------------------------------------------
# deploy stage
# ---------------------------------------------------------------------------
# Deploy: deploy to staging job
deploy to staging:
image: tmaier/docker-compose:18.06
stage: deploy
@@ -124,7 +127,8 @@ reset_staging_db:on-schedule:
- docker exec -u postgres opdm-service_postgres_1 bash -c "cd ~/data && dropdb opdm && createdb opdm && psql opdm < opdm_staging_20181114.sql"
only:
- schedules
# Deploy: deploy to production job
deploy to production:
image: tmaier/docker-compose:18.06
stage: deploy
......
@@ -5,6 +5,17 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [1.1.9]
### Added
- the script that computes participation levels now starts from a broader
  criterion, selecting all organizations whose TIPOLOGIA_IPA_BDAP
  classification is set to PA
- the atoka extractor considers additional CFs in identifiers with scheme
  ALTRI_CF_ATOKA when creating the batches of tax_ids to fetch
- the atoka extractor extracts shares even when the percentage is null
- the organization lookup in opdm core OrganizationUtils also searches
  identifiers within the ALTRI_CF_ATOKA identifiers
## [1.1.8]
......
@@ -5,6 +5,7 @@ ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get -qy update \
&& apt-get install -qqy apt-utils \
&& apt-get -qqy upgrade \
&& apt-get install -y --no-install-recommends binutils libproj-dev gdal-bin \
&& apt-get install -qqqy --no-install-recommends \
gcc \
git \
@@ -29,13 +30,12 @@ RUN mkdir -p /app
WORKDIR /app
# Install projects requirements
COPY requirements/*.txt /app/requirements/
COPY install-requirements.sh /app/
RUN pip3 install --upgrade pip pip-tools && ./install-requirements.sh
COPY requirements.txt /app/
RUN pip3 install --upgrade pip pip-tools && pip-sync
# check for new changes in django-popolo and reinstall it if necessary
ADD https://api.github.com/repos/openpolis/django-popolo/branches/db_reset /dev/null
RUN pip3 install --exists-action=w -e git://github.com/openpolis/django-popolo@db_reset#egg=django-popolo
ADD "https://gitlab.depp.it/openpolis/django-popolo/commits/master?format=atom" /dev/null
RUN pip3 install --exists-action=w -e git+https://gitlab.depp.it/openpolis/django-popolo@master#egg=django-popolo
# check for new changes in opdm-etl
ADD "https://gitlab.depp.it/openpolis/opdm/opdm-etl/commits/master?format=atom" /dev/null
......
@@ -13,7 +13,7 @@ services:
- redis:redis
- solr:solr
environment:
- DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASS}@postgres/${POSTGRES_DB}
- DATABASE_URL=postgis://${POSTGRES_USER}:${POSTGRES_PASS}@postgres/${POSTGRES_DB}
- DEBUG=${DEBUG}
- HAYSTACK_SIGNAL_PROCESSOR=project.api_v1.core.PersonOnlySignalProcessor
- DEBUG_TOOLBAR_INTERNAL_IPS=172.20.0.*
......
#!/usr/bin/env bash
if [ -x "$(command -v pip-sync)" ]; then
if [[ -x "$(command -v pip-sync)" ]]; then
pip-sync requirements/*.txt
else
echo 'Command `pip-sync` is not installed. Falling back to `pip install`. '
......
@@ -3,4 +3,4 @@
Openpolis Data Manager service package (backend)
"""
__version__ = '1.1.8'
__version__ = '1.1.9'
@@ -1507,7 +1507,7 @@ class OrganizationUtils(object):
index.update_object(item)
@classmethod
def org_anagraphical_lookup(cls, item, logger, current=False):
def org_anagraphical_lookup(cls, item, logger=None, current=False):
"""anagraphical lookup strategy implementation
:param item: the item to look up in the DB
@@ -1518,9 +1518,11 @@ class OrganizationUtils(object):
org_id = 0
basic_filters = {"name__iexact": item["name"], "identifier": item["identifier"]}
discrimination_filter = None
if current:
basic_filters.update({"dissolution_date__isnull": True})
discrimination_filter = {"founding_date": item.get("founding_date", None)}
if 'founding_date' in item:
discrimination_filter = {"founding_date": item.get("founding_date")}
filters = basic_filters
try:
@@ -1529,13 +1531,17 @@ class OrganizationUtils(object):
except Organization.DoesNotExist:
pass
except Organization.MultipleObjectsReturned:
filters.update(discrimination_filter)
try:
org = Organization.objects.get(**filters)
org_id = org.id
except Organization.DoesNotExist:
logger.warning("Could not find organization by name, cf and founding date: {0}".format(item))
except Organization.MultipleObjectsReturned:
if 'founding_date' in item:
filters.update(discrimination_filter)
try:
org = Organization.objects.get(**filters)
org_id = org.id
except Organization.DoesNotExist:
if logger:
logger.warning("Could not find organization by name, cf and founding date: {0}".format(item))
except Organization.MultipleObjectsReturned:
org_id = -1 * Organization.objects.filter(**filters).count()
else:
org_id = -1 * Organization.objects.filter(**filters).count()
return org_id
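Taken together, the hunks above give org_anagraphical_lookup a three-way return convention: a positive id for a unique match, 0 when nothing matches, and the negative count of candidates when the lookup stays ambiguous even after discriminating on founding_date. A minimal caller sketch, with illustrative values (the dict keys mirror the filters built above):

from project.api_v1.core import OrganizationUtils

item = {
    "name": "ACME S.P.A.",          # compared case-insensitively (name__iexact)
    "identifier": "00123456789",    # illustrative CF
    "founding_date": "1995-01-01",  # optional discriminator on multiple matches
}
org_id = OrganizationUtils.org_anagraphical_lookup(item, current=True)
if org_id > 0:
    print("unique match: organization {0}".format(org_id))
elif org_id == 0:
    print("no organization matched")
else:
    print("ambiguous lookup: {0} candidates".format(-org_id))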
@@ -1570,7 +1576,19 @@ class OrganizationUtils(object):
org = Organization.objects.get(**filters)
org_id = org.id
except Organization.DoesNotExist:
pass
if not identifier_scheme:
del filters['identifier']
filters.update({
'identifiers__scheme': 'ALTRI_CF_ATOKA',
'identifiers__identifier__icontains': item['identifier']
})
try:
org = Organization.objects.get(**filters)
org_id = org.id
except Organization.DoesNotExist:
pass
except Organization.MultipleObjectsReturned:
org_id = -1 * Organization.objects.filter(**filters).count()
except Organization.MultipleObjectsReturned:
org_id = -1 * Organization.objects.filter(**filters).count()
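The fallback added here only fires when no identifier_scheme is given and the main-identifier lookup misses: the identifier filter is dropped and the query retries against ALTRI_CF_ATOKA identifiers with an icontains match. Since ALTRI_CF_ATOKA stores several CFs joined by commas (see the test fixture later in this diff), the match is essentially a case-insensitive substring test; a small sketch of that semantics, with illustrative values:

# Illustrative ALTRI_CF_ATOKA value: several CFs joined by commas.
altri_cf_atoka = "01234567890,09876543210,11122233344"

def cf_in_altri_cf(cf, joined=altri_cf_atoka):
    # What identifiers__identifier__icontains amounts to in the hunk above.
    # Note: a substring test would also match a CF embedded in a longer one;
    # splitting on commas would make the comparison exact.
    return cf.lower() in joined.lower()

assert cf_in_altri_cf("09876543210")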
@@ -1580,7 +1598,7 @@
def org_lookup(cls, item, strategy, **kwargs):
"""
:param item:
:param strategy: lookup strategy (anagraphical, identifier, mixed, anagraphical_current, ...)
:param strategy: lookup strategy (anagraphical, identifier, mixed_current, anagraphical_current, ...)
:param kwargs: other params
- identifier_scheme
- logger
......
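For reference, a hypothetical call through org_lookup, the dispatching method shown above; the strategy names come from its docstring, while the dispatch internals are elided by this diff:

import logging

from project.api_v1.core import OrganizationUtils

logger = logging.getLogger(__name__)
item = {"name": "ACME S.P.A.", "identifier": "00123456789"}  # illustrative
org_id = OrganizationUtils.org_lookup(item, "anagraphical_current", logger=logger)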
@@ -35,9 +35,6 @@ class AreaTestCase(
),
"is_provincial_capital": faker.pybool(),
"inhabitants": faker.pyint(),
"geom": "string",
"gps_lat": "85.342000",
"gps_lon": "27.717200",
"start_date": "1970-01-01",
"end_date": "1982-01-01",
"end_reason": "end reason...",
......
@@ -5,9 +5,9 @@ from codicefiscale import codicefiscale
from django.test.testcases import TestCase
from unittest.mock import MagicMock, patch
from popolo.tests.factories import AreaFactory, PersonFactory
from popolo.tests.factories import AreaFactory, PersonFactory, OrganizationFactory
from project.api_v1.core import PersonUtils, millis
from project.api_v1.core import PersonUtils, millis, OrganizationUtils
from project.atoka.connections import AtokaConn, AtokaObjectDoesNotExist, AtokaMultipleObjectsReturned, \
AtokaResponseError
from faker import Factory
@@ -25,6 +25,198 @@ class PackageMethodsTestCase(TestCase):
self.assertEqual(t, t1)
class OrganizationUtilsTestCase(TestCase):
def test_org_anagraphical_lookup(self):
"""Test that an organization is found with anagraphical lookup
"""
org = OrganizationFactory(founding_date=faker.date(pattern="%Y-%m-%d", end_datetime="-27y"))
item = {
'name': org.name.upper(),
'identifier': org.identifier,
'founding_date': org.founding_date,
}
org_id = OrganizationUtils.org_anagraphical_lookup(
item
)
self.assertNotEqual(org_id, 0)
def test_org_anagraphical_lookup_multiple(self):
"""Multiple organizations with same name/id, having different founding_date
"""
name = faker.company()
identifier = faker.ssn()
org_a = OrganizationFactory(
name=name,
identifier=identifier,
founding_date=faker.date(pattern="%Y-%m-%d", end_datetime="-25y"),
dissolution_date='2010-01-01',
)
org_b = OrganizationFactory(
name=name,
identifier=identifier,
founding_date=faker.date(pattern="%Y-%m-%d", end_datetime="-10y"),
dissolution_date=None
)
# multiple organizations found when founding_date is not given (id < 0)
item = {
'name': org_a.name.upper(),
'identifier': org_a.identifier,
}
org_id = OrganizationUtils.org_anagraphical_lookup(item)
self.assertLess(org_id, 0)
# correct organization found when founding_date is given
item = {
'name': org_a.name.upper(),
'identifier': org_a.identifier,
'founding_date': org_a.founding_date,
}
org_id = OrganizationUtils.org_anagraphical_lookup(
item,
)
self.assertNotEqual(org_id, org_b.id)
self.assertEqual(org_id, org_a.id)
def test_org_anagraphical_lookup_current(self):
"""Test that the current organization is correctly found
"""
current_org = OrganizationFactory(dissolution_date=None)
item = {
'name': current_org.name.upper(),
'identifier': current_org.identifier,
'founding_date': current_org.founding_date,
}
past_org = OrganizationFactory(
dissolution_date=faker.date(pattern="%Y-%m-%d", end_datetime="-2y"),
**item
)
# correct organization found when current specified
org_id = OrganizationUtils.org_anagraphical_lookup(
item, current=True
)
self.assertNotEqual(org_id, past_org.id)
self.assertEqual(org_id, current_org.id)
# multiple organizations found when current is not specified (id < 0)
org_id = OrganizationUtils.org_anagraphical_lookup(
item
)
self.assertLess(org_id, 0)
def test_org_identifier_lookup_with_scheme(self):
"""Test that the current organization is correctly found
when the lookup stategy uses a known identifier
"""
org = OrganizationFactory(dissolution_date=None)
org.add_identifier(scheme='TEST', identifier='123')
item = {
'name': org.name,
'identifiers': [
{'scheme': 'TEST', 'identifier': '123'},
{'scheme': 'OP_ID', 'identifier': '143098fg'}
]
}
org_id = OrganizationUtils.org_identifier_lookup(
item, 'TEST'
)
self.assertNotEqual(org_id, 0)
def test_org_identifier_lookup_with_scheme_fails_not_found(self):
"""Test that the organization is not found (id == 0)
"""
org = OrganizationFactory(dissolution_date=None)
org.add_identifier(scheme='TEST', identifier='123')
item = {
'name': org.name,
'identifiers': [
{'scheme': 'TEST', 'identifier': '124'},
{'scheme': 'OP_ID', 'identifier': '143098fg'}
]
}
org_id = OrganizationUtils.org_identifier_lookup(
item, 'TEST'
)
self.assertEqual(org_id, 0)
def test_org_identifier_lookup_with_scheme_fails_multiple(self):
"""Test that the organization is found multiple times (id < 0)
"""
name = faker.company()
scheme = 'TEST'
identifier = '123'
org_a = OrganizationFactory(name=name, dissolution_date=None)
org_a.add_identifier(scheme=scheme, identifier=identifier)
org_b = OrganizationFactory(name=name, dissolution_date=None)
org_b.add_identifier(scheme=scheme, identifier=identifier)
item = {
'name': name,
'identifiers': [
{'scheme': scheme, 'identifier': identifier},
{'scheme': 'OP_ID', 'identifier': '143098fg'}
]
}
org_id = OrganizationUtils.org_identifier_lookup(
item, scheme
)
self.assertLess(org_id, 0)
def test_org_identifier_lookup_no_scheme(self):
"""Test that the organization is correctly found
when looked up using the main identifier (CF)
"""
identifier = faker.ssn()
org = OrganizationFactory(
dissolution_date=None,
identifier=identifier
)
# correct organization found via the main identifier, with no identifier_scheme
item = {
'name': org.name,
'identifier': identifier
}
org_id = OrganizationUtils.org_identifier_lookup(
item, identifier_scheme=None
)
self.assertNotEqual(org_id, 0)
def test_org_identifier_lookup_multiple_cfs(self):
"""Test that the organization is correctly found
when looked up using the main identifier (CF),
even in the case when the identifier is in the ALTRI_CF_ATOKA identifier
"""
main_identifier = faker.ssn()
other_identifiers = [faker.ssn() for _ in range(1, 4)]
org = OrganizationFactory(
dissolution_date=None,
identifier=main_identifier
)
org.add_identifier(scheme='ALTRI_CF_ATOKA', identifier=",".join(other_identifiers))
# correct organization found through ALTRI_CF_ATOKA, with no identifier_scheme
item = {
'name': org.name,
'identifier': other_identifiers[1]
}
org_id = OrganizationUtils.org_identifier_lookup(
item, identifier_scheme=None
)
self.assertNotEqual(org_id, 0)
class PersonUtilsTestCase(TestCase):
def test_verify_tax_id_with_atoka(self):
......
@@ -168,10 +168,10 @@ class AtokaOwnershipsExtractor(Extractor):
'name': sho['name'],
'last_updated': sho['lastUpdate'],
'atoka_id': sho['id'],
'percentage': sho['ratio'] * 100.
'percentage': sho.get('ratio', 0.) * 100.
}
for sho in filter(
lambda x: x['active'] is True and x['typeOfRight'] == 'proprietà' and 'ratio' in x,
lambda x: x['active'] is True and x['typeOfRight'] == 'proprietà',
r['shares']['sharesOwned']
)
)
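The net effect of this hunk: shares whose ratio is missing are no longer dropped by the filter, they are now emitted with a percentage of 0.0. A self-contained sketch of the new comprehension on an illustrative payload (field names as in the hunk above):

shares_owned = [
    {"active": True, "typeOfRight": "proprietà", "ratio": 0.4,
     "name": "Alpha", "lastUpdate": "2019-01-01", "id": "a1"},
    {"active": True, "typeOfRight": "proprietà",   # no 'ratio' key
     "name": "Beta", "lastUpdate": "2019-01-01", "id": "b2"},
]
extracted = [
    {
        "name": sho["name"],
        "last_updated": sho["lastUpdate"],
        "atoka_id": sho["id"],
        "percentage": sho.get("ratio", 0.) * 100.,
    }
    for sho in filter(
        lambda x: x["active"] is True and x["typeOfRight"] == "proprietà",
        shares_owned,
    )
]
assert extracted[1]["percentage"] == 0.0  # previously this share was skipped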
......
# -*- coding: utf-8 -*-
import itertools
import json
from popolo.models import Organization
@@ -29,7 +30,7 @@ class Command(LoggingBaseCommand):
nargs='*', metavar='CLASS',
dest="classifications", type=int,
help="Only process specified classifications "
"(by id, ex: Ministero, Consiglio Reg., Città Metrop.: 498,133,279)",
"(by id, ex: Ministero, Consiglio Reg., Città Metrop.: 498 133 279)",
)
parser.add_argument(
"--tax-ids",
@@ -105,11 +106,23 @@ class Command(LoggingBaseCommand):
self.logger.info('processing {0} organizations classified as {1}'.format(
organizations_group['n'], organizations_group['descr']
))
organization_group_qs = organizations_qs.filter(
classifications__classification_id=organizations_group['id']
)
# extract iterators for identifiers and multiple cfs
cfs = organization_group_qs.values_list('identifier', flat=True).distinct().iterator()
multiple_cfs = (o.split(",") for o in organization_group_qs.filter(
identifiers__scheme='ALTRI_CF_ATOKA'
).distinct().values_list('identifiers__identifier', flat=True).iterator())
# generate batches of batchsize tax_ids to query atoka's endpoint,
# flattening the iterators of CFs
batches = batch_generator(
batchsize, organizations_qs.filter(
classifications__classification_id=organizations_group['id']
).values_list('identifier', flat=True).distinct().iterator()
batchsize, itertools.chain(
*multiple_cfs, cfs
)
)
group_counter = 0
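batch_generator is called here but not defined in this diff; a plausible sketch, assuming it yields successive lists of at most batchsize items. Note how itertools.chain(*multiple_cfs, cfs) first flattens the per-organization CF lists, then appends the main identifiers:

import itertools

def batch_generator(batchsize, iterable):
    # Hypothetical implementation: group an iterable into lists of
    # at most batchsize elements, until it is exhausted.
    iterator = iter(iterable)
    while True:
        batch = list(itertools.islice(iterator, batchsize))
        if not batch:
            return
        yield batch

# e.g. list(batch_generator(2, "abcde")) -> [['a', 'b'], ['c', 'd'], ['e']]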
......
@@ -49,7 +49,7 @@ class Command(LoggingBaseCommand):
c = cs[shares_level]
# tag organizations at level shares_level,
# select all those that appear as owned, but not in the levels
# select all those that appear as owned, at the previous level
orgs = Organization.objects.filter(
ownerships_as_owned__isnull=False
).distinct().filter(
@@ -86,14 +86,18 @@ class Command(LoggingBaseCommand):
# start from current organizations with a tax_id, classified as PA
# in the TIPOLOGIA_IPA_BDAP scheme; the previous, narrower filter
# (excluding **private** FORMA_GIURIDICA_OP classifications) is kept below, commented out
# orgs = Organization.objects.filter(
# classifications__classification__scheme='FORMA_GIURIDICA_OP'
# ).current().exclude(
# classifications__classification_id__in=[
# 11, 20, 24, 29, 48, 69, 83, 295, 321, 346, 403, 621, 941, 730, 1182, 1183, 1184, 1185, 1186, 1187,
# 1188, 1190, 1189, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202
# ]
# ).filter(identifier__isnull=False)
orgs = Organization.objects.filter(
classifications__classification__scheme='FORMA_GIURIDICA_OP'
).current().exclude(
classifications__classification_id__in=[
11, 20, 24, 29, 48, 69, 83, 295, 321, 346, 403, 621, 941, 730, 1182, 1183, 1184, 1185, 1186, 1187,
1188, 1190, 1189, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202
]
).filter(identifier__isnull=False)
classifications__classification__scheme='TIPOLOGIA_IPA_BDAP',
classifications__classification__code='PA'
).current().filter(identifier__isnull=False)
n_orgs = orgs.count()
for n, org in enumerate(orgs, start=1):
org.add_classification_rel(cs[0])
......
# This is an implicit value, here for clarity
--index-url https://pypi.python.org/simple/
# Django (py 2.7, 3.5, 3.6) - A high-level Python Web framework
Django<2 # https://www.djangoproject.com/
django-extensions # Extensions for Django
@@ -17,7 +14,6 @@ djangorestframework-jwt # JWT auth endpoints
django-filter # QuerySet filtering from URL parameters
drf-yasg # Swagger/OpenAPI generator
drf-rw-serializers # Read and write serializers for DRF
flex # Swagger/OpenAPI validator
# Database drivers
@@ -46,6 +42,17 @@ django-uwsgi==0.2.2 # uwsgi debug
# VCS packages
# Requirements to be pulled from version control systems
# Reference: https://pip.pypa.io/en/stable/reference/pip_install/#vcs-support
-e git+https://github.com/openpolis/django-popolo@db_reset#egg=django-popolo
-e git+https://gitlab.depp.it/openpolis/django-popolo@master#egg=django-popolo
-e git+https://gitlab.depp.it/openpolis/opdm/opdm-etl.git@master#egg=opdm-etl
-e git+https://gitlab.depp.it/openpolis/op-task-manager-project.git@dev-opdm#egg=taskmanager
# flake8: Style Guide Enforcement
flake8==3.5.0
# flake8-docstrings # Checks PEP 257 compliance
# Test requirements
coverage # Report code coverage
factory_boy # A versatile test fixtures replacement
Faker # Generates fake data
safety # Checks installed dependencies for known security vulnerabilities.
@@ -2,88 +2,99 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# ./compile-requirements.sh --upgrade
# pip-compile --output-file requirements.txt requirements.in
#
-e git+https://github.com/openpolis/django-popolo@db_reset#egg=django-popolo
-e git+https://gitlab.depp.it/openpolis/django-popolo@master#egg=django-popolo
-e git+https://gitlab.depp.it/openpolis/op-task-manager-project.git@dev-opdm#egg=taskmanager
-e git+https://gitlab.depp.it/openpolis/opdm/opdm-etl.git@master#egg=opdm-etl
asn1crypto==0.24.0 # via cryptography
attrs==18.2.0 # via automat, service-identity, twisted
automat==0.7.0 # via twisted
certifi==2018.11.29 # via requests
cffi==1.11.5 # via cryptography
cffi==1.12.1 # via cryptography
chardet==3.0.4 # via requests
click==7.0 # via flex
click==7.0 # via flex, safety
constantly==15.1.0 # via twisted
coreapi==2.3.3 # via drf-yasg
coreschema==0.0.4 # via coreapi, drf-yasg
cryptography==2.4.2 # via pyopenssl, service-identity
coverage==4.5.2
cryptography==2.5 # via pyopenssl, service-identity
cssselect==1.0.3 # via parsel, scrapy
django-autoslug==1.9.3
django-autoslug==1.9.4
django-braces==1.13.0
django-cors-headers==2.4.0
django-crispy-forms==1.7.2
django-csv-export-view==1.0.0
django-debug-toolbar==1.11
django-environ==0.4.5
django-extensions==2.1.4
django-filter==2.0.0
django-extensions==2.1.5
django-filter==2.1.0
django-haystack==2.8.1
django-model-utils==3.1.2
django-redis==4.10.0
django-uwsgi==0.2.2
django==1.11.18
django==1.11.20
djangorestframework-jwt==1.11.0
djangorestframework==3.9.1
dparse==0.4.1 # via safety
drf-rw-serializers==1.0.1
drf-yasg==1.12.1
drf-yasg==1.13.0
elasticsearch==6.3.1
factory-boy==2.11.1
faker==1.0.2
flake8==3.5.0
flex==6.14.0
hyperlink==18.0.0 # via twisted
idna==2.8 # via cryptography, hyperlink, requests
idna==2.8 # via hyperlink, requests
incremental==17.5.0 # via twisted
inflection==0.3.1 # via drf-yasg
isodate==0.6.0 # via rdflib
itypes==1.1.0 # via coreapi
jinja2==2.10 # via coreschema
jsonpointer==1.14 # via flex
lxml==4.3.0 # via parsel, scrapy
jsonpointer==2.0 # via flex
lxml==4.3.1 # via parsel, scrapy
markdown==3.0.1
markupsafe==1.1.0 # via jinja2
mccabe==0.6.1 # via flake8
mistune==0.8.4
munch==2.3.2
numpy==1.16.0 # via pandas
pandas==0.23.4
numpy==1.16.1 # via pandas
packaging==19.0 # via dparse, safety
pandas==0.24.1
parsel==1.5.1 # via scrapy
psycopg2-binary==2.7.6.1
pyasn1-modules==0.2.3 # via service-identity
psycopg2-binary==2.7.7
pyasn1-modules==0.2.4 # via service-identity
pyasn1==0.4.5 # via pyasn1-modules, service-identity
pycodestyle==2.3.1 # via flake8
pycparser==2.19 # via cffi
pydispatcher==2.0.5 # via scrapy
pyflakes==1.6.0 # via flake8
pyhamcrest==1.9.0 # via twisted
pyjwt==1.7.1 # via djangorestframework-jwt
pyopenssl==18.0.0 # via scrapy
pyparsing==2.3.1 # via rdflib
pyopenssl==19.0.0 # via scrapy
pyparsing==2.3.1 # via packaging, rdflib
pysolr==3.8.1
python-codicefiscale==0.3.3
python-dateutil==2.7.5 # via pandas, python-codicefiscale
python-codicefiscale==0.3.4
python-dateutil==2.8.0 # via faker, pandas, python-codicefiscale
python-slugify==1.2.6 # via python-codicefiscale
pytz==2018.9 # via django, pandas, tzlocal
pyyaml==3.13 # via flex
pyyaml==3.13 # via dparse, flex
queuelib==1.5.0 # via scrapy
rdflib-jsonld==0.4.0
rdflib==4.2.2 # via rdflib-jsonld, sparqlwrapper
redis==3.0.1 # via django-redis
requests-toolbelt==0.8.0
redis==3.2.0 # via django-redis
requests-toolbelt==0.9.1
requests==2.21.0
rfc3987==1.3.8 # via flex
ruamel.yaml==0.15.86 # via drf-yasg
scrapy==1.5.1
ruamel.yaml==0.15.88 # via drf-yasg
safety==1.8.5
scrapy==1.6.0
service-identity==18.1.0 # via scrapy
six==1.12.0 # via automat, cryptography, django-extensions, drf-yasg, flex, isodate, munch, parsel, pyhamcrest, pyopenssl, python-dateutil, scrapy, w3lib
six==1.12.0 # via automat, cryptography, django-extensions, dparse, drf-yasg, faker, flex, isodate, munch, packaging, parsel, pyhamcrest, pyopenssl, python-dateutil, scrapy, w3lib
sparqlwrapper==1.8.2
sqlparse==0.2.4 # via django-debug-toolbar
strict-rfc3339==0.7 # via flex
text-unidecode==1.2 # via faker
twisted==18.9.0 # via scrapy
tzlocal==1.5.1
unidecode==1.0.23 # via python-slugify
......
# flake8: Style Guide Enforcement
flake8==3.5.0
# Plugins
# flake8-docstrings # Checks PEP 257 compliance
#
# This file is autogenerated by pip-compile
# To update, run:
#
# ./compile-requirements.sh --upgrade
#
flake8==3.5.0
mccabe==0.6.1 # via flake8
pycodestyle==2.3.1 # via flake8
pyflakes==1.6.0 # via flake8
# Put test requirements here...
coverage # Report code coverage
factory_boy # A versatile test fixtures replacement
Faker # Generates fake data
safety # Checks installed dependencies for known security vulnerabilities.
#
# This file is autogenerated by pip-compile
# To update, run:
#
# ./compile-requirements.sh --upgrade
#
certifi==2018.11.29 # via requests
chardet==3.0.4 # via requests
click==7.0 # via safety
coverage==4.5.2
dparse==0.4.1 # via safety
factory-boy==2.11.1
faker==1.0.1
idna==2.8 # via requests
packaging==18.0 # via dparse, safety
pyparsing==2.3.1 # via packaging
python-dateutil==2.7.5 # via faker
pyyaml==3.13 # via dparse
requests==2.21.0 # via safety
safety==1.8.4
six==1.12.0 # via dparse, faker, packaging, python-dateutil
text-unidecode==1.2 # via faker
urllib3==1.24.1 # via requests
[bumpversion]
current_version = 1.1.8
current_version = 1.1.9
commit = True
tag = True
tag_name = v{new_version}
......