Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update to py37 #759

Closed
wants to merge 5 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,4 @@
.noseids
Dockerfile
/env_api
/venv
23 changes: 17 additions & 6 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
dist: trusty
dist: xenial

env:
global:
Expand All @@ -14,24 +14,35 @@ services:
- redis-server

python:
- 3.4
- 3.7

addons:
postgresql: "9.4"
apt:
packages:
- postgresql-9.4-postgis-2.3
- postgresql-9.4-postgis-2.4

install:
- mkdir /tmp/elasticsearch
- wget -O - https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.3.2/elasticsearch-2.3.2.tar.gz | tar xz --directory=/tmp/elasticsearch --strip-components=1
- /tmp/elasticsearch/bin/elasticsearch --daemonize --path.data /tmp

- curl -s -O https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.1.0-amd64.deb
- sudo dpkg -i --force-confnew elasticsearch-7.1.0-amd64.deb
- echo -e '-XX:+DisableExplicitGC\n-Djdk.io.permissionsUseCanonicalPath=true\n-Dlog4j.skipJansi=true\n-server\n' | sudo tee -a /etc/elasticsearch/jvm.options
- sudo chown -R elasticsearch:elasticsearch /etc/default/elasticsearch
- sudo systemctl start elasticsearch

# - mkdir /tmp/elasticsearch
# - wget -O - https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.1.0-linux-x86_64.tar.gz | tar xz --directory=/tmp/elasticsearch --strip-components=1
# - /tmp/elasticsearch/bin/elasticsearch --daemonize --path.data /tmp
- make -f config/travis .build/dev-requirements.timestamp
- make -f config/travis install


before_script:
# - sudo apt-get -qq update
# - sudo apt-get install postgresql-9.4-postgis-2.3 postgresql-9.4-postgis-scripts postgis -y -q
- echo "create user \"www-data\" with password 'www-data;'" | psql -U postgres
- PGUSER=postgres USER=travis scripts/create_user_db_test.sh
- sleep 10
- curl http://localhost:9200/

script:
Expand Down
39 changes: 15 additions & 24 deletions Dockerfile.in
Original file line number Diff line number Diff line change
@@ -1,22 +1,24 @@
FROM docker.io/debian:jessie
FROM docker.io/debian:buster

ENV DEBIAN_FRONTEND noninteractive

ENV LC_ALL en_US.UTF-8

RUN echo 'APT::Install-Recommends "0";' > /etc/apt/apt.conf.d/50no-install-recommends
RUN echo 'APT::Install-Suggests "0";' > /etc/apt/apt.conf.d/50no-install-suggests

RUN set -x \
&& apt-get update \
&& apt-get -y upgrade \
&& apt-get -y --no-install-recommends install locales \
&& echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \
&& locale-gen en_US.UTF-8 \
&& dpkg-reconfigure locales \
&& /usr/sbin/update-locale LANG=en_US.UTF-8

COPY project.tar /tmp

WORKDIR /var/www/

RUN tar -xvf /tmp/project.tar && chown -R root:root /var/www

RUN set -x \
&& apt-get update \
&& apt-get -y upgrade \
&& apt-get -y install \
&& apt-get -y --no-install-recommends install \
python3 \
python3-chardet \
python3-colorama \
Expand All @@ -25,7 +27,7 @@ RUN set -x \
python3-requests \
python3-six \
python3-urllib3 \
libgeos-c1 \
libgeos-c1v5 \
libpq5 \
libffi6 \
make \
Expand All @@ -36,24 +38,13 @@ RUN set -x \
libpq-dev \
virtualenv \
gcc \
git \
locales \
&& echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \
&& locale-gen en_US.UTF-8 \
&& dpkg-reconfigure locales \
&& /usr/sbin/update-locale LANG=en_US.UTF-8 \
git

RUN set -x \
&& make -f config/dev install \
&& py3compile -f -X '^.*gevent/_util_py2.py$' .build/venv/ \
&& rm -fr .cache \
&& apt-get -y purge \
python3-dev \
python3-pip \
libgeos-dev \
libffi-dev \
libpq-dev \
virtualenv \
gcc \
git \
&& apt-get -y --purge autoremove \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
Expand Down
2 changes: 1 addition & 1 deletion c2corg_api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def ping_connection(dbapi_connection, connection_record, connection_proxy):
cursor = dbapi_connection.cursor()
try:
cursor.execute('SELECT 1')
except:
except: # noqa
# raise DisconnectionError - pool will try
# connecting again up to three times before raising.
raise exc.DisconnectionError()
Expand Down
1 change: 1 addition & 0 deletions c2corg_api/caching.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def create_region(name):
key_mangler=lambda key: '{0}:{1}:{2}'.format(KEY_PREFIX, name, key)
)


cache_document_cooked = create_region('cooked')
cache_document_detail = create_region('detail')
cache_document_listing = create_region('listing')
Expand Down
2 changes: 1 addition & 1 deletion c2corg_api/emails/email_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def get_translation(self, lang, key):
raise Exception('Bad language' + lang)
try:
return self._get_file_content(lang, key)
except:
except: # noqa
log.exception('The %s translation for %s could not be read' % (
lang, key))
return self._get_file_content('fr', key)
Expand Down
28 changes: 24 additions & 4 deletions c2corg_api/ext/colander_ext.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,27 @@
from geomet import wkb
from geoalchemy2.compat import buffer, bytes
import geojson
from geojson.validation import is_polygon, checkListOfObjects


# import from geojson
def _is_polygon(coords):
lengths = all(len(elem) >= 4 for elem in coords)
isring = all(elem[0] == elem[-1] for elem in coords)
return lengths and isring


def _checkListOfObjects(coord, pred): # noqa
""" This method provides checking list of geojson objects such Multipoint or
MultiLineString that each element of the list is valid geojson object.
This is helpful method for IsValid.
:param coord: List of coordinates
:type coord: list
:param pred: Predicate to check validation of each member in the coord
:type pred: function
:return: True if list contains valid objects, False otherwise
:rtype: bool
"""
return not isinstance(coord, list) or not all([pred(ls) for ls in coord])


class Geometry(SchemaType):
Expand Down Expand Up @@ -56,7 +76,7 @@ def deserialize(self, node, cstruct):
data = geojson.loads(cstruct)
except Invalid as exc:
raise exc
except:
except: # noqa
raise Invalid(node, 'Invalid geometry: %s' % cstruct)
if not isinstance(data, geojson.GeoJSON):
raise Invalid(node, 'Invalid geometry: %s' % cstruct)
Expand Down Expand Up @@ -110,7 +130,7 @@ def is_valid_geometry(obj):
return False

if isinstance(obj, geojson.MultiLineString) and \
checkListOfObjects(obj['coordinates'], lambda x: len(x) >= 2):
_checkListOfObjects(obj['coordinates'], lambda x: len(x) >= 2):
        # Each segment must have at least 2 positions
return False

Expand All @@ -129,7 +149,7 @@ def is_valid_geometry(obj):
return True

if isinstance(obj, geojson.MultiPolygon) and \
checkListOfObjects(obj['coordinates'], lambda x: is_polygon(x)):
_checkListOfObjects(obj['coordinates'], lambda x: _is_polygon(x)):
# the "coordinates" member must be an array
# of Polygon coordinate arrays
return False
Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
class BaseMixin(object):
__table_args__ = {'schema': schema}


Base = declarative_base(cls=BaseMixin)


Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/area.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
class _AreaMixin(object):
area_type = Column(area_type)


attributes = ['area_type']


Expand Down
2 changes: 2 additions & 0 deletions c2corg_api/models/article.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ class _ArticleMixin(object):
activities = Column(ArrayOfEnum(activity_type))
article_type = Column(enums.article_type)


attributes = ['categories', 'activities', 'article_type']


Expand Down Expand Up @@ -63,6 +64,7 @@ class ArchiveArticle(_ArticleMixin, ArchiveDocument):

__table_args__ = Base.__table_args__


schema_article_locale = schema_document_locale
schema_article_attributes = list(schema_attributes)
schema_article_attributes.remove('geometry')
Expand Down
2 changes: 2 additions & 0 deletions c2corg_api/models/association.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ class AssociationLog(Base):
DateTime(timezone=True), default=func.now(), nullable=False,
index=True)


schema_association = SQLAlchemySchemaNode(
Association,
# whitelisted attributes
Expand Down Expand Up @@ -349,6 +350,7 @@ def _get_associations_to_add(new_associations, current_associations):

return to_add


association_keys = {
'routes': ROUTE_TYPE,
'waypoints': WAYPOINT_TYPE,
Expand Down
2 changes: 2 additions & 0 deletions c2corg_api/models/book.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ class _BookMixin(object):
publication_date = Column(String(100))
langs = Column(ARRAY(String(2)))


attributes = ['author', 'editor', 'activities', 'url', 'isbn',
'book_types', 'nb_pages', 'publication_date', 'langs']

Expand Down Expand Up @@ -72,6 +73,7 @@ class ArchiveBook(_BookMixin, ArchiveDocument):

__table_args__ = Base.__table_args__


schema_book_locale = schema_document_locale
schema_book_attributes = list(schema_attributes)
schema_book_attributes.remove('geometry')
Expand Down
2 changes: 2 additions & 0 deletions c2corg_api/models/document.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,7 @@ class ArchiveDocumentLocale(Base, _DocumentLocaleMixin):
Base.__table_args__
)


# `geomet` does not support EWKB, so load geometries as WKB
Geometry.as_binary = 'ST_AsBinary'

Expand Down Expand Up @@ -401,6 +402,7 @@ def _almost_equals(self, geom, other_geom):

return g1.almost_equals(g2, decimals)


DocumentGeometry.lon_lat = column_property(
func.ST_AsGeoJSON(func.ST_Transform(DocumentGeometry.geom, 4326)),
deferred=True)
Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ def enum(name, types):
return Enum(
name=name, metadata=Base.metadata, schema=schema, *types)


quality_type = enum(
'quality_type', attributes.quality_types)
waypoint_type = enum(
Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/feed.py
Original file line number Diff line number Diff line change
Expand Up @@ -557,5 +557,6 @@ def is_linked_to_doc(image_in, document_id, document_type):

return False


# the document types that have no entry in the feed
NO_FEED_DOCUMENT_TYPES = [IMAGE_TYPE, USERPROFILE_TYPE, AREA_TYPE]
3 changes: 3 additions & 0 deletions c2corg_api/models/image.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ class ArchiveImage(_ImageMixin, ArchiveDocument):

__table_args__ = Base.__table_args__


# special schema for image locales: images can be created without title
schema_image_locale = SQLAlchemySchemaNode(
DocumentLocale,
Expand Down Expand Up @@ -155,6 +156,8 @@ class ArchiveImage(_ImageMixin, ArchiveDocument):
class SchemaImageList(MappingSchema):
images = SchemaNode(
Sequence(), schema_create_image, missing=None)


schema_create_image_list = SchemaImageList()


Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/route.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,7 @@ class _RouteLocaleMixin(object):

slackline_anchor2 = Column(String)


attributes_locales = [
'slope', 'remarks', 'gear', 'external_resources', 'route_history',
'slackline_anchor1', 'slackline_anchor2'
Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/topo_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ class _MapMixin(object):
scale = Column(map_scale)
code = Column(String)


attributes = [
'editor', 'scale', 'code'
]
Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/user_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ class _UserProfileMixin(object):
activities = Column(ArrayOfEnum(activity_type))
categories = Column(ArrayOfEnum(user_category))


attributes = ['activities', 'categories']


Expand Down
1 change: 1 addition & 0 deletions c2corg_api/models/xreport.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,6 +185,7 @@ class _XreportLocaleMixin(object):
# Conséquences physiques et autres commentaires
other_comments = Column(String)


attributes_locales = [
'place', 'route_study', 'conditions', 'training', 'motivations',
'group_management', 'risk', 'time_management', 'safety',
Expand Down
4 changes: 2 additions & 2 deletions c2corg_api/scripts/es/fill_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def main(argv=sys.argv):

def fill_index(session, batch_size=1000):
client = elasticsearch_config['client']
index_name = elasticsearch_config['index']
index_prefix = elasticsearch_config['index_prefix']

status = {
'start_time': datetime.now(),
Expand Down Expand Up @@ -80,7 +80,7 @@ def progress(count, total_count):

for doc in sync.get_documents(session, doc_type, batch_size,
ignore_redirects=True):
batch.add(to_search_document(doc, index_name))
batch.add(to_search_document(doc, index_prefix))

count += 1
progress(count, total)
Expand Down
Loading