From 9052685e573c8538ba4d35bdf3e3cf5ad90688ed Mon Sep 17 00:00:00 2001 From: cbeauchesne Date: Thu, 21 Nov 2019 14:20:20 +0100 Subject: [PATCH 1/5] Update to python3.7 --- Dockerfile.in | 34 ++++++++++--------- c2corg_api/tests/tweens/test_rate_limiting.py | 8 ++--- dev_api.Dockerfile | 28 +++++++-------- requirements.txt | 26 +++++++------- requirements_pip.txt | 2 +- 5 files changed, 49 insertions(+), 49 deletions(-) diff --git a/Dockerfile.in b/Dockerfile.in index fc9ef7662..ba33309e3 100644 --- a/Dockerfile.in +++ b/Dockerfile.in @@ -1,22 +1,25 @@ -FROM docker.io/debian:jessie +FROM docker.io/debian:buster ENV DEBIAN_FRONTEND noninteractive -ENV LC_ALL en_US.UTF-8 - -RUN echo 'APT::Install-Recommends "0";' > /etc/apt/apt.conf.d/50no-install-recommends -RUN echo 'APT::Install-Suggests "0";' > /etc/apt/apt.conf.d/50no-install-suggests - +RUN set -x \ + && apt-get update \ + && apt-get -y upgrade \ + && apt-get -y --no-install-recommends install locales \ + && echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \ + && locale-gen en_US.UTF-8 \ + && dpkg-reconfigure locales \ + && /usr/sbin/update-locale LANG=en_US.UTF-8 + COPY project.tar /tmp WORKDIR /var/www/ RUN tar -xvf /tmp/project.tar && chown -R root:root /var/www +# ICI CONTINUER RUN set -x \ - && apt-get update \ - && apt-get -y upgrade \ - && apt-get -y install \ + && apt-get -y --no-install-recommends install \ python3 \ python3-chardet \ python3-colorama \ @@ -25,7 +28,7 @@ RUN set -x \ python3-requests \ python3-six \ python3-urllib3 \ - libgeos-c1 \ + libgeos-c1v5 \ libpq5 \ libffi6 \ make \ @@ -36,12 +39,11 @@ RUN set -x \ libpq-dev \ virtualenv \ gcc \ - git \ - locales \ - && echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \ - && locale-gen en_US.UTF-8 \ - && dpkg-reconfigure locales \ - && /usr/sbin/update-locale LANG=en_US.UTF-8 \ + git + +# ICI CONTINUER + +RUN set -x \ && make -f config/dev install \ && py3compile -f -X '^.*gevent/_util_py2.py$' .build/venv/ \ && rm -fr .cache \ diff --git 
a/c2corg_api/tests/tweens/test_rate_limiting.py b/c2corg_api/tests/tweens/test_rate_limiting.py index e65aeb029..5397ee846 100644 --- a/c2corg_api/tests/tweens/test_rate_limiting.py +++ b/c2corg_api/tests/tweens/test_rate_limiting.py @@ -28,13 +28,13 @@ def setUp(self): # noqa def test_contributor(self, _send_email): self._set_user('contributor') self._test_requests() - _send_email.assert_call_once() + _send_email.assert_called_once() @patch('c2corg_api.emails.email_service.EmailService._send_email') def test_moderator(self, _send_email): self._set_user('moderator') self._test_requests() - _send_email.assert_call_once() + _send_email.assert_called_once() def _test_requests(self): limit = self.limit_robot if self.user.robot else \ @@ -100,7 +100,7 @@ def test_blocked(self, _send_email): self.assertFalse(self.user.blocked) self._wait() for i in range(0, self.limit): - self._update_document() + self._update_document(status=200) self.session.refresh(self.user) self.assertEqual( self.user.ratelimit_remaining, self.limit - 1 - i) @@ -111,7 +111,7 @@ def test_blocked(self, _send_email): # User has reached their max number of allowed rate limited windows # thus is now blocked: self.assertTrue(self.user.blocked) - _send_email.assert_called_once() + self.assertEqual(self.user.ratelimit_times, self.max_times + 1) self._update_document(status=403) def _create_document(self): diff --git a/dev_api.Dockerfile b/dev_api.Dockerfile index a3bf1edf0..95b80263e 100644 --- a/dev_api.Dockerfile +++ b/dev_api.Dockerfile @@ -1,13 +1,18 @@ -FROM docker.io/debian:jessie +FROM docker.io/debian:buster ENV DEBIAN_FRONTEND noninteractive -ENV LC_ALL en_US.UTF-8 +RUN set -x \ + && apt-get update \ + && apt-get -y upgrade \ + && apt-get -y --no-install-recommends install locales \ + && echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \ + && locale-gen en_US.UTF-8 \ + && dpkg-reconfigure locales \ + && /usr/sbin/update-locale LANG=en_US.UTF-8 RUN set -x \ - && apt-get update \ - && apt-get -y upgrade \ 
- && apt-get -y --no-install-recommends install \ + && apt-get -y --no-install-recommends install \ python3 \ python3-chardet \ python3-colorama \ @@ -16,7 +21,7 @@ RUN set -x \ python3-requests \ python3-six \ python3-urllib3 \ - libgeos-c1 \ + libgeos-c1v5 \ libpq5 \ libffi6 \ make \ @@ -29,20 +34,15 @@ RUN set -x \ libpq-dev \ virtualenv \ gcc \ - git \ - locales \ - && echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \ - && locale-gen en_US.UTF-8 \ - && dpkg-reconfigure locales \ - && /usr/sbin/update-locale LANG=en_US.UTF-8 + git COPY ./ /var/www/ WORKDIR /var/www/ RUN set -x \ - && make -f config/docker-dev install \ - && py3compile -f -X '^.*gevent/_util_py2.py$' .build/venv/ + && make -f config/docker-dev install \ + && py3compile -f -X '^.*gevent/_util_py2.py$' .build/venv/ ENV version="{version}" \ PATH=/var/www/.build/venv/bin/:$PATH diff --git a/requirements.txt b/requirements.txt index 32a776e71..e88402f00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,17 +1,18 @@ -alembic==0.8.8 +alembic==1.3.1 # last apscheduler==3.2.0 bcrypt==3.1.1 colander==1.7 +dogpile.cache==0.9.0 # Last elasticsearch==2.3.0 elasticsearch_dsl==2.0.0 geoalchemy2==0.4.0 geojson==1.3.3 geomet==0.1.1 -kombu==4.0.0 +kombu==4.6.6 # last # phpserialize is only required during the migration phpserialize==1.3.0 -psycopg2==2.7.3.2 -pyproj==1.9.5.1 +psycopg2==2.8.4 # last +pyproj==2.4.1 # Last pyramid-jwtauth==0.1.3 pyramid==1.7.3 pyramid_debugtoolbar==3.0.5 @@ -19,24 +20,21 @@ pyramid_mailer==0.14.1 pyramid_tm==1.0.1 python-json-logger==0.1.5 python-slugify==1.2.4 -redis==2.10.5 -requests==2.20.1 +redis==3.3.11 # last +requests==2.22.0 # Last setuptools==28.8.0 -Shapely==1.6.1 +Shapely==1.6.4.post2 # last SQLAlchemy==1.1.3 transaction==1.6.1 waitress==1.0.1 zope.sqlalchemy==0.7.7 + # gunicorn and related dependencies -gunicorn==19.6.0 -eventlet==0.20.0 -gevent==1.2a2 +gunicorn==20.0.0 # last +eventlet==0.25.1 # last +gevent==1.4.0 # Last gaiohttp-websocket==0.1.1 -# dogpile.cache -# 
needs: https://bitbucket.org/zzzeek/dogpile.cache/pull-requests/58 -git+https://bitbucket.org/tsauerwein/dogpile.cache.git@get_or_create_multi_should_cache - # ColanderAlchemy>=0.3.2 # needed for ColanderAlchemy: https://github.com/stefanofontanelli/ColanderAlchemy/pull/90 # + #91 diff --git a/requirements_pip.txt b/requirements_pip.txt index 433e33067..3877da207 100644 --- a/requirements_pip.txt +++ b/requirements_pip.txt @@ -1 +1 @@ -pip==9.0.1 +pip==19.3.1 From 9cf6d2d7d5eec5dac07b78d698e0e07b20783ca7 Mon Sep 17 00:00:00 2001 From: cbeauchesne Date: Fri, 22 Nov 2019 20:35:43 +0100 Subject: [PATCH 2/5] Update dependancies --- c2corg_api/ext/colander_ext.py | 26 +++++++++++-- requirements.txt | 68 +++++++++++++++++----------------- 2 files changed, 58 insertions(+), 36 deletions(-) diff --git a/c2corg_api/ext/colander_ext.py b/c2corg_api/ext/colander_ext.py index af868db1a..c99826fdc 100644 --- a/c2corg_api/ext/colander_ext.py +++ b/c2corg_api/ext/colander_ext.py @@ -7,7 +7,27 @@ from geomet import wkb from geoalchemy2.compat import buffer, bytes import geojson -from geojson.validation import is_polygon, checkListOfObjects + + +# import from geojson +def _is_polygon(coords): + lengths = all(len(elem) >= 4 for elem in coords) + isring = all(elem[0] == elem[-1] for elem in coords) + return lengths and isring + + +def _checkListOfObjects(coord, pred): + """ This method provides checking list of geojson objects such Multipoint or + MultiLineString that each element of the list is valid geojson object. + This is helpful method for IsValid. 
+ :param coord: List of coordinates + :type coord: list + :param pred: Predicate to check validation of each member in the coord + :type pred: function + :return: True if list contains valid objects, False otherwise + :rtype: bool + """ + return not isinstance(coord, list) or not all([pred(ls) for ls in coord]) class Geometry(SchemaType): @@ -110,7 +130,7 @@ def is_valid_geometry(obj): return False if isinstance(obj, geojson.MultiLineString) and \ - checkListOfObjects(obj['coordinates'], lambda x: len(x) >= 2): + _checkListOfObjects(obj['coordinates'], lambda x: len(x) >= 2): # Each segment must must have at least 2 positions return False @@ -129,7 +149,7 @@ def is_valid_geometry(obj): return True if isinstance(obj, geojson.MultiPolygon) and \ - checkListOfObjects(obj['coordinates'], lambda x: is_polygon(x)): + _checkListOfObjects(obj['coordinates'], lambda x: _is_polygon(x)): # the "coordinates" member must be an array # of Polygon coordinate arrays return False diff --git a/requirements.txt b/requirements.txt index e88402f00..3081d2c3a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,43 +1,44 @@ -alembic==1.3.1 # last -apscheduler==3.2.0 -bcrypt==3.1.1 -colander==1.7 -dogpile.cache==0.9.0 # Last -elasticsearch==2.3.0 -elasticsearch_dsl==2.0.0 -geoalchemy2==0.4.0 -geojson==1.3.3 -geomet==0.1.1 -kombu==4.6.6 # last +alembic==1.3.1 # last +apscheduler==3.6.3 # last +bcrypt==3.1.7 # last +colander==1.7 # last +dogpile.cache==0.9.0 # Last +elasticsearch==2.3.0 # Needs to update ES containers to ES 7.0 +elasticsearch_dsl==2.0.0 # Needs to update ES containers to ES 7.0 +geoalchemy2==0.4.0 # TODO 0.6.3, camptocamp SA dep, does not support 3.7 officialy +geojson==2.5.0 # last +geomet==0.2.1 # last, but does not support 3.7 officialy +kombu==4.6.6 # last # phpserialize is only required during the migration -phpserialize==1.3.0 -psycopg2==2.8.4 # last -pyproj==2.4.1 # Last -pyramid-jwtauth==0.1.3 -pyramid==1.7.3 -pyramid_debugtoolbar==3.0.5 
-pyramid_mailer==0.14.1 -pyramid_tm==1.0.1 -python-json-logger==0.1.5 -python-slugify==1.2.4 -redis==3.3.11 # last -requests==2.22.0 # Last -setuptools==28.8.0 -Shapely==1.6.4.post2 # last -SQLAlchemy==1.1.3 -transaction==1.6.1 -waitress==1.0.1 -zope.sqlalchemy==0.7.7 +phpserialize==1.3.0 # don't care, need to remove migration scripts +psycopg2==2.8.4 # last +pyproj==2.4.1 # Last +pyramid-jwtauth==0.1.3 # last, not maintained anymore, does not support 3.7 officialy +pyramid==1.10.4 # last +pyramid_debugtoolbar==4.5.1 # last +pyramid_mailer==0.15.1 # last, but does not support 3.7 officially ... +pyramid_tm==2.3 # last +python-json-logger==0.1.11 # last +python-slugify==4.0.0 # last +redis==3.3.11 # last +requests==2.22.0 # Last +setuptools==41.6.0 # last +Shapely==1.6.4.post2 # last +SQLAlchemy==1.1.3 # TODO 1.3.11 +transaction==2.4.0 # last +waitress==1.3.1 # last +zope.sqlalchemy==0.7.7 # TODO 1.2 # gunicorn and related dependencies -gunicorn==20.0.0 # last -eventlet==0.25.1 # last -gevent==1.4.0 # Last -gaiohttp-websocket==0.1.1 +gunicorn==20.0.0 # last +eventlet==0.25.1 # last +gevent==1.4.0 # Last +gaiohttp-websocket==0.1.1 # last, but very old ? 
# ColanderAlchemy>=0.3.2 # needed for ColanderAlchemy: https://github.com/stefanofontanelli/ColanderAlchemy/pull/90 # + #91 +# solution as now : ship this code into v6_api git+https://github.com/tsauerwein/ColanderAlchemy.git@c2corg # Cornice>1.2.1 @@ -51,6 +52,7 @@ git+https://github.com/c2corg/c2c_markdown.git@7012ce6 # for development use a local checkout # -e ../v6_common git+https://github.com/c2corg/v6_common.git@ca2a1be + # Discourse API client https://github.com/c2corg/pydiscourse/archive/ea03a3a.zip From de9c692043893bfea4b8c3e395ea9f9368ca4367 Mon Sep 17 00:00:00 2001 From: cbeauchesne Date: Wed, 25 Dec 2019 11:56:05 +0100 Subject: [PATCH 3/5] Update elasticsearch --- .gitignore | 1 + c2corg_api/scripts/es/fill_index.py | 4 +- c2corg_api/scripts/es/sync.py | 10 +- c2corg_api/scripts/initializees.py | 109 ++-- c2corg_api/search/__init__.py | 7 +- c2corg_api/search/advanced_search.py | 2 +- c2corg_api/search/mapping.py | 561 +++++++++--------- c2corg_api/search/mapping_types.py | 8 +- c2corg_api/search/mappings/area_mapping.py | 6 +- c2corg_api/search/mappings/article_mapping.py | 4 +- c2corg_api/search/mappings/book_mapping.py | 4 +- c2corg_api/search/mappings/image_mapping.py | 4 +- c2corg_api/search/mappings/outing_mapping.py | 4 +- c2corg_api/search/mappings/route_mapping.py | 4 +- .../search/mappings/topo_map_mapping.py | 4 +- c2corg_api/search/mappings/user_mapping.py | 4 +- .../search/mappings/waypoint_mapping.py | 4 +- c2corg_api/search/mappings/xreport_mapping.py | 4 +- c2corg_api/search/search.py | 12 +- c2corg_api/search/search_filters.py | 16 +- .../tests/scripts/es/test_fill_index.py | 6 +- c2corg_api/tests/scripts/es/test_syncer.py | 6 +- c2corg_api/tests/search/__init__.py | 8 +- .../tests/search/test_search_filters.py | 20 +- c2corg_api/tests/views/__init__.py | 12 +- c2corg_api/tests/views/test_user.py | 5 +- c2corg_api/tests/views/test_user_account.py | 2 +- c2corg_api/tests/views/test_user_profile.py | 2 +- 
c2corg_api/tests/views/test_waypoint.py | 2 +- c2corg_api/views/health.py | 3 +- config/docker-dev | 7 +- dev-requirements.txt | 4 +- docker-compose.yml | 21 +- es_migration/2017-03-29_slackline.py | 3 +- requirements.txt | 4 +- 35 files changed, 474 insertions(+), 403 deletions(-) diff --git a/.gitignore b/.gitignore index 25dceb329..7a4310f33 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ .noseids Dockerfile /env_api +/venv diff --git a/c2corg_api/scripts/es/fill_index.py b/c2corg_api/scripts/es/fill_index.py index f675b719f..7cc16aa38 100644 --- a/c2corg_api/scripts/es/fill_index.py +++ b/c2corg_api/scripts/es/fill_index.py @@ -52,7 +52,7 @@ def main(argv=sys.argv): def fill_index(session, batch_size=1000): client = elasticsearch_config['client'] - index_name = elasticsearch_config['index'] + index_prefix = elasticsearch_config['index_prefix'] status = { 'start_time': datetime.now(), @@ -80,7 +80,7 @@ def progress(count, total_count): for doc in sync.get_documents(session, doc_type, batch_size, ignore_redirects=True): - batch.add(to_search_document(doc, index_name)) + batch.add(to_search_document(doc, index_prefix)) count += 1 progress(count, total) diff --git a/c2corg_api/scripts/es/sync.py b/c2corg_api/scripts/es/sync.py index e53b480ce..414399f8e 100644 --- a/c2corg_api/scripts/es/sync.py +++ b/c2corg_api/scripts/es/sync.py @@ -279,14 +279,14 @@ def sync_documents(session, changed_documents, batch_size): def sync_deleted_documents(session, deleted_documents, batch_size): client = elasticsearch_config['client'] batch = ElasticBatch(client, batch_size) - index = elasticsearch_config['index'] + index_prefix = elasticsearch_config['index_prefix'] n = 0 with batch: for document_id, doc_type in deleted_documents: batch.add({ - '_index': index, + '_index': f"{index_prefix}_{doc_type}", '_id': document_id, - '_type': doc_type, + # '_type': doc_type, 'id': document_id, '_op_type': 'delete' }) @@ -363,10 +363,10 @@ def get_documents(session, doc_type, 
batch_size, document_ids=None, def create_search_documents(doc_type, documents, batch): to_search_document = search_documents[doc_type].to_search_document - index = elasticsearch_config['index'] + index_prefix = elasticsearch_config['index_prefix'] n = 0 for doc in documents: - batch.add(to_search_document(doc, index)) + batch.add(to_search_document(doc, index_prefix)) n += 1 log.info('Sent {} document(s) of type {}'.format(n, doc_type)) diff --git a/c2corg_api/scripts/initializees.py b/c2corg_api/scripts/initializees.py index 22c24a8c7..40c7dd407 100644 --- a/c2corg_api/scripts/initializees.py +++ b/c2corg_api/scripts/initializees.py @@ -1,16 +1,16 @@ import os import sys -from c2corg_api.search.mappings.area_mapping import SearchArea -from c2corg_api.search.mappings.article_mapping import SearchArticle -from c2corg_api.search.mappings.book_mapping import SearchBook -from c2corg_api.search.mappings.image_mapping import SearchImage -from c2corg_api.search.mappings.outing_mapping import SearchOuting -from c2corg_api.search.mappings.xreport_mapping import SearchXreport -from c2corg_api.search.mappings.route_mapping import SearchRoute -from c2corg_api.search.mappings.topo_map_mapping import SearchTopoMap -from c2corg_api.search.mappings.user_mapping import SearchUser -from c2corg_api.search.mappings.waypoint_mapping import SearchWaypoint +from c2corg_api.search.mappings.area_mapping import SearchArea, AREA_TYPE +from c2corg_api.search.mappings.article_mapping import SearchArticle, ARTICLE_TYPE +from c2corg_api.search.mappings.book_mapping import SearchBook, BOOK_TYPE +from c2corg_api.search.mappings.image_mapping import SearchImage, IMAGE_TYPE +from c2corg_api.search.mappings.outing_mapping import SearchOuting, OUTING_TYPE +from c2corg_api.search.mappings.xreport_mapping import SearchXreport, XREPORT_TYPE +from c2corg_api.search.mappings.route_mapping import SearchRoute, ROUTE_TYPE +from c2corg_api.search.mappings.topo_map_mapping import SearchTopoMap, MAP_TYPE +from 
c2corg_api.search.mappings.user_mapping import SearchUser, USERPROFILE_TYPE +from c2corg_api.search.mappings.waypoint_mapping import SearchWaypoint, WAYPOINT_TYPE from elasticsearch_dsl import Index from pyramid.paster import ( @@ -20,9 +20,23 @@ from pyramid.scripts.common import parse_vars -from c2corg_api.search.mapping import analysis_settings +from c2corg_api.search.mapping import es_index_settings from c2corg_api.search import configure_es_from_config, elasticsearch_config +# TODO : use from c2corg_api.search import search_documents + +_types = [ + (SearchArea, AREA_TYPE), + (SearchArticle, ARTICLE_TYPE), + (SearchBook, BOOK_TYPE), + (SearchImage, IMAGE_TYPE), + (SearchOuting, OUTING_TYPE), + (SearchXreport, XREPORT_TYPE), + (SearchRoute, ROUTE_TYPE), + (SearchTopoMap, MAP_TYPE), + (SearchUser, USERPROFILE_TYPE), + (SearchWaypoint, WAYPOINT_TYPE), +] def usage(argv): cmd = os.path.basename(argv[0]) @@ -46,45 +60,60 @@ def setup_es(): """Create the ElasticSearch index and configure the mapping. """ client = elasticsearch_config['client'] - index_name = elasticsearch_config['index'] + index_prefix = elasticsearch_config['index_prefix'] info = client.info() print('ElasticSearch version: {0}'.format(info['version']['number'])) - if client.indices.exists(index_name): - print('Index "{0}" already exists. To re-create the index, manually ' - 'delete the index and run this script again.'.format(index_name)) - print('To delete the index run:') - print('curl -XDELETE \'http://{0}:{1}/{2}/\''.format( - elasticsearch_config['host'], elasticsearch_config['port'], - index_name)) - sys.exit(0) + for klass, letter in _types: + index_name = f"{index_prefix}_{letter}" - index = Index(index_name) - index.settings(analysis=analysis_settings) + if client.indices.exists(index_name): + print('Index "{0}" already exists. 
To re-create the index, manually ' + 'delete the index and run this script again.'.format(index_name)) + print('To delete the index run:') + print('curl -XDELETE \'http://{0}:{1}/{2}/\''.format( + elasticsearch_config['host'], elasticsearch_config['port'], + index_name)) + sys.exit(0) - index.doc_type(SearchArea) - index.doc_type(SearchBook) - index.doc_type(SearchImage) - index.doc_type(SearchOuting) - index.doc_type(SearchXreport) - index.doc_type(SearchRoute) - index.doc_type(SearchTopoMap) - index.doc_type(SearchUser) - index.doc_type(SearchWaypoint) - index.doc_type(SearchArticle) + index = Index(index_name) + index.settings(**es_index_settings) - index.create() + index.document(klass) + index.create() + print('Index "{0}" created'.format(index_name)) - print('Index "{0}" created'.format(index_name)) + # index = Index(index_name) + # index.settings(**es_index_settings) + + # index.document(SearchArea) + # index.document(SearchBook) + # index.document(SearchImage) + # index.document(SearchOuting) + # index.document(SearchXreport) + # index.document(SearchRoute) + # index.document(SearchTopoMap) + # index.document(SearchUser) + # index.document(SearchWaypoint) + # index.document(SearchArticle) + + # index.create() + + # print('Index "{0}" created'.format(index_name)) def drop_index(silent=True): """Remove the ElasticSearch index. 
""" - index = Index(elasticsearch_config['index']) - try: - index.delete() - except Exception as exc: - if not silent: - raise exc + + index_prefix = elasticsearch_config['index_prefix'] + + for _, letter in _types: + index = Index(f"{index_prefix}_{letter}") + + try: + index.delete() + except Exception as exc: + if not silent: + raise exc diff --git a/c2corg_api/search/__init__.py b/c2corg_api/search/__init__.py index 8276a605b..7189138b4 100644 --- a/c2corg_api/search/__init__.py +++ b/c2corg_api/search/__init__.py @@ -35,7 +35,7 @@ elasticsearch_config = { 'client': None, - 'index': None, + 'index_prefix': None, 'host': None, 'port': None } @@ -53,7 +53,7 @@ def configure_es_from_config(settings): client = client_from_config(settings) connections.add_connection('default', client) elasticsearch_config['client'] = client - elasticsearch_config['index'] = settings['elasticsearch.index'] + elasticsearch_config['index_prefix'] = settings['elasticsearch.index'] elasticsearch_config['host'] = settings['elasticsearch.host'] elasticsearch_config['port'] = int(settings['elasticsearch.port']) @@ -77,8 +77,7 @@ def __init__(self, settings): def create_search(document_type): return Search( using=elasticsearch_config['client'], - index=elasticsearch_config['index'], - doc_type=search_documents[document_type]) + index=elasticsearch_config['index_prefix'] + "_" + document_type) def get_text_query(search_term, lang=None): diff --git a/c2corg_api/search/advanced_search.py b/c2corg_api/search/advanced_search.py index 3cf2dc450..7e1192104 100644 --- a/c2corg_api/search/advanced_search.py +++ b/c2corg_api/search/advanced_search.py @@ -22,7 +22,7 @@ def search(url_params, meta_params, doc_type): # only request the document ids from ES response = query.execute() document_ids = [int(doc.meta.id) for doc in response] - total = response.hits.total + total = response.hits.total.value return document_ids, total diff --git a/c2corg_api/search/mapping.py b/c2corg_api/search/mapping.py index 
40532a5a8..4f4d4c183 100644 --- a/c2corg_api/search/mapping.py +++ b/c2corg_api/search/mapping.py @@ -5,13 +5,19 @@ QEnumRange from c2corg_common.attributes import default_langs from c2corg_common.sortable_search_attributes import sortable_quality_types -from elasticsearch_dsl import DocType, String, MetaField, Long, GeoPoint +from elasticsearch_dsl import Document as DocType, Text as String, MetaField, \ + Long, GeoPoint, Keyword class BaseMeta: # disable the '_all' field, see - # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-all-field.html - all = MetaField(enabled=False) + # https://www.elastic.co/guide/en/elasticsearch/reference/2.4/mapping-all-field.html + + # no more used : + # https://www.elastic.co/guide/en/elasticsearch/reference/6.0/mapping-all-field.html + + # all = MetaField(enabled=False) + pass # for the title fields a simpler analyzer is used. @@ -20,7 +26,7 @@ class BaseMeta: # https://github.com/komoot/photon/blob/master/es/index_settings.json def default_title_field(): return String( - index='not_analyzed', + # index='not_analyzed', similarity='c2corgsimilarity', fields={ 'ngram': String( @@ -48,7 +54,7 @@ class Meta(BaseMeta): pass id = Long() - doc_type = Enum() + # doc_type = Enum() quality = QEnumRange( 'qa', model_field=Document.quality, enum_mapper=sortable_quality_types) available_locales = QEnumArray('l', enum=default_langs) @@ -107,11 +113,11 @@ class Meta(BaseMeta): analyzer='index_basque', search_analyzer='search_basque') @staticmethod - def to_search_document(document, index, include_areas=True): + def to_search_document(document, index_prefix, include_areas=True): search_document = { - '_index': index, + '_index': f"{index_prefix}_{document.type}", '_id': document.document_id, - '_type': document.type, + # '_type': document.type, 'id': document.document_id } @@ -181,277 +187,282 @@ def copy_enum_range_fields( """To support partial-matching required for the autocomplete search, we have to set up a n-gram 
filter for each language analyzer. See also: -https://www.elastic.co/guide/en/elasticsearch/guide/current/_index_time_search_as_you_type.html +https://www.elastic.co/guide/en/elasticsearch/guide/2.4/_index_time_search_as_you_type.html The original definitions of the analyzers are taken from here: -https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-lang-analyzer.html +https://www.elastic.co/guide/en/elasticsearch/reference/2.4/analysis-lang-analyzer.html """ -analysis_settings = { - "filter": { - "autocomplete_filter": { - "type": "edge_ngram", - "min_gram": 2, - "max_gram": 20 - }, - # filters for the english analyzers - "english_stop": { - "type": "stop", - "stopwords": "_english_" - }, - "english_stemmer": { - "type": "stemmer", - "language": "english" - }, - "english_possessive_stemmer": { - "type": "stemmer", - "language": "possessive_english" - }, - # filters for the french analyzers - "french_elision": { - "type": "elision", - "articles_case": True, - "articles": [ - "l", "m", "t", "qu", "n", "s", - "j", "d", "c", "jusqu", "quoiqu", - "lorsqu", "puisqu" - ] - }, - "french_stop": { - "type": "stop", - "stopwords": "_french_" - }, - "french_stemmer": { - "type": "stemmer", - "language": "light_french" - }, - # filters for the german analyzers - "german_stop": { - "type": "stop", - "stopwords": "_german_" - }, - "german_stemmer": { - "type": "stemmer", - "language": "light_german" - }, - # filters for the italian analyzers - "italian_elision": { - "type": "elision", - "articles_case": True, - "articles": [ - "c", "l", "all", "dall", "dell", - "nell", "sull", "coll", "pell", - "gl", "agl", "dagl", "degl", "negl", - "sugl", "un", "m", "t", "s", "v", "d" - ] - }, - "italian_stop": { - "type": "stop", - "stopwords": "_italian_" - }, - "italian_stemmer": { - "type": "stemmer", - "language": "light_italian" - }, - # filters for the spanish analyzers - "spanish_stop": { - "type": "stop", - "stopwords": "_spanish_" - }, - "spanish_stemmer": { - 
"type": "stemmer", - "language": "light_spanish" - }, - # filters for the catalan analyzers - "catalan_elision": { - "type": "elision", - "articles_case": True, - "articles": ["d", "l", "m", "n", "s", "t"] - }, - "catalan_stop": { - "type": "stop", - "stopwords": "_catalan_" - }, - "catalan_stemmer": { - "type": "stemmer", - "language": "catalan" - }, - # filters for the basque analyzers - "basque_stop": { - "type": "stop", - "stopwords": "_basque_" - }, - "basque_stemmer": { - "type": "stemmer", - "language": "basque" - } - }, - "similarity": { - "c2corgsimilarity": { - "type": "BM25" - } - }, - "char_filter": { - "punctuationgreedy": { - "type": "pattern_replace", - "pattern": "[\\.,]" + +es_index_settings = { + "index": { + "similarity": { + "c2corgsimilarity": { + "type": "BM25" + } } }, - "analyzer": { - "index_ngram": { - "char_filter": ["punctuationgreedy"], - "filter": [ - "word_delimiter", "lowercase", "asciifolding", "unique", - "autocomplete_filter"], - "tokenizer": "standard" - }, - "search_ngram": { - "char_filter": ["punctuationgreedy"], - "filter": [ - "word_delimiter", "lowercase", "asciifolding", "unique"], - "tokenizer": "standard" - }, - "index_raw": { - "char_filter": ["punctuationgreedy"], - "filter": [ - "word_delimiter", "lowercase", "asciifolding", "unique"], - "tokenizer": "standard" - }, - "search_raw": { - "char_filter": ["punctuationgreedy"], - "filter": [ - "word_delimiter", "lowercase", "asciifolding", "unique"], - "tokenizer": "standard" - }, - "index_english": { - "type": "custom", - "tokenizer": "standard", - "filter": [ - "english_possessive_stemmer", - "lowercase", - "english_stop", - "english_stemmer", - "autocomplete_filter" - ] - }, - "search_english": { - "type": "custom", - "tokenizer": "standard", - "filter": [ - "english_possessive_stemmer", - "lowercase", - "english_stop", - "english_stemmer" - ] - }, - "index_french": { - "tokenizer": "standard", - "filter": [ - "french_elision", - "lowercase", - "french_stop", - 
"french_stemmer", - "autocomplete_filter" - ] - }, - "search_french": { - "tokenizer": "standard", - "filter": [ - "french_elision", - "lowercase", - "french_stop", - "french_stemmer", - "autocomplete_filter" - ] - }, - "index_german": { - "tokenizer": "standard", - "filter": [ - "lowercase", - "german_stop", - "german_normalization", - "german_stemmer", - "autocomplete_filter" - ] - }, - "search_german": { - "tokenizer": "standard", - "filter": [ - "lowercase", - "german_stop", - "german_normalization", - "german_stemmer" - ] - }, - "index_italian": { - "tokenizer": "standard", - "filter": [ - "italian_elision", - "lowercase", - "italian_stop", - "italian_stemmer", - "autocomplete_filter" - ] - }, - "search_italian": { - "tokenizer": "standard", - "filter": [ - "italian_elision", - "lowercase", - "italian_stop", - "italian_stemmer" - ] - }, - "index_spanish": { - "tokenizer": "standard", - "filter": [ - "lowercase", - "spanish_stop", - "spanish_stemmer", - "autocomplete_filter" - ] - }, - "search_spanish": { - "tokenizer": "standard", - "filter": [ - "lowercase", - "spanish_stop", - "spanish_stemmer" - ] - }, - "index_catalan": { - "tokenizer": "standard", - "filter": [ - "catalan_elision", - "lowercase", - "catalan_stop", - "catalan_stemmer", - "autocomplete_filter" - ] - }, - "search_catalan": { - "tokenizer": "standard", - "filter": [ - "catalan_elision", - "lowercase", - "catalan_stop", - "catalan_stemmer" - ] - }, - "index_basque": { - "tokenizer": "standard", - "filter": [ - "lowercase", - "basque_stop", - "basque_stemmer", - "autocomplete_filter" - ] - }, - "search_basque": { - "tokenizer": "standard", - "filter": [ - "lowercase", - "basque_stop", - "basque_stemmer" - ] + "analysis": { + "filter": { + "autocomplete_filter": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 20 + }, + # filters for the english analyzers + "english_stop": { + "type": "stop", + "stopwords": "_english_" + }, + "english_stemmer": { + "type": "stemmer", + "language": 
"english" + }, + "english_possessive_stemmer": { + "type": "stemmer", + "language": "possessive_english" + }, + # filters for the french analyzers + "french_elision": { + "type": "elision", + "articles_case": True, + "articles": [ + "l", "m", "t", "qu", "n", "s", + "j", "d", "c", "jusqu", "quoiqu", + "lorsqu", "puisqu" + ] + }, + "french_stop": { + "type": "stop", + "stopwords": "_french_" + }, + "french_stemmer": { + "type": "stemmer", + "language": "light_french" + }, + # filters for the german analyzers + "german_stop": { + "type": "stop", + "stopwords": "_german_" + }, + "german_stemmer": { + "type": "stemmer", + "language": "light_german" + }, + # filters for the italian analyzers + "italian_elision": { + "type": "elision", + "articles_case": True, + "articles": [ + "c", "l", "all", "dall", "dell", + "nell", "sull", "coll", "pell", + "gl", "agl", "dagl", "degl", "negl", + "sugl", "un", "m", "t", "s", "v", "d" + ] + }, + "italian_stop": { + "type": "stop", + "stopwords": "_italian_" + }, + "italian_stemmer": { + "type": "stemmer", + "language": "light_italian" + }, + # filters for the spanish analyzers + "spanish_stop": { + "type": "stop", + "stopwords": "_spanish_" + }, + "spanish_stemmer": { + "type": "stemmer", + "language": "light_spanish" + }, + # filters for the catalan analyzers + "catalan_elision": { + "type": "elision", + "articles_case": True, + "articles": ["d", "l", "m", "n", "s", "t"] + }, + "catalan_stop": { + "type": "stop", + "stopwords": "_catalan_" + }, + "catalan_stemmer": { + "type": "stemmer", + "language": "catalan" + }, + # filters for the basque analyzers + "basque_stop": { + "type": "stop", + "stopwords": "_basque_" + }, + "basque_stemmer": { + "type": "stemmer", + "language": "basque" + } + }, + "char_filter": { + "punctuationgreedy": { + "type": "pattern_replace", + "pattern": "[\\.,]" + } + }, + "analyzer": { + "index_ngram": { + "char_filter": ["punctuationgreedy"], + "filter": [ + "word_delimiter", "lowercase", "asciifolding", 
"unique", + "autocomplete_filter"], + "tokenizer": "standard" + }, + "search_ngram": { + "char_filter": ["punctuationgreedy"], + "filter": [ + "word_delimiter", "lowercase", "asciifolding", "unique"], + "tokenizer": "standard" + }, + "index_raw": { + "char_filter": ["punctuationgreedy"], + "filter": [ + "word_delimiter", "lowercase", "asciifolding", "unique"], + "tokenizer": "standard" + }, + "search_raw": { + "char_filter": ["punctuationgreedy"], + "filter": [ + "word_delimiter", "lowercase", "asciifolding", "unique"], + "tokenizer": "standard" + }, + "index_english": { + "type": "custom", + "tokenizer": "standard", + "filter": [ + "english_possessive_stemmer", + "lowercase", + "english_stop", + "english_stemmer", + "autocomplete_filter" + ] + }, + "search_english": { + "type": "custom", + "tokenizer": "standard", + "filter": [ + "english_possessive_stemmer", + "lowercase", + "english_stop", + "english_stemmer" + ] + }, + "index_french": { + "tokenizer": "standard", + "filter": [ + "french_elision", + "lowercase", + "french_stop", + "french_stemmer", + "autocomplete_filter" + ] + }, + "search_french": { + "tokenizer": "standard", + "filter": [ + "french_elision", + "lowercase", + "french_stop", + "french_stemmer", + "autocomplete_filter" + ] + }, + "index_german": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "german_stop", + "german_normalization", + "german_stemmer", + "autocomplete_filter" + ] + }, + "search_german": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "german_stop", + "german_normalization", + "german_stemmer" + ] + }, + "index_italian": { + "tokenizer": "standard", + "filter": [ + "italian_elision", + "lowercase", + "italian_stop", + "italian_stemmer", + "autocomplete_filter" + ] + }, + "search_italian": { + "tokenizer": "standard", + "filter": [ + "italian_elision", + "lowercase", + "italian_stop", + "italian_stemmer" + ] + }, + "index_spanish": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "spanish_stop", 
+ "spanish_stemmer", + "autocomplete_filter" + ] + }, + "search_spanish": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "spanish_stop", + "spanish_stemmer" + ] + }, + "index_catalan": { + "tokenizer": "standard", + "filter": [ + "catalan_elision", + "lowercase", + "catalan_stop", + "catalan_stemmer", + "autocomplete_filter" + ] + }, + "search_catalan": { + "tokenizer": "standard", + "filter": [ + "catalan_elision", + "lowercase", + "catalan_stop", + "catalan_stemmer" + ] + }, + "index_basque": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "basque_stop", + "basque_stemmer", + "autocomplete_filter" + ] + }, + "search_basque": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "basque_stop", + "basque_stemmer" + ] + } } } } diff --git a/c2corg_api/search/mapping_types.py b/c2corg_api/search/mapping_types.py index 26d497d6d..681caaa06 100644 --- a/c2corg_api/search/mapping_types.py +++ b/c2corg_api/search/mapping_types.py @@ -1,4 +1,4 @@ -from elasticsearch_dsl import String, Long, Integer, Boolean, Date +from elasticsearch_dsl import Text as String, Keyword, Long, Integer, Boolean, Date # this module contains classes to mark the fields of a mapping that can be # used in a search. @@ -75,12 +75,10 @@ class QClass(QueryableMixin, clazz): return QClass -class Enum(String): +class Enum(Keyword): """Field type for enums that should not be analyzed before indexing. 
""" - def __init__(self, *args, **kwargs): - kwargs['index'] = 'not_analyzed' - super(Enum, self).__init__(*args, **kwargs) + pass class EnumArray(Enum): diff --git a/c2corg_api/search/mappings/area_mapping.py b/c2corg_api/search/mappings/area_mapping.py index 38c1a0746..8ce77620f 100644 --- a/c2corg_api/search/mappings/area_mapping.py +++ b/c2corg_api/search/mappings/area_mapping.py @@ -12,16 +12,16 @@ class Meta(BaseMeta): FIELDS = ['area_type'] @staticmethod - def to_search_document(document, index): + def to_search_document(document, index_prefix): search_document = SearchDocument.to_search_document( - document, index, include_areas=False) + document, index_prefix, include_areas=False) if document.redirects_to: return search_document SearchDocument.copy_fields( search_document, document, SearchArea.FIELDS) - + return search_document SearchArea.queryable_fields = QueryableMixin.get_queryable_fields(SearchArea) diff --git a/c2corg_api/search/mappings/article_mapping.py b/c2corg_api/search/mappings/article_mapping.py index 1ad46e517..18f8b1f3e 100644 --- a/c2corg_api/search/mappings/article_mapping.py +++ b/c2corg_api/search/mappings/article_mapping.py @@ -17,8 +17,8 @@ class Meta(BaseMeta): FIELDS = ['activities', 'article_type'] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/book_mapping.py b/c2corg_api/search/mappings/book_mapping.py index 117e844e7..0298fc759 100644 --- a/c2corg_api/search/mappings/book_mapping.py +++ b/c2corg_api/search/mappings/book_mapping.py @@ -15,8 +15,8 @@ class Meta(BaseMeta): FIELDS = ['activities', 'book_types'] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + 
def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/image_mapping.py b/c2corg_api/search/mappings/image_mapping.py index a7019effa..17c65ccc6 100644 --- a/c2corg_api/search/mappings/image_mapping.py +++ b/c2corg_api/search/mappings/image_mapping.py @@ -23,8 +23,8 @@ class Meta(BaseMeta): ] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/outing_mapping.py b/c2corg_api/search/mappings/outing_mapping.py index e5d06599a..3f1d06729 100644 --- a/c2corg_api/search/mappings/outing_mapping.py +++ b/c2corg_api/search/mappings/outing_mapping.py @@ -119,8 +119,8 @@ class Meta(BaseMeta): ] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/route_mapping.py b/c2corg_api/search/mappings/route_mapping.py index b93ca1676..aa7541972 100644 --- a/c2corg_api/search/mappings/route_mapping.py +++ b/c2corg_api/search/mappings/route_mapping.py @@ -145,8 +145,8 @@ class Meta(BaseMeta): ] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/topo_map_mapping.py 
b/c2corg_api/search/mappings/topo_map_mapping.py index 56837404f..e302ef419 100644 --- a/c2corg_api/search/mappings/topo_map_mapping.py +++ b/c2corg_api/search/mappings/topo_map_mapping.py @@ -10,8 +10,8 @@ class Meta(BaseMeta): FIELDS = [] @staticmethod - def to_search_document(document, index): - return SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + return SearchDocument.to_search_document(document, index_prefix) SearchTopoMap.queryable_fields = QueryableMixin.get_queryable_fields( SearchTopoMap) diff --git a/c2corg_api/search/mappings/user_mapping.py b/c2corg_api/search/mappings/user_mapping.py index 2d992877b..18324368f 100644 --- a/c2corg_api/search/mappings/user_mapping.py +++ b/c2corg_api/search/mappings/user_mapping.py @@ -10,8 +10,8 @@ class Meta(BaseMeta): FIELDS = [] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/waypoint_mapping.py b/c2corg_api/search/mappings/waypoint_mapping.py index 4bc536c2f..a0e0edb05 100644 --- a/c2corg_api/search/mappings/waypoint_mapping.py +++ b/c2corg_api/search/mappings/waypoint_mapping.py @@ -101,8 +101,8 @@ class Meta(BaseMeta): ] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/mappings/xreport_mapping.py b/c2corg_api/search/mappings/xreport_mapping.py index ba11a54e7..a800afaaf 100644 --- a/c2corg_api/search/mappings/xreport_mapping.py +++ b/c2corg_api/search/mappings/xreport_mapping.py @@ 
-42,8 +42,8 @@ class Meta(BaseMeta): ] @staticmethod - def to_search_document(document, index): - search_document = SearchDocument.to_search_document(document, index) + def to_search_document(document, index_prefix): + search_document = SearchDocument.to_search_document(document, index_prefix) if document.redirects_to: return search_document diff --git a/c2corg_api/search/search.py b/c2corg_api/search/search.py index 375e1f626..7d4c7c0eb 100644 --- a/c2corg_api/search/search.py +++ b/c2corg_api/search/search.py @@ -40,13 +40,19 @@ def do_multi_search_for_types(search_types, search_term, limit, lang): and returns a list of tuples (document_ids, total) containing the results for each type. """ - multi_search = MultiSearch(index=elasticsearch_config['index']) + multi_search = MultiSearch() for search_type in search_types: (_, get_documents_config) = search_type + + # TODO fields sert à quoi ? + # search(get_documents_config.document_type).\ + # query(get_text_query_on_title(search_term, lang)).\ + # fields([]).\ + # extra(from_=0, size=limit) + search = create_search(get_documents_config.document_type).\ query(get_text_query_on_title(search_term, lang)).\ - fields([]).\ extra(from_=0, size=limit) multi_search = multi_search.add(search) @@ -56,7 +62,7 @@ def do_multi_search_for_types(search_types, search_term, limit, lang): for response in responses: # only requesting the document ids from ES document_ids = [int(doc.meta.id) for doc in response] - total = response.hits.total + total = response.hits.total.value results_for_type.append((document_ids, total)) return results_for_type diff --git a/c2corg_api/search/search_filters.py b/c2corg_api/search/search_filters.py index 406e94b70..c71de263e 100644 --- a/c2corg_api/search/search_filters.py +++ b/c2corg_api/search/search_filters.py @@ -11,7 +11,7 @@ from c2corg_api.search import create_search, search_documents, \ get_text_query_on_title from elasticsearch_dsl.query import Range, Term, Terms, Bool, GeoBoundingBox, \ - 
Missing + Exists log = logging.getLogger(__name__) @@ -36,8 +36,12 @@ def build_query(url_params, meta_params, doc_type): if filter: search = search.filter(filter) + # TODO : not sure of that, test it + # search = search.\ + # fields([]).\ + # extra(from_=offset, size=limit) + search = search.\ - fields([]).\ extra(from_=offset, size=limit) if url_params.get('bbox'): @@ -171,8 +175,8 @@ def create_enum_range_min_max_filter(field, query_term): Range(**kwargs_start), Range(**kwargs_end), Bool(must=[ - Missing(field=field.field_min), - Missing(field=field.field_max) + ~Exists(field=field.field_min), + ~Exists(field=field.field_max) ]) ])) @@ -310,8 +314,8 @@ def create_number_range_filter(field, query_term): Range(**kwargs_start), Range(**kwargs_end), Bool(must=[ - Missing(field=field.field_min), - Missing(field=field.field_max) + ~Exists(field=field.field_min), + ~Exists(field=field.field_max) ]) ])) diff --git a/c2corg_api/tests/scripts/es/test_fill_index.py b/c2corg_api/tests/scripts/es/test_fill_index.py index 0ff534d1e..629805df1 100644 --- a/c2corg_api/tests/scripts/es/test_fill_index.py +++ b/c2corg_api/tests/scripts/es/test_fill_index.py @@ -156,20 +156,20 @@ def test_fill_index(self): waypoint2 = SearchWaypoint.get(id=71172) self.assertIsNotNone(waypoint2) self.assertEqual(waypoint2.title_en, 'Mont Blanc') - self.assertEqual(waypoint2.title_fr, '') + self.assertIsNone(waypoint2.title_fr) self.assertEqual(waypoint2.doc_type, 'w') route = SearchRoute.get(id=71173) self.assertIsNotNone(route) self.assertEqual(route.title_en, 'Mont Blanc : Face N') - self.assertEqual(route.title_fr, '') + self.assertIsNone(route.title_fr) self.assertEqual(route.doc_type, 'r') self.assertEqual(route.durations, [0]) outing = SearchOuting.get(id=71175) self.assertIsNotNone(outing) self.assertEqual(outing.title_en, 'Mont Blanc : Face N !') - self.assertEqual(outing.title_fr, '') + self.assertIsNone(outing.title_fr) self.assertEqual(outing.doc_type, 'o') 
self.assertEqual(outing.frequentation, 3) diff --git a/c2corg_api/tests/scripts/es/test_syncer.py b/c2corg_api/tests/scripts/es/test_syncer.py index fff2c903e..425e82588 100644 --- a/c2corg_api/tests/scripts/es/test_syncer.py +++ b/c2corg_api/tests/scripts/es/test_syncer.py @@ -45,8 +45,8 @@ def test_process_task(self): session=self.session) next(syncer.consume(limit=1)) - index = elasticsearch_config['index'] - doc = SearchWaypoint.get(id=document_id, index=index) + index_prefix = elasticsearch_config['index_prefix'] + doc = SearchWaypoint.get(id=document_id, index=f"{index_prefix}_w") self.assertEqual(doc['title_fr'], 'Mont Granier') self.assertEqual(doc['doc_type'], 'w') @@ -61,4 +61,4 @@ def test_process_task(self): next(syncer.consume(limit=1)) with self.assertRaises(NotFoundError): - SearchWaypoint.get(id=document_id, index=index) + SearchWaypoint.get(id=document_id, index=f"{index_prefix}_w") diff --git a/c2corg_api/tests/search/__init__.py b/c2corg_api/tests/search/__init__.py index a33b90592..dbdc2ca88 100644 --- a/c2corg_api/tests/search/__init__.py +++ b/c2corg_api/tests/search/__init__.py @@ -1,6 +1,6 @@ from c2corg_api.scripts import initializees from c2corg_api.scripts.es.fill_index import fill_index -from c2corg_api.search import elasticsearch_config +from c2corg_api.search import elasticsearch_config, search_documents def reset_search_index(session): @@ -15,5 +15,7 @@ def reset_search_index(session): def force_search_index(): """Force that the search index is updated. 
""" - elasticsearch_config['client'].indices.refresh( - elasticsearch_config['index']) + indices = elasticsearch_config['client'].indices + index_prefix = elasticsearch_config['index_prefix'] + for type_letter in search_documents: + indices.refresh(f"{index_prefix}_{type_letter}") diff --git a/c2corg_api/tests/search/test_search_filters.py b/c2corg_api/tests/search/test_search_filters.py index 00aeafafe..5e3bce3e9 100644 --- a/c2corg_api/tests/search/test_search_filters.py +++ b/c2corg_api/tests/search/test_search_filters.py @@ -7,7 +7,7 @@ from c2corg_api.search.mappings.waypoint_mapping import SearchWaypoint from c2corg_api.tests import BaseTestCase from elasticsearch_dsl.query import Range, Term, Terms, Bool, GeoBoundingBox, \ - Missing + Exists class AdvancedSearchTest(BaseTestCase): @@ -29,7 +29,6 @@ def test_build_query(self): filter(Term(available_locales='fr')).\ filter(Terms(areas=[1234, 4567])). \ filter(Range(elevation={'gte': 1500})). \ - fields([]).\ extra(from_=0, size=10) self.assertQueryEqual(query, expected_query) @@ -52,7 +51,6 @@ def test_build_query_bbox(self): 'left': 6.28279913, 'bottom': 46.03129072, 'right': 6.28369744, 'top': 46.03191439}, type='indexed')).\ - fields([]).\ extra(from_=0, size=10) self.assertQueryEqual(query, expected_query) @@ -67,7 +65,6 @@ def test_build_query_limit_offset(self): query = build_query(params, meta_params, 'w') expected_query = create_search('w'). \ query(get_text_query_on_title('search word')). \ - fields([]).\ extra(from_=40, size=20) self.assertQueryEqual(query, expected_query) @@ -82,7 +79,6 @@ def test_build_query_sort_outing(self): query = build_query(params, meta_params, 'o') expected_query = create_search('o'). 
\ filter(Term(activities='skitouring')).\ - fields([]).\ sort({'date_end': {'order': 'desc'}}, {'id': {'order': 'desc'}}).\ extra(from_=40, size=20) self.assertQueryEqual(query, expected_query) @@ -91,7 +87,7 @@ def assertQueryEqual(self, query1, query2): # noqa q1 = query1.to_dict() q2 = query2.to_dict() - self.assertEqual(q1['fields'], q2['fields']) + # self.assertEqual(q1['fields'], q2['fields']) # TODO self.assertEqual(q1['from'], q2['from']) self.assertEqual(q1['size'], q2['size']) self.assertEqual(q1.get('sort'), q2.get('sort')) @@ -213,8 +209,8 @@ def test_create_filter_enum_range_min_max(self): Range(climbing_rating_min={'gt': 17}), Range(climbing_rating_max={'lt': 5}), Bool(must=[ - Missing(field='climbing_rating_min'), - Missing(field='climbing_rating_max') + ~Exists(field='climbing_rating_min'), + ~Exists(field='climbing_rating_max') ]) ]))) @@ -244,8 +240,8 @@ def test_create_filter_integer_range(self): Range(elevation_min={'gt': 2400}), Range(elevation_max={'lt': 1200}), Bool(must=[ - Missing(field='elevation_min'), - Missing(field='elevation_max') + ~Exists(field='elevation_min'), + ~Exists(field='elevation_max') ]) ]))) self.assertEqual( @@ -254,8 +250,8 @@ def test_create_filter_integer_range(self): Range(height_min={'gt': 2400}), Range(height_max={'lt': 1200}), Bool(must=[ - Missing(field='height_min'), - Missing(field='height_max') + ~Exists(field='height_min'), + ~Exists(field='height_max') ]) ]))) diff --git a/c2corg_api/tests/views/__init__.py b/c2corg_api/tests/views/__init__.py index 439980e13..9864c4099 100644 --- a/c2corg_api/tests/views/__init__.py +++ b/c2corg_api/tests/views/__init__.py @@ -635,7 +635,7 @@ def _validate_document(self, body, headers=None, validate_with_auth=False): self.sync_es() search_doc = search_documents[self._doc_type].get( id=doc.document_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + self._doc_type) self.assertIsNotNone(search_doc['doc_type']) 
self.assertEqual(search_doc['doc_type'], doc.type) @@ -822,7 +822,7 @@ def put_success_all( # check updates to the search index search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + self._doc_type) self.assertEqual(search_doc['doc_type'], document.type) self.assertEqual(search_doc['title_en'], archive_locale.title) @@ -907,7 +907,7 @@ def put_success_figures_only( sync_es(self.session) search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + self._doc_type) self.assertEqual(search_doc['doc_type'], document.type) @@ -1003,7 +1003,7 @@ def put_success_lang_only( sync_es(self.session) search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + self._doc_type) self.assertEqual(search_doc['doc_type'], document.type) self.assertEqual( @@ -1095,7 +1095,9 @@ def put_success_new_lang( sync_es(self.session) search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=elasticsearch_config['index']) + index=f"{elasticsearch_config['index_prefix']}_{self._doc_type}", + # doc_type=self._doc_type, + ) self.assertEqual(search_doc['doc_type'], document.type) self.assertEqual( diff --git a/c2corg_api/tests/views/test_user.py b/c2corg_api/tests/views/test_user.py index 3ca9c6431..59f72ba64 100644 --- a/c2corg_api/tests/views/test_user.py +++ b/c2corg_api/tests/views/test_user.py @@ -263,7 +263,8 @@ def test_register_search_index(self, _send_email): sync_es(self.session) search_doc = search_documents[USERPROFILE_TYPE].get( id=user_id, - index=elasticsearch_config['index'], ignore=404) + index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE, + ignore=404) self.assertIsNone(search_doc) # Simulate confirmation email 
validation @@ -275,7 +276,7 @@ def test_register_search_index(self, _send_email): self.sync_es() search_doc = search_documents[USERPROFILE_TYPE].get( id=user_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE) self.assertIsNotNone(search_doc) self.assertIsNotNone(search_doc['doc_type']) diff --git a/c2corg_api/tests/views/test_user_account.py b/c2corg_api/tests/views/test_user_account.py index 493f616fb..298b682f5 100644 --- a/c2corg_api/tests/views/test_user_account.py +++ b/c2corg_api/tests/views/test_user_account.py @@ -89,7 +89,7 @@ def test_update_account_name_discourse_up(self): self.sync_es() search_doc = search_documents[USERPROFILE_TYPE].get( id=user_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE) # and check that the cache version of the user profile was updated self.check_cache_version(user_id, 2) diff --git a/c2corg_api/tests/views/test_user_profile.py b/c2corg_api/tests/views/test_user_profile.py index 703c1e876..e2edad90d 100644 --- a/c2corg_api/tests/views/test_user_profile.py +++ b/c2corg_api/tests/views/test_user_profile.py @@ -356,7 +356,7 @@ def _check_es_index(self): sync_es(self.session) search_doc = SearchUser.get( id=self.profile1.document_id, - index=elasticsearch_config['index']) + index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE) self.assertEqual(search_doc['doc_type'], self.profile1.type) self.assertEqual( search_doc['title_en'], 'Contributor contributor') diff --git a/c2corg_api/tests/views/test_waypoint.py b/c2corg_api/tests/views/test_waypoint.py index f50bd47eb..6d805352a 100644 --- a/c2corg_api/tests/views/test_waypoint.py +++ b/c2corg_api/tests/views/test_waypoint.py @@ -857,7 +857,7 @@ def test_put_success_all(self): # check that the route was updated in the search index search_doc = SearchRoute.get( id=route.document_id, - index=elasticsearch_config['index']) + 
index=elasticsearch_config['index_prefix'] + '_r') self.assertEqual( search_doc['title_en'], 'Mont Granier! : Mont Blanc from the air') diff --git a/c2corg_api/views/health.py b/c2corg_api/views/health.py index c771970e6..40b433336 100644 --- a/c2corg_api/views/health.py +++ b/c2corg_api/views/health.py @@ -63,7 +63,8 @@ def _add_es_status(self, status): try: client = elasticsearch_config['client'] - index = elasticsearch_config['index'] + index_prefix = elasticsearch_config['index_prefix'] + index = f"{index_prefix}_o" # TODO iterate on types stats = client.indices.stats(index, metric='docs') es_docs = stats['indices'][index]['total']['docs']['count'] success = True diff --git a/config/docker-dev b/config/docker-dev index 2978ec046..5ea9d9d67 100644 --- a/config/docker-dev +++ b/config/docker-dev @@ -4,10 +4,13 @@ ui_url ?= http://localhost:6553 include Makefile -export db_name = c2corg_${instanceid} +export db_host = postgresql +export db_name = c2corg +export elasticsearch_host = elasticsearch +export elasticsearch_index = c2corg_${instanceid} + export tests_db_host = postgresql export tests_db_name = c2corg_tests -export elasticsearch_index = c2corg_${instanceid} export tests_elasticsearch_host = elasticsearch export tests_elasticsearch_index = c2corg_${instanceid}_tests diff --git a/dev-requirements.txt b/dev-requirements.txt index 45e9f2205..1972081d1 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,5 +1,5 @@ -flake8==3.0.4 -pep8-naming==0.4.1 +flake8==3.7.9 +pep8-naming==0.9.1 nose==1.3.7 WebTest==2.0.20 ipdb==0.10.1 diff --git a/docker-compose.yml b/docker-compose.yml index 28feea290..d6831b725 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,3 +1,12 @@ +# En cours : +# docker-compose exec api make -f config/docker-dev test +# fill_es_index development.ini + + +# read this : +# https://www.elastic.co/fr/blog/removal-of-mapping-types-elasticsearch +# 
https://github.com/sabricot/django-elasticsearch-dsl/issues/134#issuecomment-482149563 + version: "3.7" services: api: @@ -24,6 +33,8 @@ services: - ./common.ini.in:/var/www/common.ini.in - ./development.ini.in:/var/www/development.ini.in - ./test.ini.in:/var/www/test.ini.in + - ./config/docker-dev:/var/www/config/docker-dev + - ./dev-requirements.txt:/var/www/dev-requirements.txt command: make -f config/docker serve links: - postgresql @@ -31,6 +42,8 @@ services: - redis postgresql: + # TODO : trouver un moyen de lancer ca + # docker-compose exec postgresql /v6_api/scripts/create_user_db_test.sh build: context: . dockerfile: dev_db.Dockerfile @@ -43,10 +56,14 @@ services: - 5432:5432 elasticsearch: - image: 'docker.io/c2corg/c2corg_es:anon-2018-11-02' + # TODO : trouver un moyen de lancer ca : + # docker-compose exec api initialize_c2corg_api_es development.ini + # docker-compose exec api make -f config/docker run-syncer + image: 'elasticsearch:7.1.0' ports: - 9200:9200 - command: -Des.index.number_of_replicas=0 -Des.path.data=/c2corg_anon + environment: + discovery.type: 'single-node' # needed for ES start : https://github.com/elastic/elasticsearch/issues/25067 redis: image: 'docker.io/redis:3.2' diff --git a/es_migration/2017-03-29_slackline.py b/es_migration/2017-03-29_slackline.py index be2028948..bbfccaa5c 100644 --- a/es_migration/2017-03-29_slackline.py +++ b/es_migration/2017-03-29_slackline.py @@ -33,7 +33,8 @@ def migrate(): """ Add the field "slackline_type" to the route mapping. 
""" client = elasticsearch_config['client'] - index_name = elasticsearch_config['index'] + index_prefix = elasticsearch_config['index_prefix'] + index_name = f"{index_prefix}_r" mapping_name = SearchRoute._doc_type.name field_name = 'slackline_type' diff --git a/requirements.txt b/requirements.txt index 3081d2c3a..75e39b165 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,8 +3,8 @@ apscheduler==3.6.3 # last bcrypt==3.1.7 # last colander==1.7 # last dogpile.cache==0.9.0 # Last -elasticsearch==2.3.0 # Needs to update ES containers to ES 7.0 -elasticsearch_dsl==2.0.0 # Needs to update ES containers to ES 7.0 +elasticsearch==7.1.0 # last +elasticsearch_dsl==7.1.0 # last geoalchemy2==0.4.0 # TODO 0.6.3, camptocamp SA dep, does not support 3.7 officialy geojson==2.5.0 # last geomet==0.2.1 # last, but does not support 3.7 officialy From 64f42963d4d7f13ac00cf0d9363e31670f788563 Mon Sep 17 00:00:00 2001 From: cbeauchesne Date: Wed, 25 Dec 2019 16:11:37 +0100 Subject: [PATCH 4/5] Lint --- c2corg_api/__init__.py | 2 +- c2corg_api/caching.py | 1 + c2corg_api/emails/email_service.py | 2 +- c2corg_api/ext/colander_ext.py | 30 +++---- c2corg_api/models/__init__.py | 1 + c2corg_api/models/area.py | 1 + c2corg_api/models/article.py | 2 + c2corg_api/models/association.py | 2 + c2corg_api/models/book.py | 2 + c2corg_api/models/document.py | 2 + c2corg_api/models/enums.py | 1 + c2corg_api/models/feed.py | 1 + c2corg_api/models/image.py | 3 + c2corg_api/models/route.py | 1 + c2corg_api/models/topo_map.py | 1 + c2corg_api/models/user_profile.py | 1 + c2corg_api/models/xreport.py | 1 + c2corg_api/scripts/es/sync.py | 1 + c2corg_api/scripts/es/syncer.py | 5 +- c2corg_api/scripts/initializees.py | 40 +++------- c2corg_api/scripts/jobs/scheduler.py | 1 + .../scripts/loadtests/create_test_users.py | 1 + .../scripts/migration/analyze_all_tables.py | 2 + .../migration/documents/associations.py | 3 + .../scripts/migration/documents/document.py | 2 + 
.../migration/documents/route_title_prefix.py | 1 + c2corg_api/scripts/migration/migrate.py | 2 + c2corg_api/scripts/redis-flushdb.py | 1 + c2corg_api/search/__init__.py | 1 + c2corg_api/search/mapping.py | 6 +- c2corg_api/search/mapping_types.py | 2 +- c2corg_api/search/mappings/area_mapping.py | 3 +- c2corg_api/search/mappings/article_mapping.py | 4 +- c2corg_api/search/mappings/book_mapping.py | 4 +- c2corg_api/search/mappings/image_mapping.py | 4 +- c2corg_api/search/mappings/outing_mapping.py | 4 +- c2corg_api/search/mappings/route_mapping.py | 4 +- .../search/mappings/topo_map_mapping.py | 1 + c2corg_api/search/mappings/user_mapping.py | 4 +- .../search/mappings/waypoint_mapping.py | 4 +- c2corg_api/search/mappings/xreport_mapping.py | 4 +- c2corg_api/search/notify_sync.py | 4 +- c2corg_api/search/search.py | 3 +- c2corg_api/search/search_filters.py | 3 +- c2corg_api/search/utils.py | 3 + c2corg_api/security/discourse_client.py | 1 + c2corg_api/tests/__init__.py | 1 + c2corg_api/tests/views/__init__.py | 9 ++- c2corg_api/tests/views/test_user.py | 6 +- c2corg_api/tests/views/test_user_account.py | 3 +- c2corg_api/tests/views/test_user_profile.py | 5 +- c2corg_api/views/__init__.py | 12 +-- c2corg_api/views/document_merge.py | 8 +- c2corg_api/views/document_schemas.py | 1 + c2corg_api/views/forum.py | 7 +- c2corg_api/views/health.py | 8 +- c2corg_api/views/sitemap.py | 78 +++++++++---------- c2corg_api/views/sso.py | 4 +- c2corg_api/views/user.py | 25 +++--- c2corg_api/views/user_account.py | 4 +- c2corg_api/views/user_block.py | 4 +- c2corg_api/views/validation.py | 2 +- dev-requirements.txt | 6 +- docker-compose.yml | 1 + es_migration/2017-03-29_slackline.py | 6 +- test.log | 56 +++++++++++++ 66 files changed, 261 insertions(+), 157 deletions(-) create mode 100644 test.log diff --git a/c2corg_api/__init__.py b/c2corg_api/__init__.py index 3863544e2..062bea154 100644 --- a/c2corg_api/__init__.py +++ b/c2corg_api/__init__.py @@ -79,7 +79,7 @@ def 
ping_connection(dbapi_connection, connection_record, connection_proxy): cursor = dbapi_connection.cursor() try: cursor.execute('SELECT 1') - except: + except: # noqa # raise DisconnectionError - pool will try # connecting again up to three times before raising. raise exc.DisconnectionError() diff --git a/c2corg_api/caching.py b/c2corg_api/caching.py index 37f13b0f1..2c00d611c 100644 --- a/c2corg_api/caching.py +++ b/c2corg_api/caching.py @@ -21,6 +21,7 @@ def create_region(name): key_mangler=lambda key: '{0}:{1}:{2}'.format(KEY_PREFIX, name, key) ) + cache_document_cooked = create_region('cooked') cache_document_detail = create_region('detail') cache_document_listing = create_region('listing') diff --git a/c2corg_api/emails/email_service.py b/c2corg_api/emails/email_service.py index da2544bf4..6ca470f65 100644 --- a/c2corg_api/emails/email_service.py +++ b/c2corg_api/emails/email_service.py @@ -32,7 +32,7 @@ def get_translation(self, lang, key): raise Exception('Bad language' + lang) try: return self._get_file_content(lang, key) - except: + except: # noqa log.exception('The %s translation for %s could not be read' % ( lang, key)) return self._get_file_content('fr', key) diff --git a/c2corg_api/ext/colander_ext.py b/c2corg_api/ext/colander_ext.py index c99826fdc..69eeed540 100644 --- a/c2corg_api/ext/colander_ext.py +++ b/c2corg_api/ext/colander_ext.py @@ -10,23 +10,23 @@ # import from geojson -def _is_polygon(coords): - lengths = all(len(elem) >= 4 for elem in coords) - isring = all(elem[0] == elem[-1] for elem in coords) +def _is_polygon(coords): + lengths = all(len(elem) >= 4 for elem in coords) + isring = all(elem[0] == elem[-1] for elem in coords) return lengths and isring -def _checkListOfObjects(coord, pred): - """ This method provides checking list of geojson objects such Multipoint or - MultiLineString that each element of the list is valid geojson object. - This is helpful method for IsValid. 
- :param coord: List of coordinates - :type coord: list - :param pred: Predicate to check validation of each member in the coord - :type pred: function - :return: True if list contains valid objects, False otherwise - :rtype: bool - """ +def _checkListOfObjects(coord, pred): # noqa + """ This method provides checking list of geojson objects such Multipoint or + MultiLineString that each element of the list is valid geojson object. + This is helpful method for IsValid. + :param coord: List of coordinates + :type coord: list + :param pred: Predicate to check validation of each member in the coord + :type pred: function + :return: True if list contains valid objects, False otherwise + :rtype: bool + """ return not isinstance(coord, list) or not all([pred(ls) for ls in coord]) @@ -76,7 +76,7 @@ def deserialize(self, node, cstruct): data = geojson.loads(cstruct) except Invalid as exc: raise exc - except: + except: # noqa raise Invalid(node, 'Invalid geometry: %s' % cstruct) if not isinstance(data, geojson.GeoJSON): raise Invalid(node, 'Invalid geometry: %s' % cstruct) diff --git a/c2corg_api/models/__init__.py b/c2corg_api/models/__init__.py index b3c678738..e33aa4ce9 100644 --- a/c2corg_api/models/__init__.py +++ b/c2corg_api/models/__init__.py @@ -17,6 +17,7 @@ class BaseMixin(object): __table_args__ = {'schema': schema} + Base = declarative_base(cls=BaseMixin) diff --git a/c2corg_api/models/area.py b/c2corg_api/models/area.py index c8048fb49..4b99d0b5c 100644 --- a/c2corg_api/models/area.py +++ b/c2corg_api/models/area.py @@ -21,6 +21,7 @@ class _AreaMixin(object): area_type = Column(area_type) + attributes = ['area_type'] diff --git a/c2corg_api/models/article.py b/c2corg_api/models/article.py index 5cbc54a4d..d7a7880e0 100644 --- a/c2corg_api/models/article.py +++ b/c2corg_api/models/article.py @@ -22,6 +22,7 @@ class _ArticleMixin(object): activities = Column(ArrayOfEnum(activity_type)) article_type = Column(enums.article_type) + attributes = ['categories', 
'activities', 'article_type'] @@ -63,6 +64,7 @@ class ArchiveArticle(_ArticleMixin, ArchiveDocument): __table_args__ = Base.__table_args__ + schema_article_locale = schema_document_locale schema_article_attributes = list(schema_attributes) schema_article_attributes.remove('geometry') diff --git a/c2corg_api/models/association.py b/c2corg_api/models/association.py index 76281eb3e..8e987fcc9 100644 --- a/c2corg_api/models/association.py +++ b/c2corg_api/models/association.py @@ -112,6 +112,7 @@ class AssociationLog(Base): DateTime(timezone=True), default=func.now(), nullable=False, index=True) + schema_association = SQLAlchemySchemaNode( Association, # whitelisted attributes @@ -349,6 +350,7 @@ def _get_associations_to_add(new_associations, current_associations): return to_add + association_keys = { 'routes': ROUTE_TYPE, 'waypoints': WAYPOINT_TYPE, diff --git a/c2corg_api/models/book.py b/c2corg_api/models/book.py index fe9d78b59..2617cd4aa 100644 --- a/c2corg_api/models/book.py +++ b/c2corg_api/models/book.py @@ -30,6 +30,7 @@ class _BookMixin(object): publication_date = Column(String(100)) langs = Column(ARRAY(String(2))) + attributes = ['author', 'editor', 'activities', 'url', 'isbn', 'book_types', 'nb_pages', 'publication_date', 'langs'] @@ -72,6 +73,7 @@ class ArchiveBook(_BookMixin, ArchiveDocument): __table_args__ = Base.__table_args__ + schema_book_locale = schema_document_locale schema_book_attributes = list(schema_attributes) schema_book_attributes.remove('geometry') diff --git a/c2corg_api/models/document.py b/c2corg_api/models/document.py index f66d6c1d3..98b3e9a03 100644 --- a/c2corg_api/models/document.py +++ b/c2corg_api/models/document.py @@ -283,6 +283,7 @@ class ArchiveDocumentLocale(Base, _DocumentLocaleMixin): Base.__table_args__ ) + # `geomet` does not support EWKB, so load geometries as WKB Geometry.as_binary = 'ST_AsBinary' @@ -401,6 +402,7 @@ def _almost_equals(self, geom, other_geom): return g1.almost_equals(g2, decimals) + 
DocumentGeometry.lon_lat = column_property( func.ST_AsGeoJSON(func.ST_Transform(DocumentGeometry.geom, 4326)), deferred=True) diff --git a/c2corg_api/models/enums.py b/c2corg_api/models/enums.py index 1563d64fd..8aa577cfe 100644 --- a/c2corg_api/models/enums.py +++ b/c2corg_api/models/enums.py @@ -8,6 +8,7 @@ def enum(name, types): return Enum( name=name, metadata=Base.metadata, schema=schema, *types) + quality_type = enum( 'quality_type', attributes.quality_types) waypoint_type = enum( diff --git a/c2corg_api/models/feed.py b/c2corg_api/models/feed.py index 09143ef1f..44b6a23c0 100644 --- a/c2corg_api/models/feed.py +++ b/c2corg_api/models/feed.py @@ -557,5 +557,6 @@ def is_linked_to_doc(image_in, document_id, document_type): return False + # the document types that have no entry in the feed NO_FEED_DOCUMENT_TYPES = [IMAGE_TYPE, USERPROFILE_TYPE, AREA_TYPE] diff --git a/c2corg_api/models/image.py b/c2corg_api/models/image.py index e7e7cd790..d8639d2c6 100644 --- a/c2corg_api/models/image.py +++ b/c2corg_api/models/image.py @@ -112,6 +112,7 @@ class ArchiveImage(_ImageMixin, ArchiveDocument): __table_args__ = Base.__table_args__ + # special schema for image locales: images can be created without title schema_image_locale = SQLAlchemySchemaNode( DocumentLocale, @@ -155,6 +156,8 @@ class ArchiveImage(_ImageMixin, ArchiveDocument): class SchemaImageList(MappingSchema): images = SchemaNode( Sequence(), schema_create_image, missing=None) + + schema_create_image_list = SchemaImageList() diff --git a/c2corg_api/models/route.py b/c2corg_api/models/route.py index 19350f783..1a753613c 100644 --- a/c2corg_api/models/route.py +++ b/c2corg_api/models/route.py @@ -245,6 +245,7 @@ class _RouteLocaleMixin(object): slackline_anchor2 = Column(String) + attributes_locales = [ 'slope', 'remarks', 'gear', 'external_resources', 'route_history', 'slackline_anchor1', 'slackline_anchor2' diff --git a/c2corg_api/models/topo_map.py b/c2corg_api/models/topo_map.py index 7e59a9536..fdeeae94e 
100644 --- a/c2corg_api/models/topo_map.py +++ b/c2corg_api/models/topo_map.py @@ -25,6 +25,7 @@ class _MapMixin(object): scale = Column(map_scale) code = Column(String) + attributes = [ 'editor', 'scale', 'code' ] diff --git a/c2corg_api/models/user_profile.py b/c2corg_api/models/user_profile.py index 1bf624248..f43aaab95 100644 --- a/c2corg_api/models/user_profile.py +++ b/c2corg_api/models/user_profile.py @@ -22,6 +22,7 @@ class _UserProfileMixin(object): activities = Column(ArrayOfEnum(activity_type)) categories = Column(ArrayOfEnum(user_category)) + attributes = ['activities', 'categories'] diff --git a/c2corg_api/models/xreport.py b/c2corg_api/models/xreport.py index a8fde0157..a71ad56b2 100644 --- a/c2corg_api/models/xreport.py +++ b/c2corg_api/models/xreport.py @@ -185,6 +185,7 @@ class _XreportLocaleMixin(object): # Conséquences physiques et autres commentaires other_comments = Column(String) + attributes_locales = [ 'place', 'route_study', 'conditions', 'training', 'motivations', 'group_management', 'risk', 'time_management', 'safety', diff --git a/c2corg_api/scripts/es/sync.py b/c2corg_api/scripts/es/sync.py index 414399f8e..34c43f6d0 100644 --- a/c2corg_api/scripts/es/sync.py +++ b/c2corg_api/scripts/es/sync.py @@ -370,6 +370,7 @@ def create_search_documents(doc_type, documents, batch): n += 1 log.info('Sent {} document(s) of type {}'.format(n, doc_type)) + # association types that require an update association_types_to_check = { # needed to update waypoint ids for routes diff --git a/c2corg_api/scripts/es/syncer.py b/c2corg_api/scripts/es/syncer.py index 716a1dbc5..9518415f6 100644 --- a/c2corg_api/scripts/es/syncer.py +++ b/c2corg_api/scripts/es/syncer.py @@ -43,7 +43,7 @@ def process_task(self, body, message): message.ack() try: self.sync() - except: + except: # noqa log.error('Sync failed', exc_info=True) log.info('Waiting on messages') @@ -52,7 +52,7 @@ def sync(self): try: sync_es(session, self.batch_size) session.commit() - except: + except: # 
noqa session.rollback() raise finally: @@ -94,5 +94,6 @@ def main(argv=sys.argv): except KeyboardInterrupt: log.info('Syncer stopped') + if __name__ == "__main__": main() diff --git a/c2corg_api/scripts/initializees.py b/c2corg_api/scripts/initializees.py index 40c7dd407..a93c41bc3 100644 --- a/c2corg_api/scripts/initializees.py +++ b/c2corg_api/scripts/initializees.py @@ -1,16 +1,6 @@ import os import sys -from c2corg_api.search.mappings.area_mapping import SearchArea, AREA_TYPE -from c2corg_api.search.mappings.article_mapping import SearchArticle, ARTICLE_TYPE -from c2corg_api.search.mappings.book_mapping import SearchBook, BOOK_TYPE -from c2corg_api.search.mappings.image_mapping import SearchImage, IMAGE_TYPE -from c2corg_api.search.mappings.outing_mapping import SearchOuting, OUTING_TYPE -from c2corg_api.search.mappings.xreport_mapping import SearchXreport, XREPORT_TYPE -from c2corg_api.search.mappings.route_mapping import SearchRoute, ROUTE_TYPE -from c2corg_api.search.mappings.topo_map_mapping import SearchTopoMap, MAP_TYPE -from c2corg_api.search.mappings.user_mapping import SearchUser, USERPROFILE_TYPE -from c2corg_api.search.mappings.waypoint_mapping import SearchWaypoint, WAYPOINT_TYPE from elasticsearch_dsl import Index from pyramid.paster import ( @@ -21,22 +11,11 @@ from pyramid.scripts.common import parse_vars from c2corg_api.search.mapping import es_index_settings -from c2corg_api.search import configure_es_from_config, elasticsearch_config - -# TODO : use from c2corg_api.search import search_documents - -_types = [ - (SearchArea, AREA_TYPE), - (SearchArticle, ARTICLE_TYPE), - (SearchBook, BOOK_TYPE), - (SearchImage, IMAGE_TYPE), - (SearchOuting, OUTING_TYPE), - (SearchXreport, XREPORT_TYPE), - (SearchRoute, ROUTE_TYPE), - (SearchTopoMap, MAP_TYPE), - (SearchUser, USERPROFILE_TYPE), - (SearchWaypoint, WAYPOINT_TYPE), -] +from c2corg_api.search import ( + configure_es_from_config, + elasticsearch_config, + search_documents) + def usage(argv): cmd = 
os.path.basename(argv[0]) @@ -65,12 +44,13 @@ def setup_es(): info = client.info() print('ElasticSearch version: {0}'.format(info['version']['number'])) - for klass, letter in _types: + for letter, klass in search_documents.items(): index_name = f"{index_prefix}_{letter}" if client.indices.exists(index_name): - print('Index "{0}" already exists. To re-create the index, manually ' - 'delete the index and run this script again.'.format(index_name)) + print('Index "{0}" already exists. ' + 'To re-create the index, manually delete the index ' + 'and run this script again.'.format(index_name)) print('To delete the index run:') print('curl -XDELETE \'http://{0}:{1}/{2}/\''.format( elasticsearch_config['host'], elasticsearch_config['port'], @@ -109,7 +89,7 @@ def drop_index(silent=True): index_prefix = elasticsearch_config['index_prefix'] - for _, letter in _types: + for letter in search_documents: index = Index(f"{index_prefix}_{letter}") try: diff --git a/c2corg_api/scripts/jobs/scheduler.py b/c2corg_api/scripts/jobs/scheduler.py index f856e0b04..d28ec97e4 100644 --- a/c2corg_api/scripts/jobs/scheduler.py +++ b/c2corg_api/scripts/jobs/scheduler.py @@ -37,5 +37,6 @@ def main(argv=sys.argv): signal.pause() + if __name__ == "__main__": main() diff --git a/c2corg_api/scripts/loadtests/create_test_users.py b/c2corg_api/scripts/loadtests/create_test_users.py index 60b51f42d..d4311ccc1 100644 --- a/c2corg_api/scripts/loadtests/create_test_users.py +++ b/c2corg_api/scripts/loadtests/create_test_users.py @@ -94,5 +94,6 @@ def main(argv=sys.argv): print('Created %d users with base username `%s`' % ( NB_USERS_TO_CREATE, BASE_USERNAME)) + if __name__ == "__main__": main() diff --git a/c2corg_api/scripts/migration/analyze_all_tables.py b/c2corg_api/scripts/migration/analyze_all_tables.py index 578bbf0fb..c772f9110 100644 --- a/c2corg_api/scripts/migration/analyze_all_tables.py +++ b/c2corg_api/scripts/migration/analyze_all_tables.py @@ -3,6 +3,8 @@ from 
c2corg_api.scripts.migration.migrate_base import MigrateBase +# flake8: noqa + class AnalyzeAllTables(MigrateBase): """Run "analyze" on all tables. """ diff --git a/c2corg_api/scripts/migration/documents/associations.py b/c2corg_api/scripts/migration/documents/associations.py index 7c59802f3..2ad2847e4 100644 --- a/c2corg_api/scripts/migration/documents/associations.py +++ b/c2corg_api/scripts/migration/documents/associations.py @@ -16,6 +16,9 @@ from c2corg_api.scripts.migration.batch import SimpleBatch from c2corg_api.scripts.migration.migrate_base import MigrateBase + +# flake8: noqa + associations_query_count =\ 'select count(*) from (' \ ' select a.main_id, a.linked_id from app_documents_associations a ' \ diff --git a/c2corg_api/scripts/migration/documents/document.py b/c2corg_api/scripts/migration/documents/document.py index 615bd07dd..b87af011a 100644 --- a/c2corg_api/scripts/migration/documents/document.py +++ b/c2corg_api/scripts/migration/documents/document.py @@ -12,6 +12,8 @@ from c2corg_common.attributes import quality_types +# flake8: noqa + DEFAULT_QUALITY = quality_types[2] diff --git a/c2corg_api/scripts/migration/documents/route_title_prefix.py b/c2corg_api/scripts/migration/documents/route_title_prefix.py index cc9bcadc5..7529d2b05 100644 --- a/c2corg_api/scripts/migration/documents/route_title_prefix.py +++ b/c2corg_api/scripts/migration/documents/route_title_prefix.py @@ -24,6 +24,7 @@ def migrate(self): self.stop() + SQL_SET_TITLE_PREFIX_SAME_CULTURE = """ with v as (select rl.id, l2.title from guidebook.routes_locales rl join guidebook.documents_locales l1 diff --git a/c2corg_api/scripts/migration/migrate.py b/c2corg_api/scripts/migration/migrate.py index 6289eb81f..b430e8e4f 100644 --- a/c2corg_api/scripts/migration/migrate.py +++ b/c2corg_api/scripts/migration/migrate.py @@ -49,6 +49,8 @@ from alembic.config import Config +# flake8: noqa + # no-op function referenced from `migration.ini` (required for # `get_appsettings` to work) def 
no_op(global_config, **settings): pass diff --git a/c2corg_api/scripts/redis-flushdb.py b/c2corg_api/scripts/redis-flushdb.py index 395713b4a..45c9ca98f 100644 --- a/c2corg_api/scripts/redis-flushdb.py +++ b/c2corg_api/scripts/redis-flushdb.py @@ -47,5 +47,6 @@ def main(argv=sys.argv): r.flushdb() log.info('Flushed cache') + if __name__ == "__main__": main() diff --git a/c2corg_api/search/__init__.py b/c2corg_api/search/__init__.py index 7189138b4..302bd30ad 100644 --- a/c2corg_api/search/__init__.py +++ b/c2corg_api/search/__init__.py @@ -112,6 +112,7 @@ def get_text_query_on_title(search_term, search_lang=None): return MultiMatch(query=search_term, fields=fields) + search_documents = { AREA_TYPE: SearchArea, ARTICLE_TYPE: SearchArticle, diff --git a/c2corg_api/search/mapping.py b/c2corg_api/search/mapping.py index 4f4d4c183..dc75d02c5 100644 --- a/c2corg_api/search/mapping.py +++ b/c2corg_api/search/mapping.py @@ -1,12 +1,12 @@ import json from c2corg_api.models.document import Document -from c2corg_api.search.mapping_types import Enum, QEnumArray, QLong, \ +from c2corg_api.search.mapping_types import QEnumArray, QLong, \ QEnumRange from c2corg_common.attributes import default_langs from c2corg_common.sortable_search_attributes import sortable_quality_types -from elasticsearch_dsl import Document as DocType, Text as String, MetaField, \ - Long, GeoPoint, Keyword +from elasticsearch_dsl import Document as DocType, Text as String, \ + Long, GeoPoint class BaseMeta: diff --git a/c2corg_api/search/mapping_types.py b/c2corg_api/search/mapping_types.py index 681caaa06..c784f7ed9 100644 --- a/c2corg_api/search/mapping_types.py +++ b/c2corg_api/search/mapping_types.py @@ -1,4 +1,4 @@ -from elasticsearch_dsl import Text as String, Keyword, Long, Integer, Boolean, Date +from elasticsearch_dsl import Keyword, Long, Integer, Boolean, Date # this module contains classes to mark the fields of a mapping that can be # used in a search. 
diff --git a/c2corg_api/search/mappings/area_mapping.py b/c2corg_api/search/mappings/area_mapping.py index 8ce77620f..d5bd24779 100644 --- a/c2corg_api/search/mappings/area_mapping.py +++ b/c2corg_api/search/mappings/area_mapping.py @@ -21,7 +21,8 @@ def to_search_document(document, index_prefix): SearchDocument.copy_fields( search_document, document, SearchArea.FIELDS) - + return search_document + SearchArea.queryable_fields = QueryableMixin.get_queryable_fields(SearchArea) diff --git a/c2corg_api/search/mappings/article_mapping.py b/c2corg_api/search/mappings/article_mapping.py index 18f8b1f3e..0803b43c8 100644 --- a/c2corg_api/search/mappings/article_mapping.py +++ b/c2corg_api/search/mappings/article_mapping.py @@ -18,7 +18,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -30,5 +31,6 @@ def to_search_document(document, index_prefix): return search_document + SearchArticle.queryable_fields = QueryableMixin.get_queryable_fields( SearchArticle) diff --git a/c2corg_api/search/mappings/book_mapping.py b/c2corg_api/search/mappings/book_mapping.py index 0298fc759..928653509 100644 --- a/c2corg_api/search/mappings/book_mapping.py +++ b/c2corg_api/search/mappings/book_mapping.py @@ -16,7 +16,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -26,5 +27,6 @@ def to_search_document(document, index_prefix): return search_document + SearchBook.queryable_fields = QueryableMixin.get_queryable_fields( SearchBook) diff --git a/c2corg_api/search/mappings/image_mapping.py 
b/c2corg_api/search/mappings/image_mapping.py index 17c65ccc6..c86e111ab 100644 --- a/c2corg_api/search/mappings/image_mapping.py +++ b/c2corg_api/search/mappings/image_mapping.py @@ -24,7 +24,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -34,4 +35,5 @@ def to_search_document(document, index_prefix): return search_document + SearchImage.queryable_fields = QueryableMixin.get_queryable_fields(SearchImage) diff --git a/c2corg_api/search/mappings/outing_mapping.py b/c2corg_api/search/mappings/outing_mapping.py index 3f1d06729..c92543f17 100644 --- a/c2corg_api/search/mappings/outing_mapping.py +++ b/c2corg_api/search/mappings/outing_mapping.py @@ -120,7 +120,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -150,6 +151,7 @@ def to_search_document(document, index_prefix): return search_document + SearchOuting.queryable_fields = QueryableMixin.get_queryable_fields( SearchOuting) SearchOuting.queryable_fields['date'] = QDateRange( diff --git a/c2corg_api/search/mappings/route_mapping.py b/c2corg_api/search/mappings/route_mapping.py index aa7541972..ef954d2a4 100644 --- a/c2corg_api/search/mappings/route_mapping.py +++ b/c2corg_api/search/mappings/route_mapping.py @@ -146,7 +146,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ 
-170,6 +171,7 @@ def to_search_document(document, index_prefix): return search_document + SearchRoute.queryable_fields = QueryableMixin.get_queryable_fields( SearchRoute) SearchRoute.queryable_fields['ele'] = QNumberRange( diff --git a/c2corg_api/search/mappings/topo_map_mapping.py b/c2corg_api/search/mappings/topo_map_mapping.py index e302ef419..7c9d83c11 100644 --- a/c2corg_api/search/mappings/topo_map_mapping.py +++ b/c2corg_api/search/mappings/topo_map_mapping.py @@ -13,5 +13,6 @@ class Meta(BaseMeta): def to_search_document(document, index_prefix): return SearchDocument.to_search_document(document, index_prefix) + SearchTopoMap.queryable_fields = QueryableMixin.get_queryable_fields( SearchTopoMap) diff --git a/c2corg_api/search/mappings/user_mapping.py b/c2corg_api/search/mappings/user_mapping.py index 18324368f..62b2647b3 100644 --- a/c2corg_api/search/mappings/user_mapping.py +++ b/c2corg_api/search/mappings/user_mapping.py @@ -11,7 +11,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -22,4 +23,5 @@ def to_search_document(document, index_prefix): return search_document + SearchUser.queryable_fields = QueryableMixin.get_queryable_fields(SearchUser) diff --git a/c2corg_api/search/mappings/waypoint_mapping.py b/c2corg_api/search/mappings/waypoint_mapping.py index a0e0edb05..93e553a3e 100644 --- a/c2corg_api/search/mappings/waypoint_mapping.py +++ b/c2corg_api/search/mappings/waypoint_mapping.py @@ -102,7 +102,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -116,6 
+117,7 @@ def to_search_document(document, index_prefix): return search_document + SearchWaypoint.queryable_fields = QueryableMixin.get_queryable_fields( SearchWaypoint) SearchWaypoint.queryable_fields['crat'] = QEnumRangeMinMax( diff --git a/c2corg_api/search/mappings/xreport_mapping.py b/c2corg_api/search/mappings/xreport_mapping.py index a800afaaf..f0411b98f 100644 --- a/c2corg_api/search/mappings/xreport_mapping.py +++ b/c2corg_api/search/mappings/xreport_mapping.py @@ -43,7 +43,8 @@ class Meta(BaseMeta): @staticmethod def to_search_document(document, index_prefix): - search_document = SearchDocument.to_search_document(document, index_prefix) + search_document = SearchDocument.to_search_document( + document, index_prefix) if document.redirects_to: return search_document @@ -57,6 +58,7 @@ def to_search_document(document, index_prefix): return search_document + SearchXreport.queryable_fields = QueryableMixin.get_queryable_fields( SearchXreport) SearchXreport.queryable_fields['date'] = QDate('xdate', 'date') diff --git a/c2corg_api/search/notify_sync.py b/c2corg_api/search/notify_sync.py index aa9718d86..0c7fc2a15 100644 --- a/c2corg_api/search/notify_sync.py +++ b/c2corg_api/search/notify_sync.py @@ -20,7 +20,7 @@ def on_revive(channel): channel.queue_unbind( queue_config.queue.name, exchange=queue_config.exchange.name) - except: + except: # noqa pass # the re-create the queue @@ -48,7 +48,7 @@ def run_when_successful(success, *args, **kws): if success: try: operation() - except: + except: # noqa log.error('Scheduled operation failed', exc_info=True) else: log.warn('Scheduled operation is not run because transaction ' diff --git a/c2corg_api/search/search.py b/c2corg_api/search/search.py index 7d4c7c0eb..c13fc25dc 100644 --- a/c2corg_api/search/search.py +++ b/c2corg_api/search/search.py @@ -1,5 +1,4 @@ -from c2corg_api.search import create_search, elasticsearch_config, \ - get_text_query_on_title +from c2corg_api.search import create_search, 
get_text_query_on_title from c2corg_api.views.document_listings import get_documents from elasticsearch_dsl.search import MultiSearch diff --git a/c2corg_api/search/search_filters.py b/c2corg_api/search/search_filters.py index c71de263e..960eea25c 100644 --- a/c2corg_api/search/search_filters.py +++ b/c2corg_api/search/search_filters.py @@ -38,7 +38,7 @@ def build_query(url_params, meta_params, doc_type): # TODO : not sure of that, test it # search = search.\ - # fields([]).\ + # fields([]).\ # extra(from_=offset, size=limit) search = search.\ @@ -383,6 +383,7 @@ def parse_enum_value(valid_values, s): else: return None + DATE_REGEX = re.compile('^(?:[0-9]{2})?[0-9]{2}-[0-3]?[0-9]-[0-3]?[0-9]$') diff --git a/c2corg_api/search/utils.py b/c2corg_api/search/utils.py index 6b988b129..1e0b96409 100644 --- a/c2corg_api/search/utils.py +++ b/c2corg_api/search/utils.py @@ -1,5 +1,8 @@ import re + +# flake8: noqa + BBCODE_TAGS = [ 'b', 'i', 'u', 's', 'q', 'c', 'sup', 'ind', 'url', 'email', 'acr(onym)?', 'colou?r', 'picto', 'p', 'center', 'right', 'left', 'justify', diff --git a/c2corg_api/security/discourse_client.py b/c2corg_api/security/discourse_client.py index 54f1e1502..9f829c070 100644 --- a/c2corg_api/security/discourse_client.py +++ b/c2corg_api/security/discourse_client.py @@ -151,6 +151,7 @@ def redirect_without_nonce(self, user): nonce = self.request_nonce() return self.create_response_payload(user, nonce, '/session/sso_login') + c = None diff --git a/c2corg_api/tests/__init__.py b/c2corg_api/tests/__init__.py index ca53f7a92..bbd92fe0d 100644 --- a/c2corg_api/tests/__init__.py +++ b/c2corg_api/tests/__init__.py @@ -184,6 +184,7 @@ def setup_package(): fill_index(DBSession) DBSession.remove() + # keep the database schema after a test run (useful for debugging) keep = False diff --git a/c2corg_api/tests/views/__init__.py b/c2corg_api/tests/views/__init__.py index 9864c4099..c75b58bdc 100644 --- a/c2corg_api/tests/views/__init__.py +++ 
b/c2corg_api/tests/views/__init__.py @@ -820,9 +820,10 @@ def put_success_all( if check_es: sync_es(self.session) # check updates to the search index + index = elasticsearch_config['index_prefix'] + '_' + self._doc_type search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=elasticsearch_config['index_prefix'] + '_' + self._doc_type) + index=index) self.assertEqual(search_doc['doc_type'], document.type) self.assertEqual(search_doc['title_en'], archive_locale.title) @@ -1001,9 +1002,10 @@ def put_success_lang_only( # check updates to the search index if check_es: sync_es(self.session) + index = elasticsearch_config['index_prefix'] + '_' + self._doc_type search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=elasticsearch_config['index_prefix'] + '_' + self._doc_type) + index=index) self.assertEqual(search_doc['doc_type'], document.type) self.assertEqual( @@ -1093,9 +1095,10 @@ def put_success_new_lang( # check updates to the search index if check_es: sync_es(self.session) + index = f"{elasticsearch_config['index_prefix']}_{self._doc_type}" search_doc = search_documents[self._doc_type].get( id=document.document_id, - index=f"{elasticsearch_config['index_prefix']}_{self._doc_type}", + index=index, # doc_type=self._doc_type, ) diff --git a/c2corg_api/tests/views/test_user.py b/c2corg_api/tests/views/test_user.py index 59f72ba64..255d84583 100644 --- a/c2corg_api/tests/views/test_user.py +++ b/c2corg_api/tests/views/test_user.py @@ -261,9 +261,10 @@ def test_register_search_index(self, _send_email): # check that the profile is not inserted in the search index sync_es(self.session) + index = elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE search_doc = search_documents[USERPROFILE_TYPE].get( id=user_id, - index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE, + index=index, ignore=404) self.assertIsNone(search_doc) @@ -274,9 +275,10 @@ def test_register_search_index(self, _send_email): # 
check that the profile is inserted in the index after confirmation self.sync_es() + index = elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE search_doc = search_documents[USERPROFILE_TYPE].get( id=user_id, - index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE) + index=index) self.assertIsNotNone(search_doc) self.assertIsNotNone(search_doc['doc_type']) diff --git a/c2corg_api/tests/views/test_user_account.py b/c2corg_api/tests/views/test_user_account.py index 298b682f5..9ae7569d1 100644 --- a/c2corg_api/tests/views/test_user_account.py +++ b/c2corg_api/tests/views/test_user_account.py @@ -87,9 +87,10 @@ def test_update_account_name_discourse_up(self): # check that the search index is updated with the new name self.sync_es() + index = elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE search_doc = search_documents[USERPROFILE_TYPE].get( id=user_id, - index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE) + index=index) # and check that the cache version of the user profile was updated self.check_cache_version(user_id, 2) diff --git a/c2corg_api/tests/views/test_user_profile.py b/c2corg_api/tests/views/test_user_profile.py index e2edad90d..6ae19ed65 100644 --- a/c2corg_api/tests/views/test_user_profile.py +++ b/c2corg_api/tests/views/test_user_profile.py @@ -354,9 +354,8 @@ def test_put_success_new_lang(self): def _check_es_index(self): sync_es(self.session) - search_doc = SearchUser.get( - id=self.profile1.document_id, - index=elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE) + index = elasticsearch_config['index_prefix'] + '_' + USERPROFILE_TYPE + search_doc = SearchUser.get(id=self.profile1.document_id, index=index) self.assertEqual(search_doc['doc_type'], self.profile1.type) self.assertEqual( search_doc['title_en'], 'Contributor contributor') diff --git a/c2corg_api/views/__init__.py b/c2corg_api/views/__init__.py index a0e57a02e..ff452291c 100644 --- a/c2corg_api/views/__init__.py +++ 
b/c2corg_api/views/__init__.py @@ -136,12 +136,12 @@ def to_json_dict(obj, schema, with_special_locales_attrs=False, obj_dict['cooked'] = cook(obj_dict['locales'][0]) if with_special_geometry_attrs and obj.type in(ROUTE_TYPE, OUTING_TYPE): - geometry_special_attributes = ['has_geom_detail'] - geometry_dict = obj_dict['geometry'] - geometry = obj.geometry - for attr in geometry_special_attributes: - if hasattr(geometry, attr): - geometry_dict[attr] = getattr(geometry, attr) + geometry_special_attributes = ['has_geom_detail'] + geometry_dict = obj_dict['geometry'] + geometry = obj.geometry + for attr in geometry_special_attributes: + if hasattr(geometry, attr): + geometry_dict[attr] = getattr(geometry, attr) return obj_dict diff --git a/c2corg_api/views/document_merge.py b/c2corg_api/views/document_merge.py index e0178d4ad..8ee8e324a 100644 --- a/c2corg_api/views/document_merge.py +++ b/c2corg_api/views/document_merge.py @@ -84,12 +84,12 @@ def validate_documents(request, **kwargs): # do they have the same type? 
if source_type != target_type: - request.errors.add( - 'body', 'types', 'documents must have the same type') + request.errors.add( + 'body', 'types', 'documents must have the same type') if source_type == USERPROFILE_TYPE: - request.errors.add( - 'body', 'types', 'merging user accounts is not supported') + request.errors.add( + 'body', 'types', 'merging user accounts is not supported') @resource(path='/documents/merge', cors_policy=cors_policy) diff --git a/c2corg_api/views/document_schemas.py b/c2corg_api/views/document_schemas.py index aab801ab7..8c26b79b2 100644 --- a/c2corg_api/views/document_schemas.py +++ b/c2corg_api/views/document_schemas.py @@ -162,6 +162,7 @@ def adapt_outing_schema_for_activities(activities, field_list_type): fields = get_all_fields(fields_outing, activities, field_list_type) return restrict_schema(schema_outing, fields) + outing_schema_adaptor = make_schema_adaptor( adapt_outing_schema_for_activities, 'activities', 'fields') outing_listing_schema_adaptor = make_schema_adaptor( diff --git a/c2corg_api/views/forum.py b/c2corg_api/views/forum.py index ee28ed73b..1ade98002 100644 --- a/c2corg_api/views/forum.py +++ b/c2corg_api/views/forum.py @@ -47,6 +47,7 @@ class SchemaTopicCreate(colander.MappingSchema): document_id = colander.SchemaNode(colander.Int()) lang = colander.SchemaNode(colander.String()) + schema_topic_create = SchemaTopicCreate() @@ -97,7 +98,7 @@ def collection_post(self): # category could be id or name try: category = int(category) - except: + except: # noqa pass client = get_discourse_client(settings) @@ -120,7 +121,7 @@ def collection_post(self): if locale.type == document_types.OUTING_TYPE: try: self.invite_participants(client, locale, topic_id) - except: + except: # noqa log.error('Inviting participants of outing {} failed' .format(locale.document_id), exc_info=True) @@ -137,7 +138,7 @@ def invite_participants(self, client, locale, topic_id): try: client.client.invite_user_to_topic_by_username(forum_username, topic_id) 
- except: + except: # noqa log.error('Inviting forum user {} in topic {} failed' .format(forum_username, topic_id), exc_info=True) diff --git a/c2corg_api/views/health.py b/c2corg_api/views/health.py index 40b433336..8f7ead4f2 100644 --- a/c2corg_api/views/health.py +++ b/c2corg_api/views/health.py @@ -49,7 +49,7 @@ def _add_database_status(self, status): try: last_es_syncer_run, _ = es_sync.get_status(DBSession) success = True - except: + except: # noqa log.exception('Getting last es syncer run failed') self.request.response.status_code = 500 @@ -64,11 +64,11 @@ def _add_es_status(self, status): try: client = elasticsearch_config['client'] index_prefix = elasticsearch_config['index_prefix'] - index = f"{index_prefix}_o" # TODO iterate ion types + index = f"{index_prefix}_o" # TODO iterate on types stats = client.indices.stats(index, metric='docs') es_docs = stats['indices'][index]['total']['docs']['count'] success = True - except: + except: # noqa log.exception('Getting indexed docs count failed') self.request.response.status_code = 500 @@ -83,7 +83,7 @@ def _add_redis_status(self, status): client = cache_document_detail.backend.client redis_keys = client.dbsize() success = True - except: + except: # noqa log.exception('Getting redis keys failed') status['redis'] = 'ok' if success else 'error' diff --git a/c2corg_api/views/sitemap.py b/c2corg_api/views/sitemap.py index dbe317381..b16360798 100644 --- a/c2corg_api/views/sitemap.py +++ b/c2corg_api/views/sitemap.py @@ -122,49 +122,49 @@ def _get_sitemap_index(): def _get_sitemap(doc_type, i): - fields = [ - Document.document_id, DocumentLocale.lang, DocumentLocale.title, - CacheVersion.last_updated - ] - - # include `title_prefix` for routes - is_route = doc_type == ROUTE_TYPE - if is_route: - fields.append(RouteLocale.title_prefix) + fields = [ + Document.document_id, DocumentLocale.lang, DocumentLocale.title, + CacheVersion.last_updated + ] - base_query = DBSession. \ - query(*fields). \ - select_from(Document). 
\ - join(DocumentLocale, - Document.document_id == DocumentLocale.document_id) + # include `title_prefix` for routes + is_route = doc_type == ROUTE_TYPE + if is_route: + fields.append(RouteLocale.title_prefix) - if is_route: - # joining on `RouteLocale.__table_` instead of `RouteLocale` to - # avoid that SQLAlchemy create an additional join on DocumentLocale - base_query = base_query. \ - join(RouteLocale.__table__, - DocumentLocale.id == RouteLocale.id) + base_query = DBSession. \ + query(*fields). \ + select_from(Document). \ + join(DocumentLocale, + Document.document_id == DocumentLocale.document_id) + if is_route: + # joining on `RouteLocale.__table_` instead of `RouteLocale` to + # avoid that SQLAlchemy create an additional join on DocumentLocale base_query = base_query. \ - join(CacheVersion, - Document.document_id == CacheVersion.document_id). \ - filter(Document.redirects_to.is_(None)). \ - filter(Document.type == doc_type). \ - order_by(Document.document_id, DocumentLocale.lang). \ - limit(PAGES_PER_SITEMAP). \ - offset(PAGES_PER_SITEMAP * i) - - document_locales = base_query.all() - - if not document_locales: - raise HTTPNotFound() - - return { - 'pages': [ - _format_page(locale, is_route) - for locale in document_locales - ] - } + join(RouteLocale.__table__, + DocumentLocale.id == RouteLocale.id) + + base_query = base_query. \ + join(CacheVersion, + Document.document_id == CacheVersion.document_id). \ + filter(Document.redirects_to.is_(None)). \ + filter(Document.type == doc_type). \ + order_by(Document.document_id, DocumentLocale.lang). \ + limit(PAGES_PER_SITEMAP). 
\ + offset(PAGES_PER_SITEMAP * i) + + document_locales = base_query.all() + + if not document_locales: + raise HTTPNotFound() + + return { + 'pages': [ + _format_page(locale, is_route) + for locale in document_locales + ] + } def _format_page(document_locale, is_route): diff --git a/c2corg_api/views/sso.py b/c2corg_api/views/sso.py index 7c4fac1d8..889595064 100644 --- a/c2corg_api/views/sso.py +++ b/c2corg_api/views/sso.py @@ -57,6 +57,7 @@ class SsoSyncSchema(colander.MappingSchema): groups = colander.SchemaNode(colander.String(), missing=None) + sso_sync_schema = SsoSyncSchema() @@ -242,6 +243,7 @@ def sso_expire_from_now(): class SsoLoginSchema(colander.MappingSchema): token = colander.SchemaNode(colander.String()) + sso_login_schema = SsoLoginSchema() @@ -279,7 +281,7 @@ def post(self): try: r = client.redirect_without_nonce(user) response['redirect_internal'] = r - except: + except: # noqa # Any error with discourse should not prevent login log.warning( 'Error logging into discourse for %d', user.id, diff --git a/c2corg_api/views/user.py b/c2corg_api/views/user.py index ce12f9bbd..5e969689e 100644 --- a/c2corg_api/views/user.py +++ b/c2corg_api/views/user.py @@ -55,7 +55,7 @@ def validate_json_password(request, **kwargs): # later on requires plain string otherwise it raises # the "Unicode-objects must be encoded before hashing" error. request.validated['password'] = password.encode(ENCODING) - except: + except: # noqa request.errors.add('body', 'password', 'Invalid') @@ -143,7 +143,7 @@ def validate_captcha(request, **kwargs): request.errors.add('body', 'captcha', 'Error, please retry') return - except: + except: # noqa log.exception('Request error while checking captcha') # We want a notification and not a 500 to let the user immediately # resend a response. 
@@ -186,7 +186,7 @@ def post(self): DBSession.add(user) try: DBSession.flush() - except: + except: # noqa log.warning('Error persisting user', exc_info=True) raise HTTPInternalServerError('Error persisting user') @@ -252,7 +252,7 @@ def post(self): client = get_discourse_client(settings) r = client.redirect_without_nonce(user) response['redirect_internal'] = r - except: + except: # noqa # Since only the password is changed, any error with discourse # must not prevent login and validation. log.error( @@ -262,7 +262,7 @@ def post(self): user.clear_validation_nonce() try: DBSession.flush() - except: + except: # noqa log.warning('Error persisting user', exc_info=True) raise HTTPInternalServerError('Error persisting user') @@ -310,7 +310,7 @@ def post(self): try: DBSession.flush() - except: + except: # noqa log.warning('Error persisting user', exc_info=True) raise HTTPInternalServerError('Error persisting user') @@ -355,7 +355,7 @@ def post(self): client = get_discourse_client(settings) r = client.redirect_without_nonce(user) response['redirect_internal'] = r - except: + except: # noqa # Any error with discourse must prevent login and validation log.error( 'Error logging into discourse for %d', user.id, @@ -364,7 +364,7 @@ def post(self): try: DBSession.flush() - except: + except: # noqa log.warning('Error persisting user', exc_info=True) raise HTTPInternalServerError('Error persisting user') @@ -395,13 +395,13 @@ def post(self): try: client = get_discourse_client(request.registry.settings) client.sync_sso(user) - except: + except: # noqa log.error('Error syncing email with discourse', exc_info=True) raise HTTPInternalServerError('Error with Discourse') try: DBSession.flush() - except: + except: # noqa log.warning('Error persisting user', exc_info=True) raise HTTPInternalServerError('Error persisting user') @@ -412,6 +412,7 @@ class LoginSchema(colander.MappingSchema): username = colander.SchemaNode(colander.String()) password = colander.SchemaNode(colander.String()) + 
login_schema = LoginSchema() @@ -461,7 +462,7 @@ def post(self): else: r = client.redirect_without_nonce(user) response['redirect_internal'] = r - except: + except: # noqa # Any error with discourse should not prevent login log.warning( 'Error logging into discourse for %d', user.id, @@ -506,7 +507,7 @@ def post(self): settings = request.registry.settings client = get_discourse_client(settings) result['logged_out_discourse_user'] = client.logout(userid) - except: + except: # noqa # Any error with discourse should not prevent logout log.warning( 'Error logging out of discourse for %d', userid, diff --git a/c2corg_api/views/user_account.py b/c2corg_api/views/user_account.py index 7a3bda1fc..1584a2f3b 100644 --- a/c2corg_api/views/user_account.py +++ b/c2corg_api/views/user_account.py @@ -166,13 +166,13 @@ def post(self): try: client = get_discourse_client(request.registry.settings) client.sync_sso(user) - except: + except: # noqa log.error('Error syncing with discourse', exc_info=True) raise HTTPInternalServerError('Error with Discourse') try: DBSession.flush() - except: + except: # noqa log.warning('Error persisting user', exc_info=True) raise HTTPInternalServerError('Error persisting user') diff --git a/c2corg_api/views/user_block.py b/c2corg_api/views/user_block.py index 884598d96..0770539ec 100644 --- a/c2corg_api/views/user_block.py +++ b/c2corg_api/views/user_block.py @@ -58,7 +58,7 @@ def post(self): block_duration = 99999 # 99999 days = 273 years client.suspend( user.id, block_duration, 'account blocked by moderator') - except: + except: # noqa log.error( 'Suspending account in Discourse failed: %d', user.id, exc_info=True) @@ -98,7 +98,7 @@ def post(self): try: client = get_discourse_client(self.request.registry.settings) client.unsuspend(user.id) - except: + except: # noqa log.error( 'Unsuspending account in Discourse failed: %d', user.id, exc_info=True) diff --git a/c2corg_api/views/validation.py b/c2corg_api/views/validation.py index c624319bf..7affa4ee5 
100644 --- a/c2corg_api/views/validation.py +++ b/c2corg_api/views/validation.py @@ -166,7 +166,7 @@ def check_get_for_integer_property(request, key, required): try: request.validated[key] = int(request.GET.get(key)) - except: + except: # noqa request.errors.add('querystring', key, 'invalid ' + key) diff --git a/dev-requirements.txt b/dev-requirements.txt index 1972081d1..182a52467 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,6 +1,6 @@ flake8==3.7.9 pep8-naming==0.9.1 nose==1.3.7 -WebTest==2.0.20 -ipdb==0.10.1 -httmock==1.2.5 +WebTest==2.0.33 +ipdb==0.12.3 +httmock==1.3.0 diff --git a/docker-compose.yml b/docker-compose.yml index d6831b725..86b03b294 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,6 +27,7 @@ services: redis_url: 'redis://redis:6379/' version: '' volumes: + - ./es_migration:/var/www/es_migration - ./alembic_migration:/var/www/alembic_migration - ./c2corg_api:/var/www/c2corg_api - ./Makefile:/var/www/Makefile diff --git a/es_migration/2017-03-29_slackline.py b/es_migration/2017-03-29_slackline.py index bbfccaa5c..c67acc0fb 100644 --- a/es_migration/2017-03-29_slackline.py +++ b/es_migration/2017-03-29_slackline.py @@ -4,10 +4,7 @@ from c2corg_api.search import configure_es_from_config, elasticsearch_config from c2corg_api.search.mappings.route_mapping import SearchRoute from elasticsearch_dsl import Index -from pyramid.paster import ( - get_appsettings, - setup_logging, - ) +from pyramid.paster import get_appsettings, setup_logging from pyramid.scripts.common import parse_vars @@ -70,5 +67,6 @@ def migrate(): print('Field "{0}" created'.format(field_name)) + if __name__ == "__main__": main() diff --git a/test.log b/test.log new file mode 100644 index 000000000..7757caa7e --- /dev/null +++ b/test.log @@ -0,0 +1,56 @@ +^CTraceback (most recent call last): + File ".build/venv/bin/nosetests", line 8, in + sys.exit(run_exit()) + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/core.py", line 121, in 
__init__ + **extra_args) + File "/usr/lib/python3.7/unittest/main.py", line 100, in __init__ + self.parseArgs(argv) + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/core.py", line 179, in parseArgs + self.createTests() + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/core.py", line 193, in createTests + self.test = self.testLoader.loadTestsFromNames(self.testNames) + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/loader.py", line 481, in loadTestsFromNames + return unittest.TestLoader.loadTestsFromNames(self, names, module) + File "/usr/lib/python3.7/unittest/loader.py", line 220, in loadTestsFromNames + suites = [self.loadTestsFromName(name, module) for name in names] + File "/usr/lib/python3.7/unittest/loader.py", line 220, in + suites = [self.loadTestsFromName(name, module) for name in names] + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/loader.py", line 418, in loadTestsFromName + addr.filename, addr.module) + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/importer.py", line 47, in importFromPath + return self.importFromDir(dir_path, fqname) + File "/var/www/.build/venv/lib/python3.7/site-packages/nose/importer.py", line 94, in importFromDir + mod = load_module(part_fqname, fh, filename, desc) + File "/var/www/.build/venv/lib/python3.7/imp.py", line 244, in load_module + return load_package(name, filename) + File "/var/www/.build/venv/lib/python3.7/imp.py", line 216, in load_package + return _load(spec) + File "", line 696, in _load + File "", line 677, in _load_unlocked + File "", line 728, in exec_module + File "", line 219, in _call_with_frames_removed + File "/var/www/c2corg_api/__init__.py", line 8, in + from c2corg_api.models import DBSession, Base + File "/var/www/c2corg_api/models/__init__.py", line 35, in + from c2corg_api.models import area_association # noqa + File "/var/www/c2corg_api/models/area_association.py", line 6, in + from c2corg_api.views import set_best_locale + File 
"/var/www/c2corg_api/views/__init__.py", line 35, in + @view_config(context=HTTPNotFound) + File "/var/www/.build/venv/lib/python3.7/site-packages/pyramid/view.py", line 222, in __init__ + self._get_info() + File "/var/www/.build/venv/lib/python3.7/site-packages/pyramid/view.py", line 227, in _get_info + frameinfo = inspect.getframeinfo(frame) + File "/usr/lib/python3.7/inspect.py", line 1464, in getframeinfo + lines, lnum = findsource(frame) + File "/usr/lib/python3.7/inspect.py", line 780, in findsource + module = getmodule(object, file) + File "/usr/lib/python3.7/inspect.py", line 739, in getmodule + f = getabsfile(module) + File "/usr/lib/python3.7/inspect.py", line 708, in getabsfile + _filename = getsourcefile(object) or getfile(object) + File "/usr/lib/python3.7/inspect.py", line 693, in getsourcefile + if os.path.exists(filename): + File "/var/www/.build/venv/lib/python3.7/genericpath.py", line 19, in exists + os.stat(path) +KeyboardInterrupt From 872a473c524b85709b7f018e6a13eca42d6c4b17 Mon Sep 17 00:00:00 2001 From: cbeauchesne Date: Fri, 27 Dec 2019 19:32:11 +0100 Subject: [PATCH 5/5] Update travis script --- .travis.yml | 23 +++++++++++++++++------ Dockerfile.in | 13 +------------ config/dev | 1 - 3 files changed, 18 insertions(+), 19 deletions(-) diff --git a/.travis.yml b/.travis.yml index e07e51934..d55dea5ff 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ -dist: trusty +dist: xenial env: global: @@ -14,24 +14,35 @@ services: - redis-server python: -- 3.4 +- 3.7 addons: postgresql: "9.4" apt: packages: - - postgresql-9.4-postgis-2.3 + - postgresql-9.4-postgis-2.4 install: -- mkdir /tmp/elasticsearch -- wget -O - https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.3.2/elasticsearch-2.3.2.tar.gz | tar xz --directory=/tmp/elasticsearch --strip-components=1 -- /tmp/elasticsearch/bin/elasticsearch --daemonize --path.data /tmp + +- curl -s -O 
https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.1.0-amd64.deb +- sudo dpkg -i --force-confnew elasticsearch-7.1.0-amd64.deb +- echo -e '-XX:+DisableExplicitGC\n-Djdk.io.permissionsUseCanonicalPath=true\n-Dlog4j.skipJansi=true\n-server\n' | sudo tee -a /etc/elasticsearch/jvm.options +- sudo chown -R elasticsearch:elasticsearch /etc/default/elasticsearch +- sudo systemctl start elasticsearch + +# - mkdir /tmp/elasticsearch +# - wget -O - https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.1.0-linux-x86_64.tar.gz | tar xz --directory=/tmp/elasticsearch --strip-components=1 +# - /tmp/elasticsearch/bin/elasticsearch --daemonize --path.data /tmp - make -f config/travis .build/dev-requirements.timestamp - make -f config/travis install + before_script: +# - sudo apt-get -qq update +# - sudo apt-get install postgresql-9.4-postgis-2.3 postgresql-9.4-postgis-scripts postgis -y -q - echo "create user \"www-data\" with password 'www-data;'" | psql -U postgres - PGUSER=postgres USER=travis scripts/create_user_db_test.sh +- sleep 10 - curl http://localhost:9200/ script: diff --git a/Dockerfile.in b/Dockerfile.in index ba33309e3..428b9b803 100644 --- a/Dockerfile.in +++ b/Dockerfile.in @@ -8,7 +8,7 @@ RUN set -x \ && apt-get -y --no-install-recommends install locales \ && echo "en_US.UTF-8 UTF-8" > /etc/locale.gen \ && locale-gen en_US.UTF-8 \ - && dpkg-reconfigure locales + && dpkg-reconfigure locales \ && /usr/sbin/update-locale LANG=en_US.UTF-8 COPY project.tar /tmp @@ -17,7 +17,6 @@ WORKDIR /var/www/ RUN tar -xvf /tmp/project.tar && chown -R root:root /var/www -# ICI CONTINUER RUN set -x \ && apt-get -y --no-install-recommends install \ python3 \ @@ -41,21 +40,11 @@ RUN set -x \ gcc \ git -# ICI CONTINUER - RUN set -x \ && make -f config/dev install \ && py3compile -f -X '^.*gevent/_util_py2.py$' .build/venv/ \ && rm -fr .cache \ && apt-get -y purge \ - python3-dev \ - python3-pip \ - libgeos-dev \ - libffi-dev \ - libpq-dev \ - virtualenv 
\ - gcc \ - git \ && apt-get -y --purge autoremove \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* diff --git a/config/dev b/config/dev index 7c00ff4d8..c386425ac 100644 --- a/config/dev +++ b/config/dev @@ -11,7 +11,6 @@ export tests_elasticsearch_index = c2corg_${instanceid}_tests export redis_db_queue = 6 export redis_db_cache = 7 -export redis_url = memory:/// # in case of unexpected errors, show the debug toolbar? export show_debugger_for_errors = false