From 120e506e0c1199a7d63d8c470341fddb301232b4 Mon Sep 17 00:00:00 2001
From: Laurent Guilbert
Date: Thu, 29 Dec 2016 11:34:33 +0100
Subject: [PATCH] #1 - Started trimming es dsl.

---
 .gitignore              |   1 +
 setup.py                |   4 +-
 tests/base.py           |  36 ++++----
 tests/doc_types.py      |  27 ------
 tests/models.py         |  49 +++++------
 tests/settings.py       |  16 ++--
 tests/test_mixins.py    | 178 +++++++++++++++++++---------------------
 tests/test_paginator.py |  84 -------------------
 trampoline/apps.py      |  85 ++++++++-----------
 trampoline/mixins.py    |  93 +++++++++------------
 trampoline/paginator.py |  33 --------
 trampoline/tasks.py     |  31 ++++---
 trampoline/version.py   |   2 +-
 trampoline/views.py     |  40 ---------
 14 files changed, 226 insertions(+), 453 deletions(-)
 delete mode 100644 tests/doc_types.py
 delete mode 100644 tests/test_paginator.py
 delete mode 100644 trampoline/paginator.py
 delete mode 100644 trampoline/views.py

diff --git a/.gitignore b/.gitignore
index eea8bf7..202a6e8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,4 @@ node_modules/
 nose_trampoline.egg-info/
 parts/
 trampoline.log
+ve/
diff --git a/setup.py b/setup.py
index ae1a151..4b42e97 100644
--- a/setup.py
+++ b/setup.py
@@ -10,9 +10,9 @@
 install_requires = [
     'celery',
-    'elasticsearch_dsl>=2.0.0,<3.0.0',
-    'tqdm',
+    'elasticsearch',
     'six',
+    'tqdm',
 ]

 if sys.version_info.major == 2:
     install_requires.append('futures')
diff --git a/tests/base.py b/tests/base.py
index 28b6674..bf9ca6c 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -11,29 +11,29 @@ class BaseTestCase(TransactionTestCase):

     def refresh(self):
-        trampoline_config.connection.indices.refresh('_all')
+        trampoline_config.es.indices.refresh('_all')

-    def docExists(self, obj, obj_id):
+    def docExists(self, obj):
         doc_type = obj.get_es_doc_type()
-        doc_type_name = doc_type._doc_type.name
-        index_name = doc_type._doc_type.index
-        obj_id = obj_id or obj.pk
-        return trampoline_config.connection.exists(
-            index=index_name,
-            doc_type=doc_type_name,
-            id=obj_id,
+        index = obj.get_es_index()
+        return trampoline_config.es.exists(
+            index=index,
+            doc_type=doc_type,
+            id=obj.pk
         )

     def aliasExists(self, index, name):
-        return trampoline_config.connection.indices.exists_alias(
+        return trampoline_config.es.indices.exists_alias(
             index=index, name=name)

     def indexExists(self, index):
-        return trampoline_config.connection.indices.exists(index=index)
+        return trampoline_config.es.indices.exists(index=index)

-    def typeExists(self, index, doc_type_name):
-        return trampoline_config.connection.indices.exists_type(
-            index=index, doc_type=doc_type_name)
+    def typeExists(self, index, doc_type):
+        return trampoline_config.es.indices.exists_type(
+            index=index,
+            doc_type=doc_type
+        )

     def assertAliasExists(self, index, name):
         self.assertTrue(self.aliasExists(index, name))

@@ -53,8 +53,8 @@ def assertTypeExists(self, index, doc_type):
     def assertTypeDoesntExist(self, index, doc_type):
         self.assertFalse(self.typeExists(index, doc_type))

-    def assertDocExists(self, obj, obj_id=None):
-        self.assertTrue(self.docExists(obj, obj_id))
+    def assertDocExists(self, obj):
+        self.assertTrue(self.docExists(obj))

-    def assertDocDoesntExist(self, obj, obj_id=None):
-        self.assertFalse(self.docExists(obj, obj_id))
+    def assertDocDoesntExist(self, obj):
+        self.assertFalse(self.docExists(obj))
diff --git a/tests/doc_types.py b/tests/doc_types.py
deleted file mode 100644
index e828da4..0000000
--- a/tests/doc_types.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-Doc types for trampoline tests.
-"""
-from elasticsearch_dsl import DocType
-from elasticsearch_dsl import String
-
-
-class TokenDoc(DocType):
-    name = String(index='not_analyzed')
-
-    class Meta:
-        index = 'foobar'
-        doc_type = 'token'
-
-
-class PersonDoc(DocType):
-    first_name = String(index='not_analyzed')
-    last_name = String(index='not_analyzed')
-    full_name = String(index='not_analyzed')
-
-    class Meta:
-        index = 'foobar'
-        doc_type = 'person'
-
-    @staticmethod
-    def prepare_full_name(obj):
-        return u"{0} {1}".format(obj.first_name, obj.last_name)
diff --git a/tests/models.py b/tests/models.py
index ac30272..aa83e2a 100644
--- a/tests/models.py
+++ b/tests/models.py
@@ -3,41 +3,42 @@
 """
 from django.db import models

-from tests.doc_types import TokenDoc, PersonDoc
 from trampoline.mixins import ESIndexableMixin


+class TokenSerializer(object):
+
+    def __init__(self, token, *args, **kwargs):
+        self.token = token
+
+    @property
+    def data(self):
+        return {
+            'name': self.token.name,
+            'number': self.token.number,
+        }
+
+
 class Token(ESIndexableMixin, models.Model):
-    name = models.CharField(max_length=200)
+    name = models.CharField(default='token', max_length=200)
+    number = models.IntegerField(default=42)
+
+    es_serializer = TokenSerializer

     def __unicode__(self):
         return self.name

-    es_doc_type = TokenDoc
-
     def is_indexable(self):
         if self.name == 'not_indexable':
             return False
         return True

     @classmethod
-    def get_indexable_queryset(cls):
-        return Token.objects.all()
-
-    def get_es_doc_mapping(self):
-        doc = TokenDoc()
-        doc.name = self.name
-        if doc.name == 'raise_exception':
-            raise RuntimeError
-        return doc
-
-
-class Person(ESIndexableMixin, models.Model):
-    first_name = models.CharField(max_length=200)
-    last_name = models.CharField(max_length=200)
-
-    def __unicode__(self):
-        return self.name
-
-    es_doc_type = PersonDoc
-    es_auto_doc_type_mapping = True
+    def get_es_doc_type_mapping(self):
+        return {
+            'properties': {
+                'name': {
+                    'type': 'string'
+                }
+            }
+        }
diff --git a/tests/settings.py b/tests/settings.py
index 3d357c8..62bc9bc 100644
--- a/tests/settings.py
+++ b/tests/settings.py
@@ -2,6 +2,8 @@
 Test settings for trampoline.
 """

+DEBUG = True
+
 DATABASES = {
     'default': {
         'NAME': 'trampoline.db',
@@ -41,20 +43,14 @@
 ##################################################

 TRAMPOLINE = {
-    'INDICES': {
-        'foobar': {
-            'models': [
-                'tests.models.Token',
-                # Make sure only one doc_type is created.
-                'tests.models.Token',
-                'tests.models.Person',
-            ]
-        },
-    },
+    'MODELS': [
+        'tests.models.Token',
+    ],
     'OPTIONS': {
         'disabled': False,
         'fail_silently': True,
     },
+    'VERSION_SUFFIX': '_1',
 }

 ##################################################
diff --git a/tests/test_mixins.py b/tests/test_mixins.py
index 2351c60..46fc1fc 100644
--- a/tests/test_mixins.py
+++ b/tests/test_mixins.py
@@ -3,28 +3,29 @@
 """
 from django.conf import settings

-from elasticsearch_dsl import Index
-
 from trampoline.mixins import ESIndexableMixin
+from trampoline import get_trampoline_config

 from tests.base import BaseTestCase
-from tests.models import Person
 from tests.models import Token


+trampoline_config = get_trampoline_config()
+
 class TestMixins(BaseTestCase):

     def setUp(self):
         super(TestMixins, self).setUp()
         self.doc_type = Token.get_es_doc_type()
-        self.index = Index(self.doc_type._doc_type.index)
-        self.index.doc_type(self.doc_type)
-        self.index.create()
+        self.index = Token.get_es_index()
+        body = {'mappings': {}}
+        body['mappings'][self.doc_type] = Token.get_es_doc_type_mapping()
+        trampoline_config.es.indices.create(index=self.index, body=body)
         self.refresh()

     def tearDown(self):
         super(TestMixins, self).tearDown()
-        self.index.delete()
+        trampoline_config.es.indices.delete(index=self.index)

     def test_is_indexable(self):
         self.assertTrue(ESIndexableMixin().is_indexable())
@@ -36,25 +37,14 @@ def test_get_indexable_queryset(self):
         )

     def test_get_es_doc(self):
-        token = Token(name="token")
+        token = Token()
         self.assertIsNone(token.get_es_doc())
         token.save()
         self.assertIsNotNone(token.get_es_doc())

-    def test_auto_doc_type_mapping(self):
-        person = Person(first_name="Simion", last_name="Baws")
-        person.save()
-        doc_type = person.get_es_doc_mapping()
-        self.assertEqual(doc_type.first_name, person.first_name)
-        self.assertEqual(doc_type.last_name, person.last_name)
-        self.assertEqual(
-            doc_type.full_name,
-            u"{0} {1}".format(person.first_name, person.last_name)
-        )
-
     def test_es_index(self):
         settings.TRAMPOLINE['OPTIONS']['disabled'] = True
-        token = Token.objects.create(name='token')
+        token = Token.objects.create()
         settings.TRAMPOLINE['OPTIONS']['disabled'] = False
         self.assertDocDoesntExist(token)

@@ -90,77 +80,77 @@ def test_es_index(self):
         token.es_index(async=False)
         settings.TRAMPOLINE['OPTIONS']['fail_silently'] = True

-    def test_es_delete(self):
-        # Asynchronous call.
-        token = Token.objects.create(name='token')
-        self.assertDocExists(token)
-        token.es_delete()
-        self.assertDocDoesntExist(Token, token.pk)
-
-        # Synchronous call.
-        token = Token.objects.create(name='token')
-        self.assertDocExists(token)
-        token.es_delete(async=False)
-        self.assertDocDoesntExist(Token, token.pk)
-
-        # Fail silently if document doesn't exist.
-        token.es_delete()
-
-        from trampoline import get_trampoline_config
-        trampoline_config = get_trampoline_config()
-
-        # Fake delete to raise exception.
-        backup_delete = trampoline_config.connection.delete
-
-        def delete_raise_exception(*args, **kwargs):
-            raise RuntimeError
-        trampoline_config.connection.delete = delete_raise_exception
-
-        # Fail silently
-        token.es_delete()
-
-        # Hard fail.
-        settings.TRAMPOLINE['OPTIONS']['fail_silently'] = False
-        with self.assertRaises(RuntimeError):
-            token.es_delete()
-        settings.TRAMPOLINE['OPTIONS']['fail_silently'] = True
-
-        trampoline_config.connection.delete = backup_delete
-
-    def test_save(self):
-        token = Token(name='token')
-
-        settings.TRAMPOLINE['OPTIONS']['disabled'] = True
-        token.save()
-        settings.TRAMPOLINE['OPTIONS']['disabled'] = False
-        self.assertDocDoesntExist(token)
-
-        token.save()
-        doc = token.get_es_doc()
-        self.assertEqual(doc.name, 'token')
-        self.assertEqual(doc._id, str(token.pk))
-
-        # Update model and synchronise doc.
-        token.name = 'kento'
-        token.save()
-        doc = token.get_es_doc()
-        self.assertEqual(doc.name, 'kento')
-
-        # Instance is not indexable.
-        token = Token.objects.create(name='not_indexable')
-        self.assertDocDoesntExist(token)
-
-    def test_delete(self):
-        token = Token.objects.create(name='token')
-        token_id = token.pk
-        self.assertDocExists(token)
-
-        settings.TRAMPOLINE['OPTIONS']['disabled'] = True
-        token.delete()
-        settings.TRAMPOLINE['OPTIONS']['disabled'] = False
-        self.assertDocExists(Token, token_id)
-
-        token.save()
-        token_id = token.pk
-        token.delete()
-        self.assertDocDoesntExist(Token, token_id)
+    # def test_es_delete(self):
+    #     # Asynchronous call.
+    #     token = Token.objects.create(name='token')
+    #     self.assertDocExists(token)
+    #     token.es_delete()
+    #     self.assertDocDoesntExist(Token, token.pk)
+    #
+    #     # Synchronous call.
+    #     token = Token.objects.create(name='token')
+    #     self.assertDocExists(token)
+    #     token.es_delete(async=False)
+    #     self.assertDocDoesntExist(Token, token.pk)
+    #
+    #     # Fail silently if document doesn't exist.
+    #     token.es_delete()
+    #
+    #     from trampoline import get_trampoline_config
+    #     trampoline_config = get_trampoline_config()
+    #
+    #     # Fake delete to raise exception.
+    #     backup_delete = trampoline_config.connection.delete
+    #
+    #     def delete_raise_exception(*args, **kwargs):
+    #         raise RuntimeError
+    #     trampoline_config.connection.delete = delete_raise_exception
+    #
+    #     # Fail silently
+    #     token.es_delete()
+    #
+    #     # Hard fail.
+    #     settings.TRAMPOLINE['OPTIONS']['fail_silently'] = False
+    #     with self.assertRaises(RuntimeError):
+    #         token.es_delete()
+    #     settings.TRAMPOLINE['OPTIONS']['fail_silently'] = True
+    #
+    #     trampoline_config.connection.delete = backup_delete
+    #
+    # def test_save(self):
+    #     token = Token(name='token')
+    #
+    #     settings.TRAMPOLINE['OPTIONS']['disabled'] = True
+    #     token.save()
+    #     settings.TRAMPOLINE['OPTIONS']['disabled'] = False
+    #     self.assertDocDoesntExist(token)
+    #
+    #     token.save()
+    #     doc = token.get_es_doc()
+    #     self.assertEqual(doc.name, 'token')
+    #     self.assertEqual(doc._id, str(token.pk))
+    #
+    #     # Update model and synchronise doc.
+    #     token.name = 'kento'
+    #     token.save()
+    #     doc = token.get_es_doc()
+    #     self.assertEqual(doc.name, 'kento')
+    #
+    #     # Instance is not indexable.
+    #     token = Token.objects.create(name='not_indexable')
+    #     self.assertDocDoesntExist(token)
+    #
+    # def test_delete(self):
+    #     token = Token.objects.create(name='token')
+    #     token_id = token.pk
+    #     self.assertDocExists(token)
+    #
+    #     settings.TRAMPOLINE['OPTIONS']['disabled'] = True
+    #     token.delete()
+    #     settings.TRAMPOLINE['OPTIONS']['disabled'] = False
+    #     self.assertDocExists(Token, token_id)
+    #
+    #     token.save()
+    #     token_id = token.pk
+    #     token.delete()
+    #     self.assertDocDoesntExist(Token, token_id)
diff --git a/tests/test_paginator.py b/tests/test_paginator.py
deleted file mode 100644
index db4f896..0000000
--- a/tests/test_paginator.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
-Test paginator for trampoline.
-"""
-from elasticsearch_dsl import Index
-from elasticsearch_dsl import Search
-
-from trampoline.paginator import ESSearchPaginator
-
-from tests.base import BaseTestCase
-from tests.models import Token
-from tests.views import PaginatedContentView
-
-
-class TestPaginator(BaseTestCase):
-
-    def setUp(self):
-        super(TestPaginator, self).setUp()
-        self.doc_type = Token.get_es_doc_type()
-        self.index = Index(self.doc_type._doc_type.index)
-        self.index.doc_type(self.doc_type)
-        self.index.create()
-        self.refresh()
-
-        for i in range(3):
-            Token.objects.create(name='token {0}'.format(i))
-        self.refresh()
-
-    def tearDown(self):
-        super(TestPaginator, self).tearDown()
-        self.index.delete()
-
-    def test_paginator(self):
-        search = Search(
-            index=Token.es_doc_type._doc_type.index,
-            doc_type=Token.es_doc_type._doc_type.name
-        )
-        search = search.sort('name')
-
-        page_size = 2
-        paginator = ESSearchPaginator(search, page_size)
-
-        page = paginator.page(1)
-
-        self.assertTrue(page.has_other_pages)
-        self.assertEqual(len(page.hits), page_size)
-        self.assertEqual(page.total_count, 3)
-
-        self.assertEqual(page.hits[0]['name'], 'token 0')
-        self.assertEqual(page.hits[1]['name'], 'token 1')
-
-        self.assertEqual(page.paginator, paginator)
-        self.assertEqual(page.number, 1)
-        self.assertIsNotNone(page.response)
-
-        page = paginator.page(2)
-
-        self.assertFalse(page.has_other_pages)
-        self.assertEqual(len(page.hits), 1)
-
-        self.assertEqual(page.hits[0]['name'], 'token 2')
-
-    def test_pagination_mixin(self):
-        class Request(object):
-            GET = {}
-
-        view = PaginatedContentView()
-        view.request = Request()
-
-        self.assertEqual(view.page_size, 2)
-
-        view.request.GET = {}
-        self.assertEqual(view.get_page_number(), 1)
-        view.request.GET = {'page': -2}
-        self.assertEqual(view.get_page_number(), 1)
-        view.request.GET = {'page': 'foobar'}
-        self.assertEqual(view.get_page_number(), 1)
-        view.request.GET = {'page': 5}
-        self.assertEqual(view.get_page_number(), 5)
-
-        page = view.paginate_search()
-        self.assertIsNotNone(page)
-        self.assertIsNotNone(view.page)
-
-        self.assertEqual(view.get_context_data()['page'], view.page)
diff --git a/trampoline/apps.py b/trampoline/apps.py
index fe2461e..14413d8 100644
--- a/trampoline/apps.py
+++ b/trampoline/apps.py
@@ -7,14 +7,14 @@
 import logging
 import six

+from elasticsearch import Elasticsearch
+
 from django.conf import settings
 from django.db import transaction
 from django.db.models.signals import class_prepared
 from django.db.models.signals import post_delete
 from django.db.models.signals import post_save

-from elasticsearch_dsl.connections import connections
-
 logger = logging.getLogger(__name__)

 try:
@@ -24,15 +24,16 @@


 DEFAULT_TRAMPOLINE = {
-    'CONNECTIONS': {
-        'default': {'hosts': 'localhost'},
-    },
-    'INDICES': {},
+    'HOSTS': [
+        {'host': 'localhost'},
+    ],
+    'MODELS': [],
     'OPTIONS': {
         'fail_silently': True,
         'disabled': False,
         'celery_queue': None
     },
+    'VERSION_SUFFIX': '',
 }


@@ -65,8 +66,8 @@ def class_prepared_check_indexable(sender, **kwargs):
     trampoline_config = get_trampoline_config()

     # Only register indexation signals for models defined in the settings.
-    sender_path = u"{0}.{1}".format(sender.__module__, sender.__name__)
-    if sender_path not in trampoline_config.model_paths:
+    sender_path = '{0}.{1}'.format(sender.__module__, sender.__name__)
+    if sender_path not in trampoline_config.models_paths:
         return

     post_save.connect(
@@ -79,10 +80,7 @@ def class_prepared_check_indexable(sender, **kwargs):
         post_delete_es_delete,
         sender=sender,
         weak=False,
-        dispatch_uid=(
-            'trampoline_post_delete_{0}'
-            .format(sender.__name__)
-        )
+        dispatch_uid='trampoline_post_delete_{0}'.format(sender.__name__)
     )


@@ -95,23 +93,24 @@ def __init__(self, *args, **kwargs):
         super(TrampolineConfig, self).__init__(*args, **kwargs)

     def ready(self):
-        if 'HOST' in self.settings:
-            raise NotImplementedError('"HOST" key replaced by "CONNECTIONS"')
-        options = {}
-        for alias, details in self.settings['CONNECTIONS'].items():
-            options[alias] = details
+        self._es = Elasticsearch(hosts=self.hosts)

-        connections.configure(**options)
+    @property
+    def settings(self):
+        USER_TRAMPOLINE = getattr(settings, 'TRAMPOLINE', {})
+        TRAMPOLINE = deepcopy(DEFAULT_TRAMPOLINE)
+        return recursive_update(TRAMPOLINE, USER_TRAMPOLINE)

-    def get_index_models(self, index_name):
-        try:
-            model_paths = self.indices[index_name]['models']
-        except KeyError:
-            return []
+    @property
+    def es(self):
+        return self._es

+    @property
+    def indexable_models(self):
         models = []
-        for model_path in model_paths:
+        for model_path in self.models_paths:
             module_path, model_name = model_path.rsplit('.', 1)
+            print(module_path, model_name)
             module = __import__(module_path, fromlist=[''])
             model = getattr(module, model_name)
             if model not in models:
@@ -119,34 +118,16 @@ def get_index_models(self, index_name):
         return models

     @property
-    def model_paths(self):
-        model_paths = []
-        for index_name in self.indices:
-            try:
-                model_paths += self.indices[index_name]['models']
-            except KeyError:
-                pass
-        return model_paths
+    def hosts(self):
+        return self.settings['HOSTS']

     @property
-    def settings(self):
-        USER_TRAMPOLINE = getattr(settings, 'TRAMPOLINE', {})
-        TRAMPOLINE = deepcopy(DEFAULT_TRAMPOLINE)
-        return recursive_update(TRAMPOLINE, USER_TRAMPOLINE)
-
-    def get_connection(self, alias='default'):
-        if not alias:
-            alias = 'default'
-        return connections.get_connection(alias)
-    connection = property(get_connection)
+    def version_suffix(self):
+        return self.settings['VERSION_SUFFIX']

     @property
-    def host(self):
-        return self.settings['HOST']
-
-    @property
-    def indices(self):
-        return self.settings['INDICES']
+    def models_paths(self):
+        return self.settings['MODELS']

     @property
     def should_fail_silently(self):
@@ -163,13 +144,13 @@ def celery_queue(self):
 try:
     # Try to import AppConfig to check if this feature is available.
     from django.apps import AppConfig  # noqa
-
-    def get_trampoline_config():
-        from django.apps import apps
-        return apps.get_app_config('trampoline')
 except ImportError:
     app_config = TrampolineConfig()
     app_config.ready()

     def get_trampoline_config():
         return app_config
+else:
+    def get_trampoline_config():
+        from django.apps import apps
+        return apps.get_app_config('trampoline')
diff --git a/trampoline/mixins.py b/trampoline/mixins.py
index f35f5f6..5d4b9dc 100644
--- a/trampoline/mixins.py
+++ b/trampoline/mixins.py
@@ -17,99 +17,82 @@ class ESIndexableMixin(object):
     """
     Provide the required methods and attributes to index django models.
     """
-    es_doc_type = None
-    es_auto_doc_type_mapping = False
+    es_serializer = None

     @classmethod
-    def get_indexable_queryset(cls):  # pragma: no cover
+    def get_indexable_queryset(cls):
         return cls._default_manager.all()

     @classmethod
-    def get_es_doc_type(cls):  # pragma: no cover
-        return cls.es_doc_type
+    def get_es_index(cls):
+        return cls.__name__.lower() + trampoline_config.version_suffix
+
+    @classmethod
+    def get_es_doc_type(cls):
+        return cls.__name__.lower()
+
+    @classmethod
+    def get_es_doc_type_mapping(self):
+        return {}
+
+    def get_es_serializer(self):
+        return self.es_serializer
+
+    def get_es_body(self):
+        serializer = self.get_es_serializer()
+        return serializer(self).data

     def is_indexable(self):
         return True

-    def get_es_doc_mapping(self):
-        if self.es_auto_doc_type_mapping is True:
-            return self.get_es_auto_doc_mapping()
-        raise NotImplementedError
-
-    def get_es_auto_doc_mapping(self):
-        """
-        Automatically map values from the model to the doc_type.
-        If a field is not present on the model, a method "prepare_{field}"
-        must be implemented on the doc_type.
-        """
-        doc_type = self.es_doc_type()
-        for field in doc_type._doc_type.mapping:
-            prep_func = getattr(doc_type, 'prepare_{0}'.format(field), None)
-            if prep_func is not None and callable(prep_func):
-                value = prep_func(self)
-            elif hasattr(self, field):
-                value = getattr(self, field, None)
-            else:
-                raise NotImplementedError(
-                    u"Field {0} is not on {1} and {2} doesn't implement a "
-                    "\"prepare_{3}\" method."
-                    .format(field, self.__class__, doc_type.__class__, field)
-                )
-            setattr(doc_type, field, value)
-        return doc_type
-
     def get_es_doc(self):
         if not self.pk:
             return None
-        doc_type = self.get_es_doc_type()
-        index_name = doc_type._doc_type.index
-        doc = doc_type.get(index=index_name, id=self.pk, ignore=404)
-        return doc
+        return trampoline_config.es.get(
+            index=self.get_es_index(),
+            doc_type=self.get_es_doc_type(),
+            id=self.pk
+        )

-    def es_index(self, async=True, countdown=0, index_name=None, queue=None):
+    def es_index(self, async=True, countdown=0, index=None, queue=None):
         if trampoline_config.is_disabled or not self.is_indexable():
             return

-        doc_type = self.get_es_doc_type()
-        index_name = index_name or doc_type._doc_type.index
+        index = index or self.get_es_index()
         queue = queue or trampoline_config.celery_queue
-
         content_type = ContentType.objects.get_for_model(self)
         if async:
             result = es_index_object.apply_async(
-                args=(index_name, content_type.pk, self.pk),
+                args=(index, content_type.pk, self.pk),
                 countdown=countdown,
                 queue=queue
             )
         else:
             if trampoline_config.should_fail_silently:
                 result = es_index_object.apply(
-                    args=(index_name, content_type.pk, self.pk)
+                    args=(index, content_type.pk, self.pk)
                 )
             else:
-                result = es_index_object.run(
-                    index_name,
-                    content_type.pk,
-                    self.pk
-                )
+                result = es_index_object.run(index, content_type.pk, self.pk)
         return result

-    def es_delete(self, async=True, index_name=None, queue=None):
+    def es_delete(self, async=True, index=None, queue=None):
         if trampoline_config.is_disabled:
             return

         doc_type = self.get_es_doc_type()
-        doc_type_name = doc_type._doc_type.name
-        index_name = index_name or doc_type._doc_type.index
+        index = index or self.get_es_index()
         queue = queue or trampoline_config.celery_queue
-        using = doc_type._doc_type.using

         if async:
-            es_delete_doc.delay(index_name, doc_type_name, self.pk, using)
-            es_delete_doc.apply_async(
-                args=(index_name, doc_type_name, self.pk, using),
+            result = es_delete_doc.apply_async(
+                args=(index, doc_type, self.pk),
                 queue=queue
             )
         else:
-            es_delete_doc.apply((index_name, doc_type_name, self.pk, using))
+            if trampoline_config.should_fail_silently:
+                result = es_delete_doc.apply((index, doc_type, self.pk))
+            else:
+                result = es_delete_doc.run(index, doc_type, self.pk)
+        return result
diff --git a/trampoline/paginator.py b/trampoline/paginator.py
deleted file mode 100644
index d854ac5..0000000
--- a/trampoline/paginator.py
+++ /dev/null
@@ -1,33 +0,0 @@
-"""
-Paginator for trampoline.
-"""
-
-
-class ESSearchPaginator(object):
-
-    def __init__(self, search, page_size):
-        self.search = search
-        self.page_size = page_size
-
-    def page(self, page_number):
-        return Page(self, page_number)
-
-
-class Page(object):
-
-    def __init__(self, paginator, page_number):
-        self.paginator = paginator
-        self.number = page_number
-
-        bottom_offset = self.paginator.page_size * (page_number - 1)
-        top_offset = bottom_offset + self.paginator.page_size
-        search = self.paginator.search[bottom_offset:top_offset]
-        response = search.execute()
-        self.hits = response.hits
-        self.response = response
-
-        self.total_count = response.hits.total
-        if self.total_count > (self.paginator.page_size * page_number):
-            self.has_other_pages = True
-        else:
-            self.has_other_pages = False
diff --git a/trampoline/tasks.py b/trampoline/tasks.py
index 3fdb269..b4bd989 100644
--- a/trampoline/tasks.py
+++ b/trampoline/tasks.py
@@ -21,7 +21,7 @@

 @shared_task
 def es_index_object(
-        index_name,
+        index,
         content_type_id,
         object_id,
         fail_silently=None):
@@ -35,16 +35,22 @@ def es_index_object(
         obj = content_type.model_class()._default_manager.get(pk=object_id)
         if not obj.is_indexable():
             return STATUS_IGNORED
-        doc = obj.get_es_doc_mapping()
-        doc.meta.id = obj.pk
-        doc.save(index=index_name)
+
+        doc_type = obj.get_es_doc_type()
+        body = obj.get_es_body()
+        trampoline_config.es.create(
+            index=index,
+            doc_type=doc_type,
+            id=obj.pk,
+            body=body
+        )
     except:
         if fail_silently:
             logger.error(
                 "Exception occured while indexing object.",
                 exc_info=True,
                 extra={
-                    'index_name': index_name,
+                    'index': index,
                     'content_type_id': content_type_id,
                     'object_id': object_id,
                 }
@@ -57,10 +63,9 @@ def es_index_object(

 @shared_task
 def es_delete_doc(
-        index_name,
-        doc_type_name,
+        index,
+        doc_type,
         doc_id,
-        using=None,
         fail_silently=None):
     """
     Delete a document from the index.
     """
     if fail_silently is None:
         fail_silently = trampoline_config.should_fail_silently
     try:
-        trampoline_config.get_connection(using).delete(
-            index=index_name,
-            doc_type=doc_type_name,
+        trampoline_config.es.delete(
+            index=index,
+            doc_type=doc_type,
             id=doc_id,
             ignore=404,
         )
     except:
         if fail_silently:
             logger.error(
                 "Exception occured while deleting document.",
                 exc_info=True,
                 extra={
-                    'index_name': index_name,
-                    'doc_type_name': doc_type_name,
+                    'index_name': index,
+                    'doc_type': doc_type,
                     'doc_id': doc_id,
                 }
             )
diff --git a/trampoline/version.py b/trampoline/version.py
index 7e49527..3b3dacb 100644
--- a/trampoline/version.py
+++ b/trampoline/version.py
@@ -1 +1 @@
-__version__ = '1.0'
+__version__ = '2.0'
diff --git a/trampoline/views.py b/trampoline/views.py
deleted file mode 100644
index e6a7315..0000000
--- a/trampoline/views.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-Views for trampoline.
-"""
-from django.utils.functional import cached_property
-
-from trampoline.paginator import ESSearchPaginator
-
-
-class ESPaginationMixin(object):
-    page_size = 10
-
-    def get_search(self):
-        raise NotImplementedError
-
-    def get_page_number(self):
-        number = 1
-        try:
-            number = int(self.request.GET.get('page'))
-        except (TypeError, ValueError):
-            pass
-        if number < 1:
-            number = 1
-        return number
-
-    def paginate_search(self):
-        search = self.get_search()
-        paginator = ESSearchPaginator(search, self.page_size)
-        page_number = self.get_page_number()
-        return paginator.page(page_number)
-
-    @cached_property
-    def page(self):
-        return self.paginate_search()
-
-    def get_context_data(self, *args, **kwargs):
-        context = (
-            super(ESPaginationMixin, self).get_context_data(*args, **kwargs)
-        )
-        context.update({'page': self.page})
-        return context