From 8332c39b7db4154a6a47c3900df3f16c6eab9ec2 Mon Sep 17 00:00:00 2001
From: Giovanni Barillari
Date: Tue, 28 Jun 2016 14:10:00 +0200
Subject: [PATCH] Improving parsing performance (#385)

---
 pydal/adapters/base.py    | 18 +++++++++---------
 pydal/objects.py          | 14 +++++++++++---
 pydal/parsers/__init__.py |  8 ++------
 tests/base.py             |  2 +-
 4 files changed, 23 insertions(+), 19 deletions(-)

diff --git a/pydal/adapters/base.py b/pydal/adapters/base.py
index 60b70327c..bd8a55064 100644
--- a/pydal/adapters/base.py
+++ b/pydal/adapters/base.py
@@ -177,7 +177,7 @@ def expand_all(self, fields, tablenames):
                     append(field)
         return new_fields
 
-    def parse_value(self, value, field_type, blob_decode=True):
+    def parse_value(self, value, field_itype, field_type, blob_decode=True):
         #[Note - gi0baro] I think next if block can be (should be?) avoided
         if field_type != 'blob' and isinstance(value, str):
             try:
@@ -193,7 +193,7 @@ def parse_value(self, value, field_type, blob_decode=True):
         elif field_type == 'blob' and not blob_decode:
             return value
         else:
-            return self.parser.parse(value, field_type)
+            return self.parser.parse(value, field_itype, field_type)
 
     def _add_operators_to_parsed_row(self, rid, table, row):
         for key, record_operator in iteritems(self.db.record_operators):
@@ -223,10 +223,10 @@ def _parse(self, row, tmps, fields, colnames, blob_decode,
             tablename = None
             #: do we have a real column?
             if tmp:
-                (tablename, fieldname, table, field, ft) = tmp
+                (tablename, fieldname, table, field, ft, fit) = tmp
                 colset = new_row[tablename]
                 #: parse value
-                value = self.parse_value(value, ft, blob_decode)
+                value = self.parse_value(value, fit, ft, blob_decode)
                 if field.filter_out:
                     value = field.filter_out(value)
                 colset[fieldname] = value
@@ -241,7 +241,8 @@ def _parse(self, row, tmps, fields, colnames, blob_decode,
                         value, table, tablename, colset)
             #: otherwise we set the value in extras
             else:
-                value = self.parse_value(value, fields[j].type, blob_decode)
+                value = self.parse_value(
+                    value, fields[j]._itype, fields[j].type, blob_decode)
                 extras[colname] = value
                 new_column_name = self._regex_select_as_parser(colname)
                 if new_column_name is not None:
@@ -260,9 +261,7 @@ def _parse(self, row, tmps, fields, colnames, blob_decode,
                     pass # not enough fields to define virtual field
             for f, v in fields_lazy[tablename]:
                 try:
-                    new_row[tablename][f] = (v.handler or VirtualCommand)(
-                        v.f, new_row
-                    )
+                    new_row[tablename][f] = v.handler(v.f, new_row)
                 except (AttributeError, KeyError):
                     pass # not enough fields to define virtual field
         return new_row
@@ -285,7 +284,8 @@ def _parse_expand_colnames(self, colnames):
             table = self.db[tablename]
            field = table[fieldname]
             ft = field.type
-            tmps.append((tablename, fieldname, table, field, ft))
+            fit = field._itype
+            tmps.append((tablename, fieldname, table, field, ft, fit))
             if tablename not in fields_virtual:
                 fields_virtual[tablename] = [
                     (f.name, f) for f in table._virtual_fields
diff --git a/pydal/objects.py b/pydal/objects.py
index e17158f1b..f0f8edb45 100644
--- a/pydal/objects.py
+++ b/pydal/objects.py
@@ -19,9 +19,9 @@
 from .exceptions import NotFoundException, NotAuthorizedException
 from .helpers.regex import REGEX_TABLE_DOT_FIELD, REGEX_ALPHANUMERIC, \
     REGEX_PYTHON_KEYWORDS, REGEX_STORE_PATTERN, REGEX_UPLOAD_PATTERN, \
-    REGEX_CLEANUP_FN, REGEX_VALID_TB_FLD
+    REGEX_CLEANUP_FN, REGEX_VALID_TB_FLD, REGEX_TYPE
 from .helpers.classes import Reference, MethodAdder, SQLCallableList, SQLALL, \
-    Serializable, BasicStorage
+    Serializable, BasicStorage, SQLCustomType
 from .helpers.methods import list_represent, bar_decode_integer, \
     bar_decode_string, bar_encode, archive_record, cleanup, \
     use_common_filters, pluralize
@@ -1064,6 +1064,10 @@ def __init__(self, db, op, first=None, second=None, type=None,
             self.type = first.type
         else:
             self.type = type
+        if isinstance(self.type, str):
+            self._itype = REGEX_TYPE.match(self.type).group(0)
+        else:
+            self._itype = None
         self.optional_args = optional_args
 
     @property
@@ -1372,7 +1376,7 @@ class FieldMethod(object):
     def __init__(self, name, f=None, handler=None):
         # for backward compatibility
         (self.name, self.f) = (name, f) if f else ('unknown', name)
-        self.handler = handler
+        self.handler = handler or VirtualCommand
 
 
 @implements_bool
@@ -1475,6 +1479,10 @@ def __init__(self, fieldname, type='string', length=None, default=DEFAULT,
         self.requires = requires if requires is not None else []
         self.map_none = map_none
         self._rname = rname
+        stype = self.type
+        if isinstance(self.type, SQLCustomType):
+            stype = self.type.type
+        self._itype = REGEX_TYPE.match(stype).group(0) if stype else None
 
     def set_attributes(self, *args, **attributes):
         self.__dict__.update(*args, **attributes)
diff --git a/pydal/parsers/__init__.py b/pydal/parsers/__init__.py
index e992506e2..372c6448e 100644
--- a/pydal/parsers/__init__.py
+++ b/pydal/parsers/__init__.py
@@ -97,12 +97,8 @@ def __init__(self, adapter):
     def _default(self, value, field_type):
         return value
 
-    def get_parser(self, field_type):
-        key = REGEX_TYPE.match(field_type).group(0)
-        return self.registered[key]
-
-    def parse(self, value, field_type):
-        return self.get_parser(field_type)(value, field_type)
+    def parse(self, value, field_itype, field_type):
+        return self.registered[field_itype](value, field_type)
 
 
 from .base import BasicParser
diff --git a/tests/base.py b/tests/base.py
index 6a4fd67b3..e77ee4f71 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -105,7 +105,7 @@ def testRun(self):
         if db._adapter.parser.registered.get('datetime') is None:
             return
 
-        parse = lambda v: db._adapter.parser.parse(v, 'datetime')
+        parse = lambda v: db._adapter.parser.parse(v, 'datetime', 'datetime')
         dt = parse('2015-09-04t12:33:36.223245')
         self.assertEqual(dt.microsecond, 223245)
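
The change is small but sits on a hot path: before this patch every value of every selected row went through Parsers.get_parser(), which re-ran REGEX_TYPE against the field's type string before looking up the parser function. After the patch the regex is matched once, when a Field or Expression is constructed, and cached as _itype; Parsers.parse() then resolves the parser with a plain dictionary lookup. The snippet below is a minimal, self-contained sketch of that pattern under simplified assumptions: the regex, the toy registered table and the Field / parse_slow / parse_fast names are illustrative stand-ins, not pydal's actual API.

import re

# Rough stand-in for pydal's REGEX_TYPE: reduce a full type string such as
# 'decimal(10,2)', 'reference person' or 'list:string' to its leading keyword.
REGEX_TYPE = re.compile(r'^[\w:]+')

# Toy parser registry keyed by the interpreted type (plays the role of
# Parsers.registered in pydal).
registered = {
    'integer': lambda value, field_type: int(value),
    'boolean': lambda value, field_type: value in ('T', 't', 'true', '1', True),
    'string': lambda value, field_type: value,
}


def parse_slow(value, field_type):
    # Old approach: run the regex for every single value of every row.
    key = REGEX_TYPE.match(field_type).group(0)
    return registered[key](value, field_type)


class Field(object):
    def __init__(self, name, field_type='string'):
        self.name = name
        self.type = field_type
        # New approach: interpret the type once, when the field is defined.
        self._itype = REGEX_TYPE.match(field_type).group(0)


def parse_fast(value, field_itype, field_type):
    # Hot path is now a plain dict lookup; no regex work per value.
    return registered[field_itype](value, field_type)


if __name__ == '__main__':
    age = Field('age', 'integer')
    raw = ['1', '2', '42']
    print([parse_slow(v, age.type) for v in raw])              # [1, 2, 42]
    print([parse_fast(v, age._itype, age.type) for v in raw])  # [1, 2, 42]

For a query returning many rows, the second form does the regex work once per field instead of once per cell, which is the kind of saving the patch's subject line refers to.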