diff --git a/pydal/adapters/base.py b/pydal/adapters/base.py
index b83346772..81eb799b6 100644
--- a/pydal/adapters/base.py
+++ b/pydal/adapters/base.py
@@ -346,19 +346,6 @@ def drop_table(self, table, mode=''):
     def rowslice(self, rows, minimum=0, maximum=None):
         return rows
 
-    def alias(self, table, alias):
-        other = copy.copy(table)
-        other['_ot'] = other._ot or other.sqlsafe
-        other['ALL'] = SQLALL(other)
-        other['_tablename'] = alias
-        for fieldname in other.fields:
-            other[fieldname] = copy.copy(other[fieldname])
-            other[fieldname]._tablename = alias
-            other[fieldname].tablename = alias
-            other[fieldname].table = other
-        table._db[alias] = other
-        return other
-
 
 class DebugHandler(ExecutionHandler):
     def before_execute(self, command):
@@ -423,14 +410,10 @@ def execute(self, *args, **kwargs):
     def _expand(self, expression, field_type=None, colnames=False,
                 query_env={}):
         if isinstance(expression, Field):
-            et = expression.table
             if not colnames:
-                table_rname = et.query_alias
-                rv = '%s.%s' % (table_rname, expression._rname or
-                                (self.dialect.quote(expression.name)))
+                rv = expression.sqlsafe
             else:
-                rv = '%s.%s' % (self.dialect.quote(et._tablename),
-                                self.dialect.quote(expression.name))
+                rv = expression.longname
             if field_type == 'string' and expression.type not in (
                     'string', 'text', 'json', 'password'):
                 rv = self.dialect.cast(rv, self.types['text'], query_env)
@@ -465,7 +448,7 @@ def _expand(self, expression, field_type=None, colnames=False,
     def _expand_for_index(self, expression, field_type=None, colnames=False,
                           query_env={}):
         if isinstance(expression, Field):
-            return expression._rname or self.dialect.quote(expression.name)
+            return expression._rname
         return self._expand(expression, field_type, colnames, query_env)
 
     @contextmanager
@@ -480,10 +463,10 @@ def lastrowid(self, table):
     def _insert(self, table, fields):
         if fields:
             return self.dialect.insert(
-                table.sqlsafe,
-                ','.join(el[0].sqlsafe_name for el in fields),
+                table._rname,
+                ','.join(el[0]._rname for el in fields),
                 ','.join(self.expand(v, f.type) for f, v in fields))
-        return self.dialect.insert_empty(table.sqlsafe)
+        return self.dialect.insert_empty(table._rname)
 
     def insert(self, table, fields):
         query = self._insert(table, fields)
@@ -511,17 +494,16 @@ def insert(self, table, fields):
 
     def _update(self, table, query, fields):
         sql_q = ''
-        tablename = table.sqlsafe
         query_env = dict(current_scope=[table._tablename])
         if query:
             if use_common_filters(query):
                 query = self.common_filter(query, [table])
             sql_q = self.expand(query, query_env=query_env)
         sql_v = ','.join([
-            '%s=%s' % (field.sqlsafe_name,
+            '%s=%s' % (field._rname,
                        self.expand(value, field.type, query_env=query_env))
             for (field, value) in fields])
-        return self.dialect.update(tablename, sql_v, sql_q)
+        return self.dialect.update(table, sql_v, sql_q)
 
     def update(self, table, query, fields):
         sql = self._update(table, query, fields)
@@ -539,13 +521,12 @@ def update(self, table, query, fields):
 
     def _delete(self, table, query):
         sql_q = ''
-        tablename = table.sqlsafe
        query_env = dict(current_scope=[table._tablename])
         if query:
             if use_common_filters(query):
                 query = self.common_filter(query, [table])
             sql_q = self.expand(query, query_env=query_env)
-        return self.dialect.delete(tablename, sql_q)
+        return self.dialect.delete(table, sql_q)
 
     def delete(self, table, query):
         sql = self._delete(table, query)
@@ -703,7 +684,7 @@ def _select_wcols(self, query, fields, left=False, join=False,
         if (limitby and not groupby and query_tables and
                 orderby_on_limitby and not orderby):
             sql_ord = ', '.join([
-                tablemap[t].sqlsafe + '.' + tablemap[t][x].sqlsafe_name
+                tablemap[t][x].sqlsafe
                 for t in query_tables if not isinstance(tablemap[t], Select)
                 for x in (hasattr(tablemap[t], '_primarykey') and
                           tablemap[t]._primarykey or ['_id'])
@@ -838,7 +819,7 @@ def truncate(self, table, mode=''):
 
     def create_index(self, table, index_name, *fields, **kwargs):
         expressions = [
-            field.sqlsafe_name if isinstance(field, Field) else field
+            field._rname if isinstance(field, Field) else field
             for field in fields]
         sql = self.dialect.create_index(
             index_name, table, expressions, **kwargs)
diff --git a/pydal/adapters/firebird.py b/pydal/adapters/firebird.py
index 67a440db5..cee01923d 100644
--- a/pydal/adapters/firebird.py
+++ b/pydal/adapters/firebird.py
@@ -51,7 +51,7 @@ def lastrowid(self, table):
         return long(self.cursor.fetchone()[0])
 
     def create_sequence_and_triggers(self, query, table, **args):
-        tablename = table._tablename
+        tablename = table._rname
         sequence_name = table._sequence_name
         trigger_name = table._trigger_name
         self.execute(query)
diff --git a/pydal/adapters/google.py b/pydal/adapters/google.py
index e3b4bd586..3c8381450 100644
--- a/pydal/adapters/google.py
+++ b/pydal/adapters/google.py
@@ -374,7 +374,7 @@ def select(self, query, fields, attributes):
                 (t.name == 'nativeRef' and item) or getattr(item, t.name)
                 for t in fields
             ] for item in items]
-        colnames = ['%s.%s' % (table._tablename, t.name) for t in fields]
+        colnames = [t.longname for t in fields]
         processor = attributes.get('processor', self.parse)
         return processor(rows, fields, colnames, False)
diff --git a/pydal/adapters/ingres.py b/pydal/adapters/ingres.py
index 898fe8203..0cf76a24d 100644
--- a/pydal/adapters/ingres.py
+++ b/pydal/adapters/ingres.py
@@ -34,16 +34,16 @@ def create_sequence_and_triggers(self, query, table, **args):
         # Older Ingres releases could use rule/trigger like Oracle above.
         if hasattr(table, '_primarykey'):
             modify_tbl_sql = 'modify %s to btree unique on %s' % \
-                (table._tablename,
+                (table._rname,
                  ', '.join(["'%s'" % x for x in table.primarykey]))
             self.execute(modify_tbl_sql)
         else:
-            tmp_seqname = '%s_iisq' % table._tablename
+            tmp_seqname = '%s_iisq' % table._raw_rname
             query = query.replace(self.dialect.INGRES_SEQNAME, tmp_seqname)
             self.execute('create sequence %s' % tmp_seqname)
             self.execute(query)
             self.execute(
-                'modify %s to btree unique on %s' % (table._tablename, 'id'))
+                'modify %s to btree unique on %s' % (table._rname, 'id'))
 
 
 @adapters.register_for('ingresu')
diff --git a/pydal/adapters/mongo.py b/pydal/adapters/mongo.py
index 02bf973a3..6c77e2b3c 100644
--- a/pydal/adapters/mongo.py
+++ b/pydal/adapters/mongo.py
@@ -326,7 +326,7 @@ def __select(self, query, fields, left=False, join=False, distinct=False,
                         # Mongodb reserved uuid key
                         colname = (tablename + '.' + 'id', '_id')
                     else:
-                        colname = (tablename + '.' + field.name, field.name)
+                        colname = (field.longname, field.name)
                 elif not isinstance(query, Expression):
                     colname = (field.name, field.name)
                 colnames.append(colname[1])
diff --git a/pydal/adapters/oracle.py b/pydal/adapters/oracle.py
index 22a594de6..2f6a8df6d 100644
--- a/pydal/adapters/oracle.py
+++ b/pydal/adapters/oracle.py
@@ -60,8 +60,8 @@ def lastrowid(self, table):
         return long(self.cursor.fetchone()[0])
 
     def create_sequence_and_triggers(self, query, table, **args):
-        tablename = table._rname or table._tablename
-        id_name = table._id.name
+        tablename = table._rname
+        id_name = table._id._rname
         sequence_name = table._sequence_name
         trigger_name = table._trigger_name
         self.execute(query)
@@ -94,21 +94,21 @@ def sqlsafe_table(self, tablename, original_tablename=None):
 
     def _build_value_for_insert(self, field, value, r_values):
         if field.type is 'text':
-            r_values[':' + field.sqlsafe_name] = self.expand(value, field.type)
-            return ':' + field.sqlsafe_name
+            r_values[':' + field._rname] = self.expand(value, field.type)
+            return ':' + field._rname
         return self.expand(value, field.type)
 
     def _insert(self, table, fields):
         if fields:
             r_values = {}
             return self.dialect.insert(
-                table.sqlsafe,
-                ','.join(el[0].sqlsafe_name for el in fields),
+                table._rname,
+                ','.join(el[0]._rname for el in fields),
                 ','.join(
                     self._build_value_for_insert(f, v, r_values)
                     for f, v in fields)
                 ), r_values
-        return self.dialect.insert_empty(table.sqlsafe), None
+        return self.dialect.insert_empty(table._rname), None
 
     def insert(self, table, fields):
         query, values = self._insert(table, fields)
diff --git a/pydal/adapters/postgres.py b/pydal/adapters/postgres.py
index e132330e9..100b2d5fa 100644
--- a/pydal/adapters/postgres.py
+++ b/pydal/adapters/postgres.py
@@ -116,13 +116,13 @@ def _insert(self, table, fields):
             retval = None
             if hasattr(table, '_id'):
                 self._last_insert = (table._id, 1)
-                retval = table._id.name
+                retval = table._id._rname
             return self.dialect.insert(
-                table.sqlsafe,
-                ','.join(el[0].sqlsafe_name for el in fields),
+                table._rname,
+                ','.join(el[0]._rname for el in fields),
                 ','.join(self.expand(v, f.type) for f, v in fields),
                 retval)
-        return self.dialect.insert_empty(table.sqlsafe)
+        return self.dialect.insert_empty(table._rname)
 
     @with_connection
     def prepare(self, key):
diff --git a/pydal/adapters/sap.py b/pydal/adapters/sap.py
index adc330965..046bf95a2 100644
--- a/pydal/adapters/sap.py
+++ b/pydal/adapters/sap.py
@@ -45,5 +45,5 @@ def create_sequence_and_triggers(self, query, table, **args):
         self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
         self.execute(
             "ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" %
-            (table._tablename, table._id.name, table._sequence_name))
+            (table._rname, table._id._rname, table._sequence_name))
         self.execute(query)
diff --git a/pydal/adapters/sqlite.py b/pydal/adapters/sqlite.py
index 0068695a5..36f79c37a 100644
--- a/pydal/adapters/sqlite.py
+++ b/pydal/adapters/sqlite.py
@@ -87,7 +87,7 @@ def delete(self, table, query):
         counter = super(SQLite, self).delete(table, query)
         if counter:
             for field in table._referenced_by:
-                if field.type == 'reference ' + table._tablename \
+                if field.type == 'reference ' + table._dalname \
                         and field.ondelete == 'CASCADE':
                     db(field.belongs(deleted)).delete()
         return counter
diff --git a/pydal/contrib/imap_adapter.py b/pydal/contrib/imap_adapter.py
index c84e0c6d9..c3486c881 100644
--- a/pydal/contrib/imap_adapter.py
+++ b/pydal/contrib/imap_adapter.py
@@ -524,7 +524,7 @@ def select(self, query, fields, attributes):
         fetch_results = 
list() if isinstance(query, Query): - tablename = self.get_table(query) + tablename = self.get_table(query)._dalname mailbox = self.connection.mailbox_names.get(tablename, None) if mailbox is None: raise ValueError("Mailbox name not found: %s" % mailbox) @@ -601,7 +601,7 @@ def select(self, query, fields, attributes): if allfields: colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()] else: - colnames = ["%s.%s" % (tablename, field.name) for field in fields] + colnames = [field.longname for field in fields] for k in colnames: imapfields_dict[k] = k @@ -800,10 +800,11 @@ def add_payload(message, obj): else: raise NotImplementedError("IMAP empty insert is not implemented") - def update(self, tablename, query, fields): + def update(self, table, query, fields): # TODO: the adapter should implement an .expand method commands = list() rowcount = 0 + tablename = table._dalname if use_common_filters(query): query = self.common_filter(query, [tablename,]) mark = [] @@ -855,8 +856,9 @@ def count(self,query,distinct=None): counter = len(store_list) return counter - def delete(self, tablename, query): + def delete(self, table, query): counter = 0 + tablename = table._dalname if query: if use_common_filters(query): query = self.common_filter(query, [tablename,]) diff --git a/pydal/dialects/base.py b/pydal/dialects/base.py index 03bfbb628..6495145c8 100644 --- a/pydal/dialects/base.py +++ b/pydal/dialects/base.py @@ -148,13 +148,15 @@ def insert_empty(self, table): def where(self, query): return 'WHERE %s' % query - def update(self, tablename, values, where=None): + def update(self, table, values, where=None): + tablename = self.writing_alias(table) whr = '' if where: whr = ' %s' % self.where(where) return 'UPDATE %s SET %s%s;' % (tablename, values, whr) - def delete(self, tablename, where=None): + def delete(self, table, where=None): + tablename = self.writing_alias(table) whr = '' if where: whr = ' %s' % self.where(where) @@ -470,18 +472,18 @@ def primary_key(self, key): return 'PRIMARY KEY(%s)' % key def drop_table(self, table, mode): - return ['DROP TABLE %s;' % table.sqlsafe] + return ['DROP TABLE %s;' % table._rname] def truncate(self, table, mode=''): if mode: mode = " %s" % mode - return ['TRUNCATE TABLE %s%s;' % (table.sqlsafe, mode)] + return ['TRUNCATE TABLE %s%s;' % (table._rname, mode)] def create_index(self, name, table, expressions, unique=False): uniq = ' UNIQUE' if unique else '' with self.adapter.index_expander(): rv = 'CREATE%s INDEX %s ON %s (%s);' % ( - uniq, self.quote(name), table.sqlsafe, ','.join( + uniq, self.quote(name), table._rname, ','.join( self.expand(field) for field in expressions)) return rv @@ -494,6 +496,9 @@ def constraint_name(self, table, fieldname): def concat_add(self, tablename): return ', ADD ' + def writing_alias(self, table): + return table.sql_fullref + class NoSQLDialect(CommonDialect): @sqltype_for('string') diff --git a/pydal/dialects/firebird.py b/pydal/dialects/firebird.py index bccf5f588..2cacd2c0c 100644 --- a/pydal/dialects/firebird.py +++ b/pydal/dialects/firebird.py @@ -105,10 +105,10 @@ def select(self, fields, tables, where=None, groupby=None, having=None, def drop_table(self, table, mode): sequence_name = table._sequence_name return [ - 'DROP TABLE %s %s;' % (table.sqlsafe, mode), + 'DROP TABLE %s %s;' % (table._rname, mode), 'DROP GENERATOR %s;' % sequence_name] def truncate(self, table, mode=''): return [ - 'DELETE FROM %s;' % table._tablename, + 'DELETE FROM %s;' % table._rname, 'SET GENERATOR %s TO 0;' % 
table._sequence_name] diff --git a/pydal/dialects/mssql.py b/pydal/dialects/mssql.py index 9c2536d71..907ab3d26 100644 --- a/pydal/dialects/mssql.py +++ b/pydal/dialects/mssql.py @@ -75,9 +75,9 @@ def type_reference_fk(self): @sqltype_for('reference TFK') def type_reference_tfk(self): - return ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY ' + \ + return ' CONSTRAINT FK_%(constraint_name)s_PK FOREIGN KEY ' + \ '(%(field_name)s) REFERENCES %(foreign_table)s ' + \ - '(%(foreign_key)s) ON DELETE %(on_delete_action)s', + '(%(foreign_key)s) ON DELETE %(on_delete_action)s' @sqltype_for('geometry') def type_geometry(self): @@ -90,6 +90,21 @@ def type_geography(self): def varquote(self, val): return varquote_aux(val, '[%s]') + def update(self, table, values, where=None): + tablename = self.writing_alias(table) + whr = '' + if where: + whr = ' %s' % self.where(where) + return 'UPDATE %s SET %s FROM %s%s;' % ( + table.sql_shortref, values, tablename, whr) + + def delete(self, table, where=None): + tablename = self.writing_alias(table) + whr = '' + if where: + whr = ' %s' % self.where(where) + return 'DELETE %s FROM %s%s;' % (table.sql_shortref, tablename, whr) + def select(self, fields, tables, where=None, groupby=None, having=None, orderby=None, limitby=None, distinct=False, for_update=False): dst, whr, grp, order, limit, offset, upd = '', '', '', '', '', '', '' @@ -180,7 +195,7 @@ def concat_add(self, tablename): return '; ALTER TABLE %s ADD ' % tablename def drop_index(self, name, table): - return 'DROP INDEX %s ON %s;' % (self.quote(name), table.sqlsafe) + return 'DROP INDEX %s ON %s;' % (self.quote(name), table._rname) def st_astext(self, first, query_env={}): return '%s.STAsText()' % self.expand(first, query_env=query_env) @@ -393,7 +408,7 @@ def extract(self, first, what, query_env={}): def truncate(self, table, mode=''): if mode: mode = " %s" % mode - return ['TRUNCATE %s%s;' % (table.sqlsafe, mode)] + return ['TRUNCATE %s%s;' % (table._rname, mode)] def select(self, *args, **kwargs): return SQLDialect.select(self, *args, **kwargs) diff --git a/pydal/dialects/mysql.py b/pydal/dialects/mysql.py index 088480079..f9b0e3519 100644 --- a/pydal/dialects/mysql.py +++ b/pydal/dialects/mysql.py @@ -56,6 +56,13 @@ def varquote(self, val): def insert_empty(self, table): return 'INSERT INTO %s VALUES (DEFAULT);' % table + def delete(self, table, where=None): + tablename = self.writing_alias(table) + whr = '' + if where: + whr = ' %s' % self.where(where) + return 'DELETE %s FROM %s%s;' % (table.sql_shortref, tablename, whr) + @property def random(self): return 'RAND()' @@ -86,8 +93,8 @@ def cast(self, first, second, query_env={}): def drop_table(self, table, mode): # breaks db integrity but without this mysql does not drop table return [ - 'SET FOREIGN_KEY_CHECKS=0;', 'DROP TABLE %s;' % table.sqlsafe, + 'SET FOREIGN_KEY_CHECKS=0;', 'DROP TABLE %s;' % table._rname, 'SET FOREIGN_KEY_CHECKS=1;'] def drop_index(self, name, table): - return 'DROP INDEX %s ON %s;' % (self.quote(name), table.sqlsafe) + return 'DROP INDEX %s ON %s;' % (self.quote(name), table._rname) diff --git a/pydal/dialects/oracle.py b/pydal/dialects/oracle.py index 3452b5977..2c4f72b0e 100644 --- a/pydal/dialects/oracle.py +++ b/pydal/dialects/oracle.py @@ -115,5 +115,5 @@ def select(self, fields, tables, where=None, groupby=None, having=None, def drop_table(self, table, mode): sequence_name = table._sequence_name return [ - 'DROP TABLE %s %s;' % (table.sqlsafe, mode), + 'DROP TABLE %s %s;' % (table._rname, mode), 'DROP SEQUENCE %s;' % 
sequence_name] diff --git a/pydal/dialects/postgre.py b/pydal/dialects/postgre.py index 514a9d9b8..4f7b7e61b 100644 --- a/pydal/dialects/postgre.py +++ b/pydal/dialects/postgre.py @@ -58,7 +58,7 @@ def sequence_name(self, tablename): def insert(self, table, fields, values, returning=None): ret = '' if returning: - ret = 'RETURNING %s' % self.quote(returning) + ret = 'RETURNING %s' % returning return 'INSERT INTO %s(%s) VALUES (%s)%s;' % ( table, fields, values, ret) @@ -115,7 +115,7 @@ def ilike(self, first, second, escape=None, query_env={}): def drop_table(self, table, mode): if mode not in ['restrict', 'cascade', '']: raise ValueError('Invalid mode: %s' % mode) - return ['DROP TABLE ' + table.sqlsafe + ' ' + mode + ';'] + return ['DROP TABLE ' + table._rname + ' ' + mode + ';'] def create_index(self, name, table, expressions, unique=False, where=None): uniq = ' UNIQUE' if unique else '' @@ -124,7 +124,7 @@ def create_index(self, name, table, expressions, unique=False, where=None): whr = ' %s' % self.where(where) with self.adapter.index_expander(): rv = 'CREATE%s INDEX %s ON %s (%s)%s;' % ( - uniq, self.quote(name), table.sqlsafe, ','.join( + uniq, self.quote(name), table._rname, ','.join( self.expand(field) for field in expressions), whr) return rv diff --git a/pydal/dialects/sqlite.py b/pydal/dialects/sqlite.py index f168a0ea1..5094efc1e 100644 --- a/pydal/dialects/sqlite.py +++ b/pydal/dialects/sqlite.py @@ -40,10 +40,16 @@ def select(self, fields, tables, where=None, groupby=None, having=None, for_update) def truncate(self, table, mode=''): - tablename = table._tablename + tablename = self.adapter.expand(table._raw_rname, 'string') return [ - self.delete(tablename), - self.delete('sqlite_sequence', "name='%s'" % tablename)] + self.delete(table), + "DELETE FROM sqlite_sequence WHERE name=%s" % tablename] + + def writing_alias(self, table): + if table._dalname != table._tablename: + raise SyntaxError( + 'SQLite does not support UPDATE/DELETE on aliased table') + return table._rname @dialects.register_for(Spatialite) diff --git a/pydal/dialects/teradata.py b/pydal/dialects/teradata.py index 25c074d30..cf96a0249 100644 --- a/pydal/dialects/teradata.py +++ b/pydal/dialects/teradata.py @@ -97,4 +97,4 @@ def select(self, fields, tables, where=None, groupby=None, having=None, dst, limit, fields, tables, whr, grp, order, offset, upd) def truncate(self, table, mode=''): - return ['DELETE FROM %s ALL;' % table._tablename] + return ['DELETE FROM %s ALL;' % table._rname] diff --git a/pydal/migrator.py b/pydal/migrator.py index 759de6c0a..f45c2bb1a 100644 --- a/pydal/migrator.py +++ b/pydal/migrator.py @@ -52,7 +52,7 @@ def create_table(self, table, migrate=True, fake_migrate=False, if referenced == '.': referenced = tablename constraint_name = self.dialect.constraint_name( - tablename, field_name) + table._raw_rname, field._raw_rname) # if not '.' 
in referenced \ # and referenced != tablename \ # and hasattr(table,'_primarykey'): @@ -87,21 +87,21 @@ def create_table(self, table, migrate=True, fake_migrate=False, TFK[rtablename] = {} TFK[rtablename][rfieldname] = field_name else: - fk = rtable.sqlsafe + ' (' + rfield.sqlsafe_name + ')' + fk = rtable._rname + ' (' + rfield._rname + ')' ftype = ftype + \ types['reference FK'] % dict( # should be quoted constraint_name=constraint_name, foreign_key=fk, - table_name=table.sqlsafe, - field_name=field.sqlsafe_name, + table_name=table._rname, + field_name=field._rname, on_delete_action=field.ondelete) else: # make a guess here for circular references if referenced in db: - id_fieldname = db[referenced]._id.sqlsafe_name + id_fieldname = db[referenced]._id._rname elif referenced == tablename: - id_fieldname = table._id.sqlsafe_name + id_fieldname = table._id._rname else: # make a guess id_fieldname = self.dialect.quote('id') #gotcha: the referenced table must be defined before @@ -111,18 +111,18 @@ def create_table(self, table, migrate=True, fake_migrate=False, #migrations and model relationship work also if tables #are not defined in order if referenced == tablename: - real_referenced = db[referenced].sqlsafe + real_referenced = db[referenced]._rname else: real_referenced = ( - referenced in db and db[referenced].sqlsafe or + referenced in db and db[referenced]._rname or referenced) rfield = db[referenced]._id ftype_info = dict( - index_name=self.dialect.quote(field_name+'__idx'), - field_name=field.sqlsafe_name, + index_name=self.dialect.quote(field._raw_rname+'__idx'), + field_name=field._rname, constraint_name=self.dialect.quote(constraint_name), foreign_key='%s (%s)' % ( - real_referenced, rfield.sqlsafe_name), + real_referenced, rfield._rname), on_delete_action=field.ondelete) ftype_info['null'] = ' NOT NULL' if field.notnull else \ self.dialect.allow_null @@ -158,8 +158,8 @@ def create_table(self, table, migrate=True, fake_migrate=False, schema = parms[0] ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype] ftype = ftype % dict(schema=schema, - tablename=tablename, - fieldname=field_name, srid=srid, + tablename=table._raw_rname, + fieldname=field._raw_rname, srid=srid, dimension=dimension) postcreation_fields.append(ftype) elif field_type not in types: @@ -185,7 +185,9 @@ def create_table(self, table, migrate=True, fake_migrate=False, notnull=field.notnull, sortable=sortable, type=str(field_type), - sql=ftype) + sql=ftype, + rname=field._rname, + raw_rname=field._raw_rname) if field.notnull and field.default is not None: # Caveat: sql_fields and sql_fields_aux @@ -201,40 +203,46 @@ def create_table(self, table, migrate=True, fake_migrate=False, # geometry fields are added after the table has been created, not now if not (self.dbengine == 'postgres' and field_type.startswith('geom')): - fields.append('%s %s' % (field.sqlsafe_name, ftype)) + fields.append('%s %s' % (field._rname, ftype)) other = ';' # backend-specific extensions to fields if self.dbengine == 'mysql': if not hasattr(table, "_primarykey"): - fields.append('PRIMARY KEY (%s)' % ( - self.dialect.quote(table._id.name))) + fields.append('PRIMARY KEY (%s)' % (table._id._rname)) engine = self.adapter.adapter_args.get('engine', 'InnoDB') other = ' ENGINE=%s CHARACTER SET utf8;' % engine fields = ',\n '.join(fields) for rtablename in TFK: + rtable = db[rtablename] rfields = TFK[rtablename] - pkeys = [ - self.dialect.quote(pk) for pk in 
db[rtablename]._primarykey] - fkeys = [self.dialect.quote(rfields[k].name) for k in pkeys] + pkeys = [rtable[pk]._rname for pk in rtable._primarykey] + fk_fields = [table[rfields[k]] for k in rtable._primarykey] + fkeys = [f._rname for f in fk_fields] + constraint_name = self.dialect.constraint_name( + table._raw_rname, '_'.join(f._raw_rname for f in fk_fields)) + on_delete = list(set(f.ondelete for f in fk_fields)) + if len(on_delete) > 1: + raise SyntaxError('Table %s has incompatible ON DELETE actions in multi-field foreign key.' % table._dalname) fields = fields + ',\n ' + \ types['reference TFK'] % dict( - table_name=table.sqlsafe, + constraint_name=constraint_name, + table_name=table._rname, field_name=', '.join(fkeys), - foreign_table=table.sqlsafe, + foreign_table=rtable._rname, foreign_key=', '.join(pkeys), - on_delete_action=field.ondelete) + on_delete_action=on_delete[0]) if getattr(table, '_primarykey', None): query = "CREATE TABLE %s(\n %s,\n %s) %s" % \ - (table.sqlsafe, fields, + (table._rname, fields, self.dialect.primary_key(', '.join([ - self.dialect.quote(pk) + table[pk]._rname for pk in table._primarykey])), other) else: query = "CREATE TABLE %s(\n %s\n)%s" % \ - (table.sqlsafe, fields, other) + (table._rname, fields, other) uri = self.adapter.uri if uri.startswith('sqlite:///') \ @@ -289,6 +297,15 @@ def create_table(self, table, migrate=True, fake_migrate=False, self.file_close(tfile) raise RuntimeError('File %s appears corrupted' % table._dbt) self.file_close(tfile) + # add missing rnames + for key, item in sql_fields_old.items(): + tmp = sql_fields.get(key) + if tmp: + item.setdefault('rname', tmp['rname']) + item.setdefault('raw_rname', tmp['raw_rname']) + else: + item.setdefault('rname', self.dialect.quote(key)) + item.setdefault('raw_rname', key) if sql_fields != sql_fields_old: self.migrate_table( table, @@ -312,8 +329,14 @@ def migrate_table(self, table, sql_fields, sql_fields_old, sql_fields_aux, db = table._db db._migrated.append(table._tablename) tablename = table._tablename + if self.dbengine in ('firebird',): + drop_expr = 'ALTER TABLE %s DROP %s;' + else: + drop_expr = 'ALTER TABLE %s DROP COLUMN %s;' + field_types = dict((x.lower(), table[x].type) + for x in sql_fields.keys() if x in table) # make sure all field names are lower case to avoid - # migrations because of case cahnge + # migrations because of case change sql_fields = dict(map(self._fix, iteritems(sql_fields))) sql_fields_old = dict(map(self._fix, iteritems(sql_fields_old))) sql_fields_aux = dict(map(self._fix, iteritems(sql_fields_aux))) @@ -325,7 +348,7 @@ def migrate_table(self, table, sql_fields, sql_fields_old, sql_fields_aux, for key in sql_fields_old: if key not in keys: keys.append(key) - new_add = self.dialect.concat_add(tablename) + new_add = self.dialect.concat_add(table._rname) metadata_change = False sql_fields_current = copy.copy(sql_fields_old) @@ -339,12 +362,22 @@ def migrate_table(self, table, sql_fields, sql_fields_old, sql_fields_aux, query = [sql_fields[key]['sql']] else: query = ['ALTER TABLE %s ADD %s %s;' % ( - table.sqlsafe, self.dialect.quote(key), + table._rname, sql_fields[key]['rname'], sql_fields_aux[key]['sql'].replace(', ', new_add))] metadata_change = True elif self.dbengine in ('sqlite', 'spatialite'): if key in sql_fields: sql_fields_current[key] = sql_fields[key] + # Field rname has changes, add new column + if (sql_fields[key]['raw_rname'].lower() != + sql_fields_old[key]['raw_rname'].lower()): + tt = sql_fields_aux[key]['sql'].replace(', ', new_add) + 
query = [ + 'ALTER TABLE %s ADD %s %s;' % ( + table._rname, sql_fields[key]['rname'], tt), + 'UPDATE %s SET %s=%s;' % ( + table._rname, sql_fields[key]['rname'], + sql_fields_old[key]['rname'])] metadata_change = True elif key not in sql_fields: del sql_fields_current[key] @@ -355,48 +388,45 @@ def migrate_table(self, table, sql_fields, sql_fields_old, sql_fields_aux, schema = parms.split(',')[0] query = ["SELECT DropGeometryColumn ('%(schema)s', \ '%(table)s', '%(field)s');" % dict( - schema=schema, table=tablename, field=key)] - elif self.dbengine in ('firebird',): - query = ['ALTER TABLE %s DROP %s;' % ( - self.dialect.quote(tablename), - self.dialect.quote(key))] + schema=schema, table=table._raw_rname, + field=sql_fields_old[key]['raw_rname'])] else: - query = ['ALTER TABLE %s DROP COLUMN %s;' % ( - self.dialect.quote(tablename), - self.dialect.quote(key))] + query = [drop_expr % ( + table._rname, sql_fields_old[key]['rname'])] + metadata_change = True + # The field has a new rname, temp field is not needed + elif (sql_fields[key]['raw_rname'].lower() != + sql_fields_old[key]['raw_rname'].lower()): + sql_fields_current[key] = sql_fields[key] + tt = sql_fields_aux[key]['sql'].replace(', ', new_add) + query = [ + 'ALTER TABLE %s ADD %s %s;' % ( + table._rname, sql_fields[key]['rname'], tt), + 'UPDATE %s SET %s=%s;' % ( + table._rname, sql_fields[key]['rname'], + sql_fields_old[key]['rname']), + drop_expr % (table._rname, sql_fields_old[key]['rname']), + ] metadata_change = True elif ( sql_fields[key]['sql'] != sql_fields_old[key]['sql'] and not - (key in table.fields and - isinstance(table[key].type, SQLCustomType)) and not + isinstance(field_types.get(key), SQLCustomType) and not sql_fields[key]['type'].startswith('reference') and not sql_fields[key]['type'].startswith('double') and not sql_fields[key]['type'].startswith('id')): sql_fields_current[key] = sql_fields[key] - t = tablename tt = sql_fields_aux[key]['sql'].replace(', ', new_add) - if self.dbengine in ('firebird',): - drop_expr = 'ALTER TABLE %s DROP %s;' - else: - drop_expr = 'ALTER TABLE %s DROP COLUMN %s;' - key_tmp = key + '__tmp' + key_tmp = self.dialect.quote(key + '__tmp') query = [ - 'ALTER TABLE %s ADD %s %s;' % ( - self.dialect.quote(t), self.dialect.quote(key_tmp), - tt), + 'ALTER TABLE %s ADD %s %s;' % (table._rname, key_tmp, tt), 'UPDATE %s SET %s=%s;' % ( - self.dialect.quote(t), self.dialect.quote(key_tmp), - self.dialect.quote(key)), - drop_expr % ( - self.dialect.quote(t), self.dialect.quote(key)), + table._rname, key_tmp, sql_fields_old[key]['rname']), + drop_expr % (table._rname, sql_fields_old[key]['rname']), 'ALTER TABLE %s ADD %s %s;' % ( - self.dialect.quote(t), - self.dialect.quote(key), tt), + table._rname, sql_fields[key]['rname'], tt), 'UPDATE %s SET %s=%s;' % ( - self.dialect.quote(t), self.dialect.quote(key), - self.dialect.quote(key_tmp)), - drop_expr % ( - self.dialect.quote(t), self.dialect.quote(key_tmp)) + table._rname, sql_fields[key]['rname'], key_tmp), + drop_expr % (table._rname, key_tmp) ] metadata_change = True elif sql_fields[key]['type'] != sql_fields_old[key]['type']: diff --git a/pydal/objects.py b/pydal/objects.py index fdc56524c..69086a502 100644 --- a/pydal/objects.py +++ b/pydal/objects.py @@ -34,6 +34,7 @@ attempt_upload_on_insert, attempt_upload_on_update, delete_uploaded_files ) from .helpers.serializers import serializers +from .utils import deprecated DEFAULTLENGTH = {'string': 512, 'password': 512, 'upload': 512, 'text': 2**15, @@ -221,16 +222,17 @@ def __init__(self, db, 
tablename, *fields, **args): super(Table, self).__init__() self._actual = False # set to True by define_table() self._db = db - self._tablename = tablename + self._tablename = self._dalname = tablename if not isinstance(tablename, str) or hasattr(DAL, tablename) or not \ REGEX_VALID_TB_FLD.match(tablename) or \ REGEX_PYTHON_KEYWORDS.match(tablename): raise SyntaxError('Field: invalid table name: %s, ' 'use rname for "funny" names' % tablename) - self._ot = None - self._rname = args.get('rname') + self._rname = args.get('rname') or \ + db and db._adapter.dialect.quote(tablename) + self._raw_rname = args.get('rname') or db and tablename self._sequence_name = args.get('sequence_name') or \ - db and db._adapter.dialect.sequence_name(self._rname or tablename) + db and db._adapter.dialect.sequence_name(self._raw_rname) self._trigger_name = args.get('trigger_name') or \ db and db._adapter.dialect.trigger_name(tablename) self._common_filter = args.get('common_filter') @@ -346,9 +348,7 @@ def check_reserved(field_name): self[field_name] = field if field.type == 'id': self['id'] = field - field.tablename = field._tablename = tablename - field.table = field._table = self - field.db = field._db = db + field.bind(self) self.ALL = SQLALL(self) if _primarykey is not None: @@ -381,7 +381,7 @@ def _enable_record_versioning(self, current_record_label=None): db = self._db archive_db = archive_db or db - archive_name = archive_name % dict(tablename=self._tablename) + archive_name = archive_name % dict(tablename=self._dalname) if archive_name in archive_db.tables(): return # do not try define the archive if already exists fieldnames = self.fields() @@ -408,7 +408,7 @@ def _enable_record_versioning(self, AND, [ tab.is_active == True for tab in db._adapter.tables(query).values() - if tab.real_name == self.real_name] + if tab._raw_rname == self._raw_rname] ) query = self._common_filter if query: @@ -624,38 +624,34 @@ def __repr__(self): return '' % (self._tablename, ', '.join(self.fields())) def __str__(self): - if self._ot is not None: - return self._db._adapter.dialect._as(self._ot, self._tablename) - return self._tablename + if self._tablename == self._dalname: + return self._tablename + return self._db._adapter.dialect._as(self._dalname, self._tablename) @property + @deprecated('sqlsafe', 'sql_shortref', 'Table') def sqlsafe(self): - rname = self._rname - if rname: - return rname - return self._db._adapter.sqlsafe_table(self._tablename) + return self.sql_shortref @property + @deprecated('sqlsafe_alias', 'sql_fullref', 'Table') def sqlsafe_alias(self): - rname = self._rname - ot = self._ot - if rname and not ot: - return rname - return self._db._adapter.sqlsafe_table(self._tablename, self._ot) - - def query_name(self, *args, **kwargs): - return (self.sqlsafe_alias,) + return self.sql_fullref @property - def query_alias(self): - if self._rname and not self._ot: + def sql_shortref(self): + if self._tablename == self._dalname: return self._rname - return self._db._adapter.dialect.quote(self._tablename) + return self._db._adapter.sqlsafe_table(self._tablename) @property - def real_name(self): - """Backend name of the table""" - return self._rname or self._ot or self.sqlsafe + def sql_fullref(self): + if self._tablename == self._dalname: + return self._rname + return self._db._adapter.sqlsafe_table(self._tablename, self._rname) + + def query_name(self, *args, **kwargs): + return (self.sql_fullref,) def _drop(self, mode=''): return self._db._adapter.dialect.drop_table(self, mode) @@ -1025,7 +1021,18 @@ def 
as_dict(self, flat=False, sanitize=True): return table_as_dict def with_alias(self, alias): - return self._db._adapter.alias(self, alias) + other = copy.copy(self) + other['ALL'] = SQLALL(other) + other['_tablename'] = alias + for fieldname in other.fields: + tmp = self[fieldname].clone() + tmp.bind(other) + other[fieldname] = tmp + if 'id' in self and 'id' not in other.fields: + other['id'] = other[self.id.name] + other._id = other[self._id.name] + self._db[alias] = other + return other def on(self, query): return Expression(self._db, self._db._adapter.dialect.on, self, query) @@ -1041,6 +1048,7 @@ class Select(BasicStorage): def __init__(self, db, query, fields, attributes): self._db = db self._tablename = None # alias will be stored here + self._rname = self._raw_rname = self._dalname = None self._common_filter = None self._query = query # if false, the subquery will never reference tables from parent scope @@ -1072,9 +1080,7 @@ def __init__(self, db, query, fields, attributes): raise SyntaxError("duplicate field %s in select query" % field.name) fieldcheck.add(checkname) - field.tablename = field._tablename = self._tablename - field.table = field._table = self - field.db = field._db = db + field.bind(self) self.fields.append(field.name) self[field.name] = field self.ALL = SQLALL(self) @@ -1126,11 +1132,9 @@ def with_alias(self, alias): other['ALL'] = SQLALL(other) other['_tablename'] = alias for fieldname in other.fields: - other[fieldname] = copy.copy(other[fieldname]) - other[fieldname]._tablename = alias - other[fieldname].tablename = alias - other[fieldname]._table = other - other[fieldname].table = other + tmp = self[fieldname].clone() + tmp.bind(other) + other[fieldname] = tmp return other def on(self, query): @@ -1166,15 +1170,11 @@ def query_name(self, outer_scoped=[]): return (sql,) @property - def query_alias(self): + def sql_shortref(self): if self._tablename is None: raise SyntaxError("Subselect must be aliased for use in a JOIN") return self._db._adapter.dialect.quote(self._tablename) - @property - def real_name(self): - return None - def _filter_fields(self, record, id=False): return dict([(k, v) for (k, v) in iteritems(record) if k in self.fields and (self[k].type != 'id' or id)]) @@ -1567,6 +1567,7 @@ def __init__(self, fieldname, type='string', length=None, default=DEFAULT, filter_in=None, filter_out=None, custom_qualifier=None, map_none=None, rname=None): self._db = self.db = None # both for backward compatibility + self.table = self._table = None self.op = None self.first = None self.second = None @@ -1620,21 +1621,39 @@ def __init__(self, fieldname, type='string', length=None, default=DEFAULT, fieldname.replace('_', ' ').title()) self.requires = requires if requires is not None else [] self.map_none = map_none - self._rname = rname + self._rname = self._raw_rname = rname stype = self.type if isinstance(self.type, SQLCustomType): stype = self.type.type self._itype = REGEX_TYPE.match(stype).group(0) if stype else None + def bind(self, table): + if self._table is not None: + raise ValueError( + 'Field %s is already bound to a table' % self.longname) + self.db = self._db = table._db + self.table = self._table = table + self.tablename = self._tablename = table._tablename + if self._db and self._rname is None: + self._rname = self._db._adapter.sqlsafe_field(self.name) + self._raw_rname = self.name + def set_attributes(self, *args, **attributes): self.__dict__.update(*args, **attributes) def clone(self, point_self_references_to=False, **args): field = copy.copy(self) if 
point_self_references_to and \ - field.type == 'reference %s'+field._tablename: + self.type == 'reference %s' % self._tablename: field.type = 'reference %s' % point_self_references_to field.__dict__.update(args) + field.db = field._db = None + field.table = field._table = None + field.tablename = field._tablename = None + if self._db and \ + self._rname == self._db._adapter.sqlsafe_field(self.name): + # Reset the name because it may need to be requoted by bind() + field._rname = field._raw_rname = None return field def store(self, file, filename=None, path=None): @@ -1838,24 +1857,29 @@ def __bool__(self): return True def __str__(self): - try: + if self._table: return '%s.%s' % (self.tablename, self.name) - except: - return '.%s' % self.name + return '.%s' % self.name def __hash__(self): return id(self) @property def sqlsafe(self): - if self._table: - return self._table.sqlsafe + '.' + \ - (self._rname or self._db._adapter.sqlsafe_field(self.name)) - return '.%s' % self.name + if self._table is None: + raise SyntaxError('Field %s is not bound to any table' % self.name) + return self._table.sql_shortref + '.' + self._rname @property + @deprecated('sqlsafe_name', '_rname', 'Field') def sqlsafe_name(self): - return self._rname or self._db._adapter.sqlsafe_field(self.name) + return self._rname + + @property + def longname(self): + if self._table is None: + raise SyntaxError('Field %s is not bound to any table' % self.name) + return self._table._tablename + '.' + self.name class Query(Serializable): diff --git a/tests/indexes.py b/tests/indexes.py index d00bc1596..6eb647252 100644 --- a/tests/indexes.py +++ b/tests/indexes.py @@ -28,7 +28,7 @@ def testRun(self): with db._adapter.index_expander(): coalesce_sql = str(db.tt.bb.coalesce(None)) expected_sql = 'CREATE INDEX %s ON %s (%s,%s);' % ( - db._adapter.dialect.quote('idx_aa_and_bb'), db.tt.sqlsafe, + db._adapter.dialect.quote('idx_aa_and_bb'), db.tt.sql_shortref, db.tt.aa.sqlsafe_name, coalesce_sql) self.assertEqual(sql, expected_sql) rv = db.tt.create_index( diff --git a/tests/sql.py b/tests/sql.py index 8794045b0..e7c521e04 100644 --- a/tests/sql.py +++ b/tests/sql.py @@ -15,8 +15,7 @@ from pydal.objects import Table, Expression, Row from ._compat import unittest from ._adapt import ( - DEFAULT_URI, IS_POSTGRESQL, IS_SQLITE, IS_MSSQL, IS_MYSQL, IS_TERADATA, - _quote) + DEFAULT_URI, IS_POSTGRESQL, IS_SQLITE, IS_MSSQL, IS_MYSQL, IS_TERADATA) from ._helpers import DALtest long = integer_types[-1] @@ -672,11 +671,13 @@ def testMethods(self): tmp = [row['aa'], row['bb'], row['aa']+2, row['aa']+1] self.assertEqual(list(result[idx]), tmp) # Check that query expansion methods don't work without alias - self.assertEqual(sub.real_name, None) + self.assertEqual(sub._rname, None) + self.assertEqual(sub._raw_rname, None) + self.assertEqual(sub._dalname, None) with self.assertRaises(SyntaxError): sub.query_name() with self.assertRaises(SyntaxError): - sub.query_alias + sub.sql_shortref with self.assertRaises(SyntaxError): sub.on(sub.aa != None) # Alias checks @@ -686,9 +687,11 @@ def testMethods(self): self.assertEqual(result[idx]['tt'].as_dict(), row) self.assertEqual(result[idx]['exp'], row['aa']+1) # Check query expansion methods again - self.assertEqual(sub.real_name, None) + self.assertEqual(sub._rname, None) + self.assertEqual(sub._raw_rname, None) + self.assertEqual(sub._dalname, None) self.assertEqual(sub.query_name()[0], str(sub)) - self.assertEqual(sub.query_alias, db._adapter.dialect.quote('foo')) + self.assertEqual(sub.sql_shortref, 
db._adapter.dialect.quote('foo')) self.assertIsInstance(sub.on(sub.aa != None), Expression) def testSelectArguments(self): @@ -1286,6 +1289,59 @@ def testOps(self): #self.assertEqual(db(t0).select(op).first()[op], 2) +class TestTableAliasing(DALtest): + + def testRun(self): + db = self.connect() + db.define_table('t1', Field('aa')) + db.define_table('t2', + Field('pk', type='id', unique=True, notnull=True), + Field('bb', type='integer'), rname='tt') + tab1 = db.t1.with_alias('test1') + tab2 = db.t2.with_alias('test2') + self.assertIs(tab2.id, tab2.pk) + self.assertIs(tab2._id, tab2.pk) + self.assertEqual(tab1._dalname, 't1') + self.assertEqual(tab1._tablename, 'test1') + self.assertEqual(tab2._dalname, 't2') + self.assertEqual(tab2._tablename, 'test2') + self.assertEqual(tab2._rname, 'tt') + tab1.insert(aa='foo') + tab1.insert(aa='bar') + result = db(tab1).select(tab1.aa, orderby=tab1.aa) + self.assertEqual(result.as_list(), [{'aa': 'bar'}, {'aa': 'foo'}]) + + if not IS_SQLITE: + db(tab1.aa == 'foo').update(aa='baz') + result = db(tab1).select(tab1.aa, orderby=tab1.aa) + self.assertEqual(result.as_list(), [{'aa': 'bar'}, {'aa': 'baz'}]) + db(tab1.aa == 'bar').delete() + result = db(tab1).select(tab1.aa, orderby=tab1.aa) + self.assertEqual(result.as_list(), [{'aa': 'baz'}]) + else: + with self.assertRaises(SyntaxError): + db(tab1.aa == 'foo').update(aa='baz') + with self.assertRaises(SyntaxError): + db(tab1.aa == 'bar').delete() + + tab2.insert(bb=123) + tab2.insert(bb=456) + result = db(tab2).select(tab2.bb, orderby=tab2.bb) + self.assertEqual(result.as_list(), [{'bb': 123}, {'bb': 456}]) + + if not IS_SQLITE: + db(tab2.bb == 456).update(bb=789) + result = db(tab2).select(tab2.bb, orderby=tab2.bb) + self.assertEqual(result.as_list(), [{'bb': 123}, {'bb': 789}]) + db(tab2.bb == 123).delete() + result = db(tab2).select(tab2.bb, orderby=tab2.bb) + self.assertEqual(result.as_list(), [{'bb': 789}]) + else: + with self.assertRaises(SyntaxError): + db(tab2.bb == 456).update(bb=789) + with self.assertRaises(SyntaxError): + db(tab2.bb == 123).delete() + class TestJoin(DALtest): def testRun(self): @@ -1379,31 +1435,151 @@ def testRun(self): class TestMigrations(unittest.TestCase): def testRun(self): + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa'), Field('BB'), + migrate='.storage.table') + db.define_table('t1', Field('aa'), Field('BB'), + migrate='.storage.rname', rname='foo') + db.commit() + db.close() db = DAL(DEFAULT_URI, check_reserved=['all']) db.define_table('tt', Field('aa'), migrate='.storage.table') + db.define_table('t1', Field('aa'), migrate='.storage.rname', + rname='foo') db.commit() db.close() db = DAL(DEFAULT_URI, check_reserved=['all']) db.define_table('tt', Field('aa'), Field('b'), migrate='.storage.table') + db.define_table('t1', Field('aa'), Field('b'), + migrate='.storage.rname', rname='foo') db.commit() db.close() db = DAL(DEFAULT_URI, check_reserved=['all']) db.define_table('tt', Field('aa'), Field('b', 'text'), migrate='.storage.table') + db.define_table('t1', Field('aa'), Field('b', 'text'), + migrate='.storage.rname', rname='foo') db.commit() db.close() db = DAL(DEFAULT_URI, check_reserved=['all']) db.define_table('tt', Field('aa'), migrate='.storage.table') + db.define_table('t1', Field('aa'), migrate='.storage.rname', + rname='foo') + db.tt.drop() + db.t1.drop() + db.commit() + db.close() + + def testFieldRName(self): + def checkWrite(db, table, data): + rowid = table.insert(**data) + query = (table._id == rowid) + fields = [table[x] for 
x in data.keys()] + row = db(query).select(*fields).first() + self.assertIsNot(row, None) + self.assertEqual(row.as_dict(), data) + db(query).delete() + + # Create tables + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa', rname='faa'), + Field('BB', rname='fbb'), migrate='.storage.table') + db.define_table('t1', Field('aa', rname='faa'), + Field('BB', rname='fbb'), migrate='.storage.rname', rname='foo') + data = dict(aa='aa1', BB='BB1') + checkWrite(db, db.tt, data) + checkWrite(db, db.t1, data) + db.commit() + db.close() + + # Drop field defined by CREATE TABLE + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa', rname='faa'), + migrate='.storage.table') + db.define_table('t1', Field('aa', rname='faa'), + migrate='.storage.rname', rname='foo') + data = dict(aa='aa2') + checkWrite(db, db.tt, data) + checkWrite(db, db.t1, data) + db.commit() + db.close() + + # Add new field + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa', rname='faa'), Field('b', rname='fb'), + migrate='.storage.table') + db.define_table('t1', Field('aa', rname='faa'), Field('b', rname='fb'), + migrate='.storage.rname', rname='foo') + data = dict(aa='aa3', b='b3') + integrity = dict(aa='data', b='integrity') + checkWrite(db, db.tt, data) + checkWrite(db, db.t1, data) + db.tt.insert(**integrity) + db.t1.insert(**integrity) + db.commit() + db.close() + + # Change field type + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa', rname='faa'), + Field('b', 'text', rname='fb'), migrate='.storage.table') + db.define_table('t1', Field('aa', rname='faa'), + Field('b', 'text', rname='fb'), migrate='.storage.rname', + rname='foo') + data = dict(aa='aa4', b='b4') + checkWrite(db, db.tt, data) + checkWrite(db, db.t1, data) + row = db(db.tt).select(*[db.tt[x] for x in integrity.keys()]).first() + self.assertIsNot(row, None) + self.assertEqual(row.as_dict(), integrity) + row2 = db(db.t1).select(*[db.t1[x] for x in integrity.keys()]).first() + self.assertIsNot(row2, None) + self.assertEqual(row2.as_dict(), integrity) + db.commit() + db.close() + + # Change field rname + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa', rname='faa'), + Field('b', 'text', rname='xb'), migrate='.storage.table') + db.define_table('t1', Field('aa', rname='faa'), + Field('b', 'text', rname='xb'), migrate='.storage.rname', + rname='foo') + data = dict(aa='aa4', b='b4') + checkWrite(db, db.tt, data) + checkWrite(db, db.t1, data) + row = db(db.tt).select(*[db.tt[x] for x in integrity.keys()]).first() + self.assertIsNot(row, None) + self.assertEqual(row.as_dict(), integrity) + row2 = db(db.t1).select(*[db.t1[x] for x in integrity.keys()]).first() + self.assertIsNot(row2, None) + self.assertEqual(row2.as_dict(), integrity) + db.commit() + db.close() + + # Drop field defined by ALTER TABLE + db = DAL(DEFAULT_URI, check_reserved=['all']) + db.define_table('tt', Field('aa', rname='faa'), + migrate='.storage.table') + db.define_table('t1', Field('aa', rname='faa'), + migrate='.storage.rname', rname='foo') + data = dict(aa='aa5') + checkWrite(db, db.tt, data) + checkWrite(db, db.t1, data) db.tt.drop() + db.t1.drop() db.commit() db.close() + def tearDown(self): if os.path.exists('.storage.db'): os.unlink('.storage.db') if os.path.exists('.storage.table'): os.unlink('.storage.table') + if os.path.exists('.storage.rname'): + os.unlink('.storage.rname') class TestReference(DALtest): @@ -1807,7 +1983,7 @@ class 
TestRNameTable(DALtest): def testSelect(self): db = self.connect() - rname = _quote(db, 'a very complicated tablename') + rname = 'a_very_complicated_tablename' db.define_table( 'easy_name', Field('a_field'), @@ -1836,14 +2012,14 @@ def testSelect(self): avg = db.easy_name.id.avg() rtn = db(db.easy_name.id > 0).select(avg) self.assertEqual(rtn[0][avg], 3) - rname = _quote(db, 'this is the person table') + rname = 'this_is_the_person_table' db.define_table( 'person', Field('name', default="Michael"), Field('uuid'), rname=rname ) - rname = _quote(db, 'this is the pet table') + rname = 'this_is_the_pet_table' db.define_table( 'pet', Field('friend','reference person'), @@ -1916,7 +2092,7 @@ def testSelect(self): for key in ['reference','reference FK']: db._adapter.types[key]=db._adapter.types[key].replace( '%(on_delete_action)s','NO ACTION') - rname = _quote(db, 'the cubs') + rname = 'the_cubs' db.define_table('pet_farm', Field('name'), Field('father','reference pet_farm'), @@ -1953,8 +2129,8 @@ def testSelect(self): def testJoin(self): db = self.connect() - rname = _quote(db, 'this is table t1') - rname2 = _quote(db, 'this is table t2') + rname = 'this_is_table_t1' + rname2 = 'this_is_table_t2' db.define_table('t1', Field('aa'), rname=rname) db.define_table('t2', Field('aa'), Field('b', db.t1), rname=rname2) i1 = db.t1.insert(aa='1') @@ -2023,8 +2199,8 @@ class TestRNameFields(DALtest): # tests for highly experimental rname attribute def testSelect(self): db = self.connect() - rname = _quote(db, 'a very complicated fieldname') - rname2 = _quote(db, 'rrating from 1 to 10') + rname = 'a_very_complicated_fieldname' + rname2 = 'rrating_from_1_to_10' db.define_table( 'easy_name', Field('a_field', rname=rname), @@ -2058,13 +2234,14 @@ def testSelect(self): rtn = db(db.easy_name.id > 0).select(avg) self.assertEqual(rtn[0][avg], 2) - rname = _quote(db, 'this is the person name') + rname = 'this_is_the_person_name' db.define_table( 'person', + Field('id', type='id', rname='fooid'), Field('name', default="Michael", rname=rname), Field('uuid') ) - rname = _quote(db, 'this is the pet name') + rname = 'this_is_the_pet_name' db.define_table( 'pet', Field('friend','reference person'), @@ -2131,7 +2308,7 @@ def testSelect(self): self.assertEqual(rtn[2].pet.name, 'Gertie') #aliases - rname = _quote(db, 'the cub name') + rname = 'the_cub_name' if DEFAULT_URI.startswith('mssql'): #multiple cascade gotcha for key in ['reference','reference FK']: @@ -2172,7 +2349,7 @@ def testSelect(self): def testRun(self): db = self.connect() - rname = _quote(db, 'a very complicated fieldname') + rname = 'a_very_complicated_fieldname' for ft in ['string', 'text', 'password', 'upload', 'blob']: db.define_table('tt', Field('aa', ft, default='', rname=rname)) self.assertEqual(db.tt.insert(aa='x'), 1) @@ -2245,7 +2422,7 @@ def testRun(self): def testInsert(self): db = self.connect() - rname = _quote(db, 'a very complicated fieldname') + rname = 'a_very_complicated_fieldname' db.define_table('tt', Field('aa', rname=rname)) self.assertEqual(db.tt.insert(aa='1'), 1) self.assertEqual(db.tt.insert(aa='1'), 2) @@ -2260,8 +2437,8 @@ def testInsert(self): def testJoin(self): db = self.connect() - rname = _quote(db, 'this is field aa') - rname2 = _quote(db, 'this is field b') + rname = 'this_is_field_aa' + rname2 = 'this_is_field_b' db.define_table('t1', Field('aa', rname=rname)) db.define_table('t2', Field('aa', rname=rname), Field('b', db.t1, rname=rname2)) i1 = db.t1.insert(aa='1') @@ -2325,6 +2502,28 @@ def testJoin(self): db.dog.drop() 
self.assertEqual(len(db.person._referenced_by),0) + def testTFK(self): + db = self.connect() + if 'reference TFK' not in db._adapter.types: + self.skipTest('Adapter does not support TFK references') + db.define_table('t1', + Field('id1', type='string', length=1, rname='foo1'), + Field('id2', type='integer', rname='foo2'), + Field('val', type='integer'), + primarykey=['id1', 'id2']) + db.define_table('t2', + Field('ref1', type=db.t1.id1, rname='bar1'), + Field('ref2', type=db.t1.id2, rname='bar2')) + db.t1.insert(id1='a', id2=1, val=10) + db.t1.insert(id1='a', id2=2, val=30) + db.t2.insert(ref1='a', ref2=1) + query = (db.t1.id1 == db.t2.ref1) & (db.t1.id2 == db.t2.ref2) + result = db(query).select(db.t1.ALL) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['id1'], 'a') + self.assertEqual(result[0]['id2'], 1) + self.assertEqual(result[0]['val'], 10) + class TestQuoting(DALtest): @@ -2471,7 +2670,8 @@ def testGisMigration(self): if not IS_POSTGRESQL: return for b in [True, False]: db = DAL(DEFAULT_URI, check_reserved=['all'], ignore_field_case=b) - t0 = db.define_table('t0', Field('Point', 'geometry()')) + t0 = db.define_table('t0', Field('Point', 'geometry()'), + Field('rname_point', 'geometry()', rname='foo')) db.commit() db.close() db = DAL(DEFAULT_URI, check_reserved=['all'], ignore_field_case=b)