Ticket #6148: 6148_django1.5.diff
File 6148_django1.5.diff, 194.6 KB (added by , 13 years ago) |
---|
-
django/conf/global_settings.py
407 407 DEFAULT_TABLESPACE = '' 408 408 DEFAULT_INDEX_TABLESPACE = '' 409 409 410 # The default database schema to use for tables 411 DEFAULT_SCHEMA = None 412 410 413 # Default X-Frame-Options header value 411 414 X_FRAME_OPTIONS = 'SAMEORIGIN' 412 415 -
django/db/models/fields/related.py
1 1 from operator import attrgetter 2 2 3 from django.db import connection, router 3 from django.db import connection, router, QName 4 4 from django.db.backends import util 5 5 from django.db.models import signals, get_model 6 6 from django.db.models.fields import (AutoField, Field, IntegerField, … … 568 568 # dealing with PK values. 569 569 fk = self.through._meta.get_field(self.source_field_name) 570 570 source_col = fk.column 571 join_table = self.through._meta.db_table572 571 connection = connections[db] 573 572 qn = connection.ops.quote_name 573 join_alias = connection.qname(self.through) 574 574 qs = qs.extra(select={'_prefetch_related_val': 575 '%s.%s' % ( qn(join_table), qn(source_col))})575 '%s.%s' % (join_alias, qn(source_col))}) 576 576 select_attname = fk.rel.get_related_field().get_attname() 577 577 return (qs, 578 578 attrgetter('_prefetch_related_val'), … … 1085 1085 to = to.lower() 1086 1086 meta = type('Meta', (object,), { 1087 1087 'db_table': field._get_m2m_db_table(klass._meta), 1088 'db_schema': field._get_m2m_db_schema(klass._meta), 1088 1089 'managed': managed, 1089 1090 'auto_created': klass, 1090 1091 'app_label': klass._meta.app_label, … … 1121 1122 through=kwargs.pop('through', None)) 1122 1123 1123 1124 self.db_table = kwargs.pop('db_table', None) 1125 self.db_schema = kwargs.pop('db_schema', None) 1124 1126 if kwargs['rel'].through is not None: 1125 1127 assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used." 1128 assert self.db_schema is None, "Cannot specify a db_schema if an intermediary model is used." 
1129 # TODO: by default use the containing model's db_schema 1130 1126 1131 1127 1132 Field.__init__(self, **kwargs) 1128 1133 … … 1142 1147 return util.truncate_name('%s_%s' % (opts.db_table, self.name), 1143 1148 connection.ops.max_name_length()) 1144 1149 1150 def _get_m2m_db_schema(self, opts): 1151 "Function that can be curried to provide the m2m schema name for this relation" 1152 if self.rel.through is not None and self.rel.through._meta.db_schema: 1153 return self.rel.through._meta.db_schema 1154 elif self.db_schema: 1155 return self.db_schema 1156 else: 1157 return opts.db_schema 1158 1159 1160 def _get_m2m_qualified_name(self, opts): 1161 "Function that can be curried to provide the qualified m2m table name for this relation" 1162 schema = self._get_m2m_db_schema(opts) 1163 table = self._get_m2m_db_table(opts) 1164 return QName(schema, table, False) 1165 1145 1166 def _get_m2m_attr(self, related, attr): 1146 1167 "Function that can be curried to provide the source accessor or DB column name for the m2m table" 1147 1168 cache_attr = '_m2m_%s_cache' % attr … … 1212 1233 1213 1234 # Set up the accessor for the m2m table name for the relation 1214 1235 self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta) 1236 self.m2m_db_schema = curry(self._get_m2m_db_schema, cls._meta) 1237 self.m2m_qualified_name = curry(self._get_m2m_qualified_name, 1238 cls._meta) 1215 1239 1216 1240 # Populate some necessary rel arguments so that cross-app relations 1217 1241 # work correctly. … … 1223 1247 if isinstance(self.rel.to, basestring): 1224 1248 target = self.rel.to 1225 1249 else: 1226 target = self.rel.to._meta. db_table1250 target = self.rel.to._meta.qualified_name 1227 1251 cls._meta.duplicate_targets[self.column] = (target, "m2m") 1228 1252 1229 1253 def contribute_to_related_class(self, cls, related): -
django/db/models/query.py
1558 1558 A dict mapping column names to model field names. 1559 1559 """ 1560 1560 if not hasattr(self, '_model_fields'): 1561 converter = connections[self.db].introspection. table_name_converter1561 converter = connections[self.db].introspection.identifier_converter 1562 1562 self._model_fields = {} 1563 1563 for field in self.model._meta.fields: 1564 1564 name, column = field.get_attname_column() -
django/db/models/sql/query.py
16 16 from django.db.models import signals 17 17 from django.db.models.expressions import ExpressionNode 18 18 from django.db.models.fields import FieldDoesNotExist 19 from django.db.models.query_utils import InvalidQuery20 19 from django.db.models.sql import aggregates as base_aggregates_module 21 20 from django.db.models.sql.constants import * 22 21 from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin … … 59 58 def get_columns(self): 60 59 if self.cursor is None: 61 60 self._execute_query() 62 converter = connections[self.using].introspection. table_name_converter61 converter = connections[self.using].introspection.identifier_converter 63 62 return [converter(column_meta[0]) 64 63 for column_meta in self.cursor.description] 65 64 … … 638 637 Callback used by deferred_to_columns(). The "target" parameter should 639 638 be a set instance. 640 639 """ 641 table = model._meta. db_table640 table = model._meta.qualified_name 642 641 if table not in target: 643 642 target[table] = set() 644 643 for field in fields: … … 659 658 self.alias_refcount[alias] += 1 660 659 return alias, False 661 660 662 # Create a new alias for this table. 663 if current: 661 # Create a new alias for this table if this table is already used 662 # in the query, or if there is the same table name used in the query 663 # under different schema qualified name. 664 if current or table_name[1] in [t[1] for t in self.tables]: 664 665 alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1) 665 current.append(alias) 666 if current: 667 current.append(alias) 666 668 else: 667 669 # The first occurence of a table uses the table name directly. 668 670 alias = table_name … … 832 834 alias = self.tables[0] 833 835 self.ref_alias(alias) 834 836 else: 835 alias = self.join((None, self.model._meta. 
db_table, None, None))837 alias = self.join((None, self.model._meta.qualified_name, None, None)) 836 838 return alias 837 839 838 840 def count_active_tables(self): … … 945 947 seen[model] = root_alias 946 948 else: 947 949 link_field = opts.get_ancestor_link(model) 948 seen[model] = self.join((root_alias, model._meta. db_table,950 seen[model] = self.join((root_alias, model._meta.qualified_name, 949 951 link_field.column, model._meta.pk.column)) 950 952 self.included_inherited_models = seen 951 953 … … 1329 1331 (id(opts), lhs_col), ())) 1330 1332 dupe_set.add((opts, lhs_col)) 1331 1333 opts = int_model._meta 1332 alias = self.join((alias, opts. db_table, lhs_col,1334 alias = self.join((alias, opts.qualified_name, lhs_col, 1333 1335 opts.pk.column), exclusions=exclusions) 1334 1336 joins.append(alias) 1335 1337 exclusions.add(alias) … … 1355 1357 (table1, from_col1, to_col1, table2, from_col2, 1356 1358 to_col2, opts, target) = cached_data 1357 1359 else: 1358 table1 = field.m2m_ db_table()1360 table1 = field.m2m_qualified_name() 1359 1361 from_col1 = opts.get_field_by_name( 1360 1362 field.m2m_target_field_name())[0].column 1361 1363 to_col1 = field.m2m_column_name() 1362 1364 opts = field.rel.to._meta 1363 table2 = opts. db_table1365 table2 = opts.qualified_name 1364 1366 from_col2 = field.m2m_reverse_name() 1365 1367 to_col2 = opts.get_field_by_name( 1366 1368 field.m2m_reverse_target_field_name())[0].column … … 1388 1390 else: 1389 1391 opts = field.rel.to._meta 1390 1392 target = field.rel.get_related_field() 1391 table = opts. 
db_table1393 table = opts.qualified_name 1392 1394 from_col = field.column 1393 1395 to_col = target.column 1394 1396 orig_opts._join_cache[name] = (table, from_col, to_col, … … 1410 1412 (table1, from_col1, to_col1, table2, from_col2, 1411 1413 to_col2, opts, target) = cached_data 1412 1414 else: 1413 table1 = field.m2m_ db_table()1415 table1 = field.m2m_qualified_name() 1414 1416 from_col1 = opts.get_field_by_name( 1415 1417 field.m2m_reverse_target_field_name())[0].column 1416 1418 to_col1 = field.m2m_reverse_name() 1417 1419 opts = orig_field.opts 1418 table2 = opts. db_table1420 table2 = opts.qualified_name 1419 1421 from_col2 = field.m2m_column_name() 1420 1422 to_col2 = opts.get_field_by_name( 1421 1423 field.m2m_target_field_name())[0].column … … 1439 1441 local_field = opts.get_field_by_name( 1440 1442 field.rel.field_name)[0] 1441 1443 opts = orig_field.opts 1442 table = opts. db_table1444 table = opts.qualified_name 1443 1445 from_col = local_field.column 1444 1446 to_col = field.column 1445 1447 # In case of a recursive FK, use the to_field for … … 1722 1724 else: 1723 1725 opts = self.model._meta 1724 1726 if not self.select: 1725 count = self.aggregates_module.Count((self.join((None, opts. db_table, None, None)), opts.pk.column),1727 count = self.aggregates_module.Count((self.join((None, opts.qualified_name, None, None)), opts.pk.column), 1726 1728 is_summary=True, distinct=True) 1727 1729 else: 1728 1730 # Because of SQL portability issues, multi-column, distinct -
django/db/models/sql/subqueries.py
41 41 where = self.where_class() 42 42 where.add((Constraint(None, field.column, field), 'in', 43 43 pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]), AND) 44 self.do_query(self.model._meta. db_table, where, using=using)44 self.do_query(self.model._meta.qualified_name, where, using=using) 45 45 46 46 class UpdateQuery(Query): 47 47 """ -
django/db/models/sql/compiler.py
1 1 from itertools import izip 2 2 3 from django.conf import settings 3 4 from django.core.exceptions import FieldError 4 from django.db import transaction 5 from django.db import transaction, QName 5 6 from django.db.backends.util import truncate_name 6 7 from django.db.models.query_utils import select_related_descend 7 8 from django.db.models.sql.constants import * … … 27 28 # cleaned. We are not using a clone() of the query here. 28 29 """ 29 30 if not self.query.tables: 30 self.query.join((None, self.query.model._meta. db_table, None, None))31 self.query.join((None, self.query.model._meta.qualified_name, None, None)) 31 32 if (not self.query.select and self.query.default_cols and not 32 33 self.query.included_inherited_models): 33 34 self.query.setup_inherited_models() … … 42 43 """ 43 44 if name in self.quote_cache: 44 45 return self.quote_cache[name] 45 if ((name in self.query.alias_map and name not in self.query.table_map) or 46 name in self.query.extra_select): 46 if name in self.query.alias_map and not isinstance(name, tuple): 47 47 self.quote_cache[name] = name 48 48 return name 49 r = self.connection.ops.quote_name(name) 49 if isinstance(name, tuple): 50 r = self.connection.ops.qualified_name(name) 51 else: 52 r = self.connection.ops.quote_name(name) 50 53 self.quote_cache[name] = r 51 54 return r 52 55 … … 281 284 alias = start_alias 282 285 else: 283 286 link_field = opts.get_ancestor_link(model) 284 alias = self.query.join((start_alias, model._meta.db_table, 285 link_field.column, model._meta.pk.column)) 287 alias = self.query.join((start_alias, 288 model._meta.qualified_name, 289 link_field.column, 290 model._meta.pk.column)) 286 291 seen[model] = alias 287 292 else: 288 293 # If we're starting from the base model of the queryset, the … … 508 513 qn2 = self.connection.ops.quote_name 509 514 first = True 510 515 for alias in self.query.tables: 511 if not self.query.alias_refcount[alias]: 516 # Extra tables can end up in self.tables, but not in the 517 
# alias_map if they aren't in a join. That's OK. We skip them. 518 if not self.query.alias_refcount[alias] or alias not in self.query.alias_map: 512 519 continue 513 try: 514 name, alias, join_type, lhs, lhs_col, col, nullable = self.query.alias_map[alias] 515 except KeyError: 516 # Extra tables can end up in self.tables, but not in the 517 # alias_map if they aren't in a join. That's OK. We skip them. 518 continue 519 alias_str = (alias != name and ' %s' % alias or '') 520 name, alias, join_type, lhs, lhs_col, col, nullable = self.query.alias_map[alias] 521 alias_str = alias != name and ' %s' % qn(alias) or '' 520 522 if join_type and not first: 521 523 result.append('%s %s%s ON (%s.%s = %s.%s)' 522 524 % (join_type, qn(name), alias_str, qn(lhs), … … 526 528 result.append('%s%s%s' % (connector, qn(name), alias_str)) 527 529 first = False 528 530 for t in self.query.extra_tables: 529 alias, unused = self.query.table_alias(t) 530 # Only add the alias if it's not already present (the table_alias() 531 # calls increments the refcount, so an alias refcount of one means 532 # this is the only reference. 533 if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: 531 # Plain string table names are assumed to be in default schema 532 if not isinstance(t, QName): 533 t = QName(self.connection.schema, t, False) 534 # Only add the table if it is not already in the query. 535 if t not in self.query.table_map or self.query.alias_refcount[t] == 0: 536 # This will add the table into the query properly, however we 537 # are not interested in the alias it gets, we add it as a 538 # plain table. 
539 self.query.table_alias(t) 534 540 connector = not first and ', ' or '' 535 result.append('%s%s' % (connector, qn( alias)))541 result.append('%s%s' % (connector, qn(t))) 536 542 first = False 537 543 return result, [] 538 544 … … 546 552 if (len(self.query.model._meta.fields) == len(self.query.select) and 547 553 self.connection.features.allows_group_by_pk): 548 554 self.query.group_by = [ 549 (self.query.model._meta. db_table, self.query.model._meta.pk.column)555 (self.query.model._meta.qualified_name, self.query.model._meta.pk.column) 550 556 ] 551 557 552 558 group_by = self.query.group_by or [] … … 614 620 # what "used" specifies). 615 621 avoid = avoid_set.copy() 616 622 dupe_set = orig_dupe_set.copy() 617 table = f.rel.to._meta. db_table623 table = f.rel.to._meta.qualified_name 618 624 promote = nullable or f.null 619 625 if model: 620 626 int_opts = opts … … 635 641 ())) 636 642 dupe_set.add((opts, lhs_col)) 637 643 int_opts = int_model._meta 638 alias = self.query.join((alias, int_opts. db_table, lhs_col,644 alias = self.query.join((alias, int_opts.qualified_name, lhs_col, 639 645 int_opts.pk.column), exclusions=used, 640 646 promote=promote) 641 647 alias_chain.append(alias) … … 687 693 # what "used" specifies). 688 694 avoid = avoid_set.copy() 689 695 dupe_set = orig_dupe_set.copy() 690 table = model._meta. db_table696 table = model._meta.qualified_name 691 697 692 698 int_opts = opts 693 699 alias = root_alias … … 710 716 dupe_set.add((opts, lhs_col)) 711 717 int_opts = int_model._meta 712 718 alias = self.query.join( 713 (alias, int_opts. db_table, lhs_col, int_opts.pk.column),719 (alias, int_opts.qualified_name, lhs_col, int_opts.pk.column), 714 720 exclusions=used, promote=True, reuse=used 715 721 ) 716 722 alias_chain.append(alias) … … 775 781 # into `resolve_columns` because it wasn't selected. 776 782 only_load = self.deferred_to_columns() 777 783 if only_load: 778 db_table = self.query.model._meta. 
db_table784 db_table = self.query.model._meta.qualified_name 779 785 fields = [f for f in fields if db_table in only_load and 780 786 f.column in only_load[db_table]] 781 787 row = self.resolve_columns(row, fields) … … 856 862 # We don't need quote_name_unless_alias() here, since these are all 857 863 # going to be column names (so we can avoid the extra overhead). 858 864 qn = self.connection.ops.quote_name 865 qn3 = self.connection.ops.qualified_name 859 866 opts = self.query.model._meta 860 result = ['INSERT INTO %s' % qn (opts.db_table)]867 result = ['INSERT INTO %s' % qn3(opts.qualified_name)] 861 868 862 869 has_fields = bool(self.query.fields) 863 870 fields = self.query.fields if has_fields else [opts.pk] … … 887 894 ] 888 895 if self.return_id and self.connection.features.can_return_id_from_insert: 889 896 params = params[0] 890 col = "%s.%s" % (qn (opts.db_table), qn(opts.pk.column))897 col = "%s.%s" % (qn3(opts.qualified_name), qn(opts.pk.column)) 891 898 result.append("VALUES (%s)" % ", ".join(placeholders[0])) 892 899 r_fmt, r_params = self.connection.ops.return_insert_id() 893 900 result.append(r_fmt % col) … … 913 920 if self.connection.features.can_return_id_from_insert: 914 921 return self.connection.ops.fetch_returned_insert_id(cursor) 915 922 return self.connection.ops.last_insert_id(cursor, 916 self.query.model._meta. 
db_table, self.query.model._meta.pk.column)923 self.query.model._meta.qualified_name, self.query.model._meta.pk.column) 917 924 918 925 919 926 class SQLDeleteCompiler(SQLCompiler): … … 939 946 self.pre_sql_setup() 940 947 if not self.query.values: 941 948 return '', () 942 table = self.query.tables[0] 949 opts = self.query.model._meta 950 table = self.connection.ops.qualified_name(opts.qualified_name) 943 951 qn = self.quote_name_unless_alias 944 result = ['UPDATE %s' % qn(table)] 952 alias = qn(self.query.tables[0]) 953 if table == alias: 954 alias = '' 955 else: 956 alias = ' %s' % alias 957 result = ['UPDATE %s%s' % (table, alias)] 945 958 result.append('SET') 946 959 values, update_params = [], [] 947 960 for field, model, val in self.query.values: -
django/db/models/options.py
2 2 from bisect import bisect 3 3 4 4 from django.conf import settings 5 from django.db import QName 5 6 from django.db.models.related import RelatedObject 6 7 from django.db.models.fields.related import ManyToManyRel 7 8 from django.db.models.fields import AutoField, FieldDoesNotExist … … 17 18 DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering', 18 19 'unique_together', 'permissions', 'get_latest_by', 19 20 'order_with_respect_to', 'app_label', 'db_tablespace', 20 'abstract', 'managed', 'proxy', 'auto_created' )21 'abstract', 'managed', 'proxy', 'auto_created', 'db_schema') 21 22 22 23 class Options(object): 23 24 def __init__(self, meta, app_label=None): … … 26 27 self.module_name, self.verbose_name = None, None 27 28 self.verbose_name_plural = None 28 29 self.db_table = '' 30 self.db_schema = '' 31 self.qualified_name = QName(schema=None, table='', db_format=False) 29 32 self.ordering = [] 30 33 self.unique_together = [] 31 34 self.permissions = [] … … 112 115 # If the db_table wasn't provided, use the app_label + module_name. 113 116 if not self.db_table: 114 117 self.db_table = "%s_%s" % (self.app_label, self.module_name) 118 # TODO: Using connection.ops is wrong: in multidb setup this doesn't work 119 # correctly except if different connections happen to have the same 120 # max_name_length. 115 121 self.db_table = truncate_name(self.db_table, connection.ops.max_name_length()) 122 self.qualified_name = QName(schema=self.db_schema, table=self.db_table, 123 db_format=False) 116 124 117 125 def _prepare(self, model): 118 126 if self.order_with_respect_to: … … 193 201 self.pk = target._meta.pk 194 202 self.proxy_for_model = target 195 203 self.db_table = target._meta.db_table 204 self.db_schema = target._meta.db_schema 205 self.qualified_name = target._meta.qualified_name 196 206 197 207 def __repr__(self): 198 208 return '<Options for %s>' % self.object_name -
django/db/__init__.py
3 3 from django.core.exceptions import ImproperlyConfigured 4 4 from django.db.utils import (ConnectionHandler, ConnectionRouter, 5 5 load_backend, DEFAULT_DB_ALIAS, DatabaseError, IntegrityError) 6 from collections import namedtuple 6 7 8 # All table names must be QNames. The db_format argument 9 # tells if the qualified name is in a format that is ready 10 # to be used in the database or if the qname needs to be 11 # converted first. 12 QName = namedtuple("QName", "schema table db_format") 13 7 14 __all__ = ('backend', 'connection', 'connections', 'router', 'DatabaseError', 8 15 'IntegrityError', 'DEFAULT_DB_ALIAS') 9 16 10 11 17 if DEFAULT_DB_ALIAS not in settings.DATABASES: 12 18 raise ImproperlyConfigured("You must define a '%s' database" % DEFAULT_DB_ALIAS) 13 19 -
django/db/utils.py
78 78 conn['ENGINE'] = 'django.db.backends.dummy' 79 79 conn.setdefault('OPTIONS', {}) 80 80 conn.setdefault('TIME_ZONE', 'UTC' if settings.USE_TZ else settings.TIME_ZONE) 81 for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']: 81 conn.setdefault('SCHEMA', settings.DEFAULT_SCHEMA) 82 for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT', 'SCHEMA']: 82 83 conn.setdefault(setting, '') 83 84 for setting in ['TEST_CHARSET', 'TEST_COLLATION', 'TEST_NAME', 'TEST_MIRROR']: 84 85 conn.setdefault(setting, None) 86 # Some databases need a unique prefix for schemas to avoid name 87 # collisions between production database or another testing database 88 # in the same instance. This needs to be a string unlike the other 89 # TEST_ settings above. 90 conn.setdefault('TEST_SCHEMA_PREFIX', '') 91 conn.setdefault('TEST_SCHEMAS', []) 85 92 86 93 def __getitem__(self, alias): 87 94 if hasattr(self._connections, alias): -
django/db/backends/postgresql_psycopg2/introspection.py
1 from django.db import QName 1 2 from django.db.backends import BaseDatabaseIntrospection 2 3 3 4 … … 21 22 1266: 'TimeField', 22 23 1700: 'DecimalField', 23 24 } 25 26 def get_schema_list(self, cursor): 27 cursor.execute(""" 28 SELECT n.nspname 29 FROM pg_catalog.pg_namespace n 30 WHERE n.nspname != 'information_schema' AND n.nspname not like 'pg_%%'""") 31 return [row[0] for row in cursor.fetchall()] 24 32 25 def get_table_list(self, cursor): 26 "Returns a list of table names in the current database." 33 def get_visible_tables_list(self, cursor): 34 """ 35 Returns a list of all by-default visible table names in the current 36 database. 37 """ 38 sql = """ 39 SELECT n.nspname, c.relname 40 FROM pg_catalog.pg_class c 41 LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace 42 WHERE c.relkind IN ('r', 'v', '') 43 AND n.nspname NOT IN ('pg_catalog', 'pg_toast') 44 AND (pg_catalog.pg_table_is_visible(c.oid)""" 45 # We must add the default schema to always visible schemas to make 46 # things work nicely. 47 if self.connection.schema: 48 sql += " OR n.nspname = %s)" 49 cursor.execute(sql, (self.connection.schema,)) 50 else: 51 cursor.execute(sql + ')') 52 return [QName(row[0], row[1], True) for row in cursor.fetchall()] 53 54 def get_qualified_tables_list(self, cursor, schemas): 55 """ 56 Returns schema qualified names of all tables in the current database. 
57 """ 58 if not schemas: 59 return [] 27 60 cursor.execute(""" 28 SELECT c.relname61 SELECT n.nspname, c.relname 29 62 FROM pg_catalog.pg_class c 30 63 LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace 31 64 WHERE c.relkind IN ('r', 'v', '') 32 AND n.nspname NOT IN ('pg_catalog', 'pg_toast') 33 AND pg_catalog.pg_table_is_visible(c.oid)""") 34 return [row[0] for row in cursor.fetchall()] 65 AND n.nspname IN %s""", (tuple(schemas),)) 66 return [QName(row[0], row[1], True) for row in cursor.fetchall()] 35 67 36 def get_table_description(self, cursor, table_name):68 def get_table_description(self, cursor, qname): 37 69 "Returns a description of the table, with the DB-API cursor.description interface." 38 70 # As cursor.description does not return reliably the nullable property, 39 71 # we have to query the information_schema (#7783) 40 cursor.execute(""" 41 SELECT column_name, is_nullable 42 FROM information_schema.columns 43 WHERE table_name = %s""", [table_name]) 72 qname = self.qname_converter(qname) 73 if not qname.schema: 74 cursor.execute(""" 75 SELECT column_name, is_nullable 76 FROM information_schema.columns 77 WHERE table_name = %s""", 78 [qname.table]) 79 else: 80 cursor.execute(""" 81 SELECT column_name, is_nullable 82 FROM information_schema.columns 83 WHERE table_schema = %s and table_name = %s""", 84 [qname.schema, qname.table]) 44 85 null_map = dict(cursor.fetchall()) 45 cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)) 86 cursor.execute( 87 "SELECT * FROM %s LIMIT 1" % self.connection.ops.qualified_name(qname)) 46 88 return [tuple([item for item in line[:6]] + [null_map[line[0]]==u'YES']) 47 for line in cursor.description]89 for line in cursor.description] 48 90 49 def get_relations(self, cursor, table_name):91 def get_relations(self, cursor, qname): 50 92 """ 51 93 Returns a dictionary of {field_index: (field_index_other_table, other_table)} 52 representing all relationships to the given table. 
Indexes are 0-based. 94 representing all relationships to the given table. Indexes are 0-based. The 95 other_table will be in qualified format. 53 96 """ 54 cursor.execute(""" 55 SELECT con.conkey, con.confkey, c2.relname 56 FROM pg_constraint con, pg_class c1, pg_class c2 57 WHERE c1.oid = con.conrelid 58 AND c2.oid = con.confrelid 59 AND c1.relname = %s 60 AND con.contype = 'f'""", [table_name]) 97 qname = self.qname_converter(qname) 98 if not qname.schema: 99 cursor.execute(""" 100 SELECT con.conkey, con.confkey, nsp2.nspname, c2.relname 101 FROM pg_constraint con, pg_class c1, pg_class c2, 102 pg_namespace nsp2 103 WHERE c1.oid = con.conrelid 104 AND c2.oid = con.confrelid 105 AND nsp2.oid = c2.relnamespace 106 AND c1.relname = %s 107 AND con.contype = 'f'""", 108 [qname.table]) 109 else: 110 cursor.execute(""" 111 SELECT con.conkey, con.confkey, nsp2.nspname, c2.relname 112 FROM pg_constraint con, pg_class c1, pg_class c2, 113 pg_namespace nsp1, pg_namespace nsp2 114 WHERE c1.oid = con.conrelid 115 AND nsp1.oid = c1.relnamespace 116 AND c2.oid = con.confrelid 117 AND nsp2.oid = c2.relnamespace 118 AND nsp1.nspname = %s 119 AND c1.relname = %s 120 AND con.contype = 'f'""", 121 [qname.schema, qname.table]) 61 122 relations = {} 62 123 for row in cursor.fetchall(): 63 124 # row[0] and row[1] are single-item lists, so grab the single item. 
64 relations[row[0][0] - 1] = (row[1][0] - 1, row[2]) 125 relations[row[0][0] - 1] = (row[1][0] - 1, 126 QName(row[2], row[3], True)) 65 127 return relations 66 128 67 def get_indexes(self, cursor, table_name):129 def get_indexes(self, cursor, qname): 68 130 """ 69 131 Returns a dictionary of fieldname -> infodict for the given table, 70 132 where each infodict is in the format: … … 73 135 """ 74 136 # This query retrieves each index on the given table, including the 75 137 # first associated field name 76 cursor.execute(""" 77 SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary 78 FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, 79 pg_catalog.pg_index idx, pg_catalog.pg_attribute attr 80 WHERE c.oid = idx.indrelid 81 AND idx.indexrelid = c2.oid 82 AND attr.attrelid = c.oid 83 AND attr.attnum = idx.indkey[0] 84 AND c.relname = %s""", [table_name]) 138 qname = self.qname_converter(qname) 139 if not qname.schema: 140 cursor.execute(""" 141 SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary 142 FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, 143 pg_catalog.pg_index idx, pg_catalog.pg_attribute attr 144 WHERE c.oid = idx.indrelid 145 AND idx.indexrelid = c2.oid 146 AND attr.attrelid = c.oid 147 AND attr.attnum = idx.indkey[0] 148 AND c.relname = %s""", 149 [qname.table]) 150 else: 151 cursor.execute(""" 152 SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary 153 FROM pg_catalog.pg_class c, pg_catalog.pg_namespace nsp, 154 pg_catalog.pg_class c2, pg_catalog.pg_index idx, 155 pg_catalog.pg_attribute attr 156 WHERE c.oid = idx.indrelid 157 AND nsp.oid = c.relnamespace 158 AND idx.indexrelid = c2.oid 159 AND attr.attrelid = c.oid 160 AND attr.attnum = idx.indkey[0] 161 AND nsp.nspname = %s AND c.relname = %s""", 162 [qname.schema, qname.table]) 85 163 indexes = {} 86 164 for row in cursor.fetchall(): 87 165 # row[1] (idx.indkey) is stored in the DB as an array. 
It comes out as … … 92 170 continue 93 171 indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]} 94 172 return indexes 173 174 def qname_converter(self, qname, force_schema=False): 175 """ 176 On postgresql it is impossible to force usage of schema - 177 we do not know what the default schema is. In fact, there can be 178 multiple schemas. 179 """ 180 assert isinstance(qname, QName) 181 if qname.db_format: 182 return qname 183 return QName((qname.schema or self.connection.schema), qname.table, 184 True) -
django/db/backends/postgresql_psycopg2/operations.py
1 from django.db import QName 1 2 from django.db.backends import BaseDatabaseOperations 2 3 3 4 … … 56 57 return 'HOST(%s)' 57 58 return '%s' 58 59 59 def last_insert_id(self, cursor, table_name, pk_name):60 def last_insert_id(self, cursor, qualified_name, pk_name): 60 61 # Use pg_get_serial_sequence to get the underlying sequence name 61 62 # from the table name and column name (available since PostgreSQL 8) 62 63 cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % ( 63 self.qu ote_name(table_name), pk_name))64 self.qualified_name(qualified_name), pk_name)) 64 65 return cursor.fetchone()[0] 65 66 66 67 def no_limit_value(self): … … 71 72 return name # Quoting once is enough. 72 73 return '"%s"' % name 73 74 75 def qualified_name(self, qname): 76 """ 77 Return the table's name in fully qualified format 78 (schema_name.tbl_name) if there is a schema_name, else returns just 79 the tbl_name in quoted format. 80 81 convert_name has no effect on PostgreSQL, as there is no need 82 to do anything else than quoting for the name even in testing. 83 """ 84 assert isinstance(qname, QName) 85 schema = qname.schema 86 if not qname.db_format and not schema: 87 schema = self.connection.schema 88 if schema: 89 return "%s.%s" % (self.quote_name(schema), 90 self.quote_name(qname.table)) 91 else: 92 return self.quote_name(qname.table) 93 74 94 def set_time_zone_sql(self): 75 95 return "SET TIME ZONE %s" 76 96 … … 81 101 # table. 82 102 sql = ['%s %s;' % \ 83 103 (style.SQL_KEYWORD('TRUNCATE'), 84 style.SQL_FIELD(', '.join([self.quote_name(table) for table in tables])) 104 style.SQL_FIELD(', '.join([self.qualified_name(table) 105 for table in tables])) 85 106 )] 86 107 87 108 # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... 
style SQL statements 88 109 # to reset sequence indices 89 110 for sequence_info in sequences: 90 table_name = sequence_info['table'] 111 qname = sequence_info['qname'] 112 qualified_name = self.qualified_name(qname) 91 113 column_name = sequence_info['column'] 92 114 if not (column_name and len(column_name) > 0): 93 115 # This will be the case if it's an m2m using an autogenerated … … 95 117 column_name = 'id' 96 118 sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % \ 97 119 (style.SQL_KEYWORD('SELECT'), 98 style.SQL_TABLE( self.quote_name(table_name)),120 style.SQL_TABLE(qualified_name), 99 121 style.SQL_FIELD(column_name)) 100 122 ) 101 123 return sql … … 121 143 122 144 for f in model._meta.local_fields: 123 145 if isinstance(f, models.AutoField): 146 qualified_name = self.qualified_name(model._meta.qualified_name) 124 147 output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \ 125 148 (style.SQL_KEYWORD('SELECT'), 126 style.SQL_TABLE(q n(model._meta.db_table)),149 style.SQL_TABLE(qualified_name), 127 150 style.SQL_FIELD(f.column), 128 151 style.SQL_FIELD(qn(f.column)), 129 152 style.SQL_FIELD(qn(f.column)), 130 153 style.SQL_KEYWORD('IS NOT'), 131 154 style.SQL_KEYWORD('FROM'), 132 style.SQL_TABLE(q n(model._meta.db_table))))155 style.SQL_TABLE(qualified_name))) 133 156 break # Only one AutoField is allowed per model, so don't bother continuing. 
134 157 for f in model._meta.many_to_many: 135 158 if not f.rel.through: 159 qualified_name = self.qualified_name(f.m2m_qualified_name()) 136 160 output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \ 137 161 (style.SQL_KEYWORD('SELECT'), 138 style.SQL_TABLE(q n(f.m2m_db_table())),162 style.SQL_TABLE(qualified_name), 139 163 style.SQL_FIELD('id'), 140 164 style.SQL_FIELD(qn('id')), 141 165 style.SQL_FIELD(qn('id')), 142 166 style.SQL_KEYWORD('IS NOT'), 143 167 style.SQL_KEYWORD('FROM'), 144 style.SQL_TABLE(q n(f.m2m_db_table()))))168 style.SQL_TABLE(qualified_name))) 145 169 return output 146 170 147 171 def savepoint_create_sql(self, sid): -
django/db/backends/postgresql_psycopg2/creation.py
43 43 def sql_indexes_for_field(self, model, f, style): 44 44 if f.db_index and not f.unique: 45 45 qn = self.connection.ops.quote_name 46 db_table = model._meta.db_table 46 qn3 = self.connection.ops.qualified_name 47 qualified_name = model._meta.qualified_name 48 db_table = qualified_name[1] 47 49 tablespace = f.db_tablespace or model._meta.db_tablespace 48 50 if tablespace: 49 51 tablespace_sql = self.connection.ops.tablespace_sql(tablespace) … … 56 58 return (style.SQL_KEYWORD('CREATE INDEX') + ' ' + 57 59 style.SQL_TABLE(qn(truncate_name(index_name,self.connection.ops.max_name_length()))) + ' ' + 58 60 style.SQL_KEYWORD('ON') + ' ' + 59 style.SQL_TABLE(qn (db_table)) + ' ' +61 style.SQL_TABLE(qn3(qualified_name)) + ' ' + 60 62 "(%s%s)" % (style.SQL_FIELD(qn(f.column)), opclass) + 61 63 "%s;" % tablespace_sql) 62 64 … … 85 87 self.connection.connection.rollback() 86 88 self.connection.connection.set_isolation_level( 87 89 psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) 90 91 def sql_destroy_schema(self, schema, style): 92 qn = self.connection.ops.quote_name 93 return "%s %s CASCADE;" % (style.SQL_KEYWORD('DROP SCHEMA'), qn(schema)) -
django/db/backends/postgresql_psycopg2/base.py
83 83 has_bulk_insert = True 84 84 supports_tablespaces = True 85 85 can_distinct_on_fields = True 86 namespaced_schemas = True 86 87 87 88 class DatabaseWrapper(BaseDatabaseWrapper): 88 89 vendor = 'postgresql' … … 121 122 self.validation = BaseDatabaseValidation(self) 122 123 self._pg_version = None 123 124 125 def _get_test_schema_prefix(self): 126 return '' 127 test_schema_prefix = property(_get_test_schema_prefix) 128 124 129 def check_constraints(self, table_names=None): 125 130 """ 126 131 To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they -
django/db/backends/creation.py
2 2 import time 3 3 4 4 from django.conf import settings 5 from django.db import QName 5 6 from django.db.utils import load_backend 7 from django.core.management.color import no_style 6 8 7 9 # The prefix to put on the default database name when creating 8 10 # the test database. … … 28 30 """ 29 31 return '%x' % (abs(hash(args)) % 4294967296L) # 2**32 30 32 33 def sql_create_schema(self, schema, style): 34 """ 35 Returns the SQL required to create a single schema 36 """ 37 qn = self.connection.ops.quote_name 38 output = "%s %s;" % (style.SQL_KEYWORD('CREATE SCHEMA'), qn(schema)) 39 return output 40 31 41 def sql_create_model(self, model, style, known_models=set()): 32 42 """ 33 43 Returns the SQL required to create a single model, as a tuple of: … … 40 50 table_output = [] 41 51 pending_references = {} 42 52 qn = self.connection.ops.quote_name 53 qn3 = self.connection.ops.qualified_name 43 54 for f in opts.local_fields: 44 55 col_type = f.db_type(connection=self.connection) 45 56 tablespace = f.db_tablespace or opts.db_tablespace … … 79 90 for f in field_constraints])) 80 91 81 92 full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + 82 style.SQL_TABLE(qn (opts.db_table)) + ' (']93 style.SQL_TABLE(qn3(opts.qualified_name)) + ' ('] 83 94 for i, line in enumerate(table_output): # Combine and add commas. 84 95 full_statement.append( 85 96 ' %s%s' % (line, i < len(table_output)-1 and ',' or '')) … … 96 107 # Add any extra SQL needed to support auto-incrementing primary 97 108 # keys. 98 109 auto_column = opts.auto_field.db_column or opts.auto_field.name 99 autoinc_sql = self.connection.ops.autoinc_sql(opts. db_table,110 autoinc_sql = self.connection.ops.autoinc_sql(opts.qualified_name, 100 111 auto_column) 101 112 if autoinc_sql: 102 113 for stmt in autoinc_sql: … … 109 120 Return the SQL snippet defining the foreign key reference for a field. 
110 121 """ 111 122 qn = self.connection.ops.quote_name 123 from_qname = field.model._meta.qualified_name 124 to_qname = field.rel.to._meta.qualified_name 125 qname = self.qualified_name_for_ref(from_qname, to_qname) 112 126 if field.rel.to in known_models: 113 127 output = [style.SQL_KEYWORD('REFERENCES') + ' ' + 114 style.SQL_TABLE(qn (field.rel.to._meta.db_table)) + ' (' +128 style.SQL_TABLE(qname) + ' (' + 115 129 style.SQL_FIELD(qn(field.rel.to._meta.get_field( 116 130 field.rel.field_name).column)) + ')' + 117 131 self.connection.ops.deferrable_sql() … … 132 146 from django.db.backends.util import truncate_name 133 147 134 148 if not model._meta.managed or model._meta.proxy: 149 # So, we have a reference to either unmanaged model or to 150 # a proxy model. Lets just clear the pending_references 151 # for now. 152 if model in pending_references: 153 del pending_references[model] 135 154 return [] 136 155 qn = self.connection.ops.quote_name 156 qn3 = self.connection.ops.qualified_name 137 157 final_output = [] 138 158 opts = model._meta 139 159 if model in pending_references: 140 160 for rel_class, f in pending_references[model]: 141 161 rel_opts = rel_class._meta 142 162 r_table = rel_opts.db_table 163 r_qname = rel_opts.qualified_name 143 164 r_col = f.column 144 165 table = opts.db_table 166 qname = self.qualified_name_for_ref(r_qname, opts.qualified_name) 145 167 col = opts.get_field(f.rel.field_name).column 146 168 # For MySQL, r_name must be unique in the first 64 characters. 147 169 # So we are careful with character usage here. 
… … 149 171 r_col, col, self._digest(r_table, table)) 150 172 final_output.append(style.SQL_KEYWORD('ALTER TABLE') + 151 173 ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % 152 (qn (r_table), qn(truncate_name(174 (qn3(r_qname), qn(truncate_name( 153 175 r_name, self.connection.ops.max_name_length())), 154 qn(r_col), qn (table), qn(col),176 qn(r_col), qname, qn(col), 155 177 self.connection.ops.deferrable_sql())) 156 178 del pending_references[model] 157 179 return final_output 158 180 181 def qualified_name_for_ref(self, from_table, ref_table): 182 """ 183 In certain databases if the from_table is in qualified format and 184 ref_table is not, it is assumed the ref_table references a table 185 in the same schema as from_table is from. However, we want the 186 reference to be to default schema, not the same schema the from_table 187 is. This method will fix this issue where that is a problem. 188 """ 189 return self.connection.ops.qualified_name(ref_table) 190 159 191 def sql_indexes_for_model(self, model, style): 160 192 """ 161 193 Returns the CREATE INDEX SQL statements for a single model. … … 171 203 """ 172 204 Return the CREATE INDEX SQL statements for a single model field. 
173 205 """ 174 from django.db.backends.util import truncate_name175 176 206 if f.db_index and not f.unique: 177 207 qn = self.connection.ops.quote_name 208 qn3 = self.connection.ops.qualified_name 178 209 tablespace = f.db_tablespace or model._meta.db_tablespace 179 210 if tablespace: 180 211 tablespace_sql = self.connection.ops.tablespace_sql(tablespace) … … 182 213 tablespace_sql = ' ' + tablespace_sql 183 214 else: 184 215 tablespace_sql = '' 185 i_name = '%s_%s' % (model._meta.db_table, self._digest(f.column))216 qualified_name = self.qualified_index_name(model, f.column) 186 217 output = [style.SQL_KEYWORD('CREATE INDEX') + ' ' + 187 style.SQL_TABLE(qn(truncate_name( 188 i_name, self.connection.ops.max_name_length()))) + ' ' + 218 style.SQL_TABLE(qualified_name) + ' ' + 189 219 style.SQL_KEYWORD('ON') + ' ' + 190 style.SQL_TABLE(qn (model._meta.db_table)) + ' ' +220 style.SQL_TABLE(qn3(model._meta.qualified_name)) + ' ' + 191 221 "(%s)" % style.SQL_FIELD(qn(f.column)) + 192 222 "%s;" % tablespace_sql] 193 223 else: 194 224 output = [] 195 225 return output 196 226 227 def qualified_index_name(self, model, col): 228 """ 229 Some databases do support schemas, but indexes can not be placed in a 230 different schema. So, to support those databases, we need to be able 231 to return the index name in different qualified format than the rest 232 of the database identifiers. 233 """ 234 from django.db.backends.util import truncate_name 235 i_name = '%s_%s' % (model._meta.db_table, self._digest(col)) 236 i_name = truncate_name(i_name, self.connection.ops.max_name_length()) 237 return self.connection.ops.qualified_name( 238 QName(model._meta.db_schema, i_name, False)) 239 240 def sql_destroy_schema(self, schema, style): 241 """ 242 Returns the SQL required to destroy a single schema. 
243 """ 244 return "" 245 197 246 def sql_destroy_model(self, model, references_to_delete, style): 198 247 """ 199 248 Return the DROP TABLE and restraint dropping statements for a single … … 202 251 if not model._meta.managed or model._meta.proxy: 203 252 return [] 204 253 # Drop the table now 205 qn = self.connection.ops.quote_name254 qn3 = self.connection.ops.qualified_name 206 255 output = ['%s %s;' % (style.SQL_KEYWORD('DROP TABLE'), 207 style.SQL_TABLE(qn (model._meta.db_table)))]256 style.SQL_TABLE(qn3(model._meta.qualified_name)))] 208 257 if model in references_to_delete: 209 258 output.extend(self.sql_remove_table_constraints( 210 259 model, references_to_delete, style)) 211 260 if model._meta.has_auto_field: 212 ds = self.connection.ops.drop_sequence_sql(model._meta. db_table)261 ds = self.connection.ops.drop_sequence_sql(model._meta.qualified_name) 213 262 if ds: 214 263 output.append(ds) 215 264 return output … … 220 269 return [] 221 270 output = [] 222 271 qn = self.connection.ops.quote_name 272 qn3 = self.connection.ops.qualified_name 223 273 for rel_class, f in references_to_delete[model]: 224 274 table = rel_class._meta.db_table 275 qname = rel_class._meta.qualified_name 225 276 col = f.column 226 277 r_table = model._meta.db_table 227 278 r_col = model._meta.get_field(f.rel.field_name).column … … 229 280 col, r_col, self._digest(table, r_table)) 230 281 output.append('%s %s %s %s;' % \ 231 282 (style.SQL_KEYWORD('ALTER TABLE'), 232 style.SQL_TABLE(qn (table)),283 style.SQL_TABLE(qn3(qname)), 233 284 style.SQL_KEYWORD(self.connection.ops.drop_foreignkey_sql()), 234 285 style.SQL_FIELD(qn(truncate_name( 235 286 r_name, self.connection.ops.max_name_length()))))) … … 240 291 """ 241 292 Creates a test database, prompting the user for confirmation if the 242 293 database already exists. Returns the name of the test database created. 294 295 Also creates needed schemas, which on some backends live in the same 296 namespace than databases. 
If there are schema name clashes, prompts 297 the user for confirmation. 243 298 """ 244 299 # Don't import django.core.management if it isn't needed. 245 300 from django.core.management import call_command … … 253 308 print "Creating test database for alias '%s'%s..." % ( 254 309 self.connection.alias, test_db_repr) 255 310 256 self._create_test_db(verbosity, autoclobber) 311 schemas = self.get_schemas() 312 self._create_test_db(verbosity, autoclobber, schemas) 257 313 314 # Create the test schemas. 315 self.connection.settings_dict["NAME"] = test_database_name 258 316 self.connection.close() 259 self.connection.settings_dict["NAME"] = test_database_name 317 schemas = ['%s%s' % (self.connection.test_schema_prefix, s) for s in schemas] 318 created_schemas = self._create_test_schemas(verbosity, schemas, autoclobber) 260 319 261 320 # Confirm the feature set of the test database 262 321 self.connection.features.confirm() … … 292 351 # the side effect of initializing the test database. 293 352 self.connection.cursor() 294 353 295 return test_database_name 354 return test_database_name, created_schemas 296 355 356 def _create_test_schemas(self, verbosity, schemas, autoclobber): 357 style = no_style() 358 cursor = self.connection.cursor() 359 existing_schemas = self.connection.introspection.get_schema_list(cursor) 360 if not self.connection.features.namespaced_schemas: 361 conflicts = [s for s in existing_schemas if s in schemas] 362 else: 363 conflicts = [] 364 if conflicts: 365 print 'The following schemas already exists: %s' % ', '.join(conflicts) 366 if not autoclobber: 367 confirm = raw_input( 368 "Type 'yes' if you would like to try deleting these schemas " 369 "or 'no' to cancel: ") 370 if autoclobber or confirm == 'yes': 371 try: 372 # Some databases (well, MySQL) complain about foreign keys when 373 # dropping a database. So, disable the constraints temporarily. 
374 self.connection.disable_constraint_checking() 375 for schema in conflicts: 376 if verbosity >= 1: 377 print "Destroying schema %s" % schema 378 cursor.execute(self.sql_destroy_schema(schema, style)) 379 existing_schemas.remove(schema) 380 finally: 381 self.connection.enable_constraint_checking() 382 else: 383 print "Tests cancelled." 384 sys.exit(1) 385 386 to_create = [s for s in schemas if s not in existing_schemas] 387 for schema in to_create: 388 if verbosity >= 1: 389 print "Creating schema %s" % schema 390 cursor.execute(self.sql_create_schema(schema, style)) 391 self.connection.settings_dict['TEST_SCHEMAS'].append(schema) 392 return to_create 393 394 def get_schemas(self): 395 from django.db import models 396 apps = models.get_apps() 397 schemas = set() 398 for app in apps: 399 app_models = models.get_models(app, include_auto_created=True) 400 for model in app_models: 401 schema = model._meta.db_schema 402 if schema: 403 schemas.add(schema) 404 conn_default_schema = self.connection.settings_dict['SCHEMA'] 405 if conn_default_schema: 406 schemas.add(conn_default_schema) 407 return schemas 408 297 409 def _get_test_db_name(self): 298 410 """ 299 411 Internal implementation - returns the name of the test DB that will be … … 305 417 return self.connection.settings_dict['TEST_NAME'] 306 418 return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME'] 307 419 308 def _create_test_db(self, verbosity, autoclobber ):420 def _create_test_db(self, verbosity, autoclobber, schemas): 309 421 """ 310 422 Internal implementation - creates the test db tables. 311 423 """ … … 335 447 if verbosity >= 1: 336 448 print ("Destroying old test database '%s'..." 337 449 % self.connection.alias) 450 # MySQL nicely doesn't have a drop-cascade option, nor 451 # does it allow dropping a database having foreign key 452 # references pointing to it. So, we just disable foreign 453 # key checks and then immediately enable them. 
MySQL is 454 # happy after this hack, and other databases simply do 455 # not care. 456 try: 457 self.connection.disable_constraint_checking() 458 cursor.execute( 459 "DROP DATABASE %s" % qn(test_database_name)) 460 finally: 461 self.connection.enable_constraint_checking() 338 462 cursor.execute( 339 "DROP DATABASE %s" % qn(test_database_name))340 cursor.execute(341 463 "CREATE DATABASE %s %s" % (qn(test_database_name), 342 464 suffix)) 343 465 except Exception, e: … … 348 470 print "Tests cancelled." 349 471 sys.exit(1) 350 472 473 self.connection.settings_dict['TEST_SCHEMAS'].append(test_database_name) 351 474 return test_database_name 352 475 353 def destroy_test_db(self, old_database_name, verbosity=1):476 def destroy_test_db(self, old_database_name, created_schemas, verbosity=1): 354 477 """ 355 478 Destroy a test database, prompting the user for confirmation if the 356 479 database already exists. 357 480 """ 481 # On databases where there is no support for multiple databases 482 # with multiple schemas we need to destroy the created schemas 483 # manually. 484 cursor = self.connection.cursor() 485 style = no_style() 486 if not self.connection.features.namespaced_schemas: 487 try: 488 self.connection.disable_constraint_checking() 489 for schema in created_schemas: 490 if verbosity >= 1: 491 print "Destroying schema '%s'..." % schema 492 cursor.execute(self.sql_destroy_schema(schema, style)) 493 finally: 494 self.connection.enable_constraint_checking() 358 495 self.connection.close() 359 496 test_database_name = self.connection.settings_dict['NAME'] 497 498 360 499 if verbosity >= 1: 361 500 test_db_repr = '' 362 501 if verbosity >= 2: … … 429 568 settings_dict['ENGINE'], 430 569 settings_dict['NAME'] 431 570 ) 571 572 def post_create_pending_references(self, pending_references, as_sql=False): 573 """ 574 Create any pending references which need special handling (for example 575 different connections). 
The as_sql flag tells us if we should return 576 the raw SQL used. This is needed for the "sql" management commands. 577 """ 578 raise NotImplementedError -
django/db/backends/sqlite3/base.py
11 11 import re 12 12 import sys 13 13 14 from django.db import utils 14 from django.db import utils, QName 15 15 from django.db.backends import * 16 16 from django.db.backends.signals import connection_created 17 17 from django.db.backends.sqlite3.client import DatabaseClient … … 84 84 supports_mixed_date_datetime_comparisons = False 85 85 has_bulk_insert = True 86 86 can_combine_inserts_with_and_without_auto_increment_pk = True 87 supports_foreign_keys = False 88 # SQLite doesn't support schemas at all, but our hack of appending 89 # the schema name to table name creates namespaced schemas from 90 # Django's perspective 91 namespaced_schemas = True 87 92 88 93 def _supports_stddev(self): 89 94 """Confirm support for STDDEV and related stats functions … … 139 144 return name # Quoting once is enough. 140 145 return '"%s"' % name 141 146 147 def qualified_name(self, qname): 148 # Fake schema support by using the schema as a prefix to the 149 # table name. Keep record of what names are already qualified 150 # to avoid double-qualifying. 151 assert isinstance(qname, QName) 152 if qname.db_format: 153 # A name from DB must not have a schema (no schema support) 154 assert not qname.schema 155 schema = None 156 else: 157 schema = qname.schema or self.connection.schema 158 if schema: 159 return self.quote_name('%s_%s' % (schema, qname.table)) 160 else: 161 return self.quote_name(qname.table) 162 142 163 def no_limit_value(self): 143 164 return -1 144 165 … … 146 167 # NB: The generated SQL below is specific to SQLite 147 168 # Note: The DELETE FROM... 
SQL generated below works for SQLite databases 148 169 # because constraints don't exist 149 sql = ['%s %s %s;' % \ 170 sql = [] 171 for table in tables: 172 sql.append('%s %s %s;' % \ 150 173 (style.SQL_KEYWORD('DELETE'), 151 174 style.SQL_KEYWORD('FROM'), 152 style.SQL_FIELD(self.qu ote_name(table))153 ) for table in tables]175 style.SQL_FIELD(self.qualified_name(table)) 176 )) 154 177 # Note: No requirement for reset of auto-incremented indices (cf. other 155 178 # sql_flush() implementations). Just return SQL at this point 156 179 return sql … … 243 266 self.introspection = DatabaseIntrospection(self) 244 267 self.validation = BaseDatabaseValidation(self) 245 268 269 def convert_schema(self, schema): 270 # No real schema support. 271 return None 272 246 273 def _sqlite_create_connection(self): 247 274 settings_dict = self.settings_dict 248 275 if not settings_dict['NAME']: … … 295 322 """ 296 323 cursor = self.cursor() 297 324 if table_names is None: 298 table_names = self.introspection.get_table_list(cursor) 325 table_names = self.introspection.get_visible_tables_list(cursor) 326 else: 327 table_names = [self.introspection.qname_converter(t) for t in table_names] 299 328 for table_name in table_names: 300 329 primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) 301 330 if not primary_key_column_name: … … 307 336 LEFT JOIN `%s` as REFERRED 308 337 ON (REFERRING.`%s` = REFERRED.`%s`) 309 338 WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL""" 310 % (primary_key_column_name, column_name, table_name , referenced_table_name,339 % (primary_key_column_name, column_name, table_name[1], referenced_table_name[1], 311 340 column_name, referenced_column_name, column_name, referenced_column_name)) 312 341 for bad_row in cursor.fetchall(): 313 342 raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid " 314 343 "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in 
%s.%s." 315 % (table_name, bad_row[0], table_name , column_name, bad_row[1],344 % (table_name, bad_row[0], table_name[1], column_name, bad_row[1], 316 345 referenced_table_name, referenced_column_name)) 317 346 318 347 def close(self): -
django/db/backends/sqlite3/introspection.py
1 1 import re 2 from django.db import QName 2 3 from django.db.backends import BaseDatabaseIntrospection 3 4 4 5 # This light wrapper "fakes" a dictionary interface, because some SQLite data … … 41 42 class DatabaseIntrospection(BaseDatabaseIntrospection): 42 43 data_types_reverse = FlexibleFieldLookupDict() 43 44 44 def get_ table_list(self, cursor):45 "Returns a list of table names in the current database ."45 def get_visible_tables_list(self, cursor): 46 "Returns a list of table names in the current database" 46 47 # Skip the sqlite_sequence system table used for autoincrement key 47 48 # generation. 48 49 cursor.execute(""" 49 50 SELECT name FROM sqlite_master 50 51 WHERE type='table' AND NOT name='sqlite_sequence' 51 52 ORDER BY name""") 52 return [ row[0]for row in cursor.fetchall()]53 return [QName(None, row[0], True) for row in cursor.fetchall()] 53 54 54 def get_table_description(self, cursor, table_name):55 def get_table_description(self, cursor, qualified_name): 55 56 "Returns a description of the table, with the DB-API cursor.description interface." 56 57 return [(info['name'], info['type'], None, None, None, None, 57 info['null_ok']) for info in self._table_info(cursor, table_name)]58 info['null_ok']) for info in self._table_info(cursor, qualified_name[1])] 58 59 59 def get_relations(self, cursor, table_name):60 def get_relations(self, cursor, qualified_name): 60 61 """ 61 62 Returns a dictionary of {field_index: (field_index_other_table, other_table)} 62 63 representing all relationships to the given table. Indexes are 0-based. 
63 64 """ 65 table_name = qualified_name[1] 64 66 65 67 # Dictionary of relations to return 66 68 relations = {} … … 98 100 99 101 name = other_desc.split(' ', 1)[0].strip('"') 100 102 if name == column: 101 relations[field_index] = (other_index, table) 103 relations[field_index] = (other_index, 104 QName(None, table, True)) 102 105 break 103 106 104 107 return relations 105 108 106 def get_key_columns(self, cursor, table_name):109 def get_key_columns(self, cursor, qname): 107 110 """ 108 111 Returns a list of (column_name, referenced_table_name, referenced_column_name) for all 109 112 key columns in given table. 110 113 """ 114 table_name = self.qname_converter(qname)[1] 111 115 key_columns = [] 112 116 113 117 # Schema for this table … … 128 132 continue 129 133 130 134 # This will append (column_name, referenced_table_name, referenced_column_name) to key_columns 131 key_columns.append(tuple([s.strip('"') for s in m.groups()])) 135 add = tuple([s.strip('"') for s in m.groups()]) 136 add = add[0], (None, add[1], True), add[2] 137 key_columns.append(add) 132 138 133 139 return key_columns 134 140 135 def get_indexes(self, cursor, table_name):141 def get_indexes(self, cursor, qualified_name): 136 142 """ 137 143 Returns a dictionary of fieldname -> infodict for the given table, 138 144 where each infodict is in the format: 139 145 {'primary_key': boolean representing whether it's the primary key, 140 146 'unique': boolean representing whether it's a unique index} 141 147 """ 148 table_name = qualified_name[1] 142 149 indexes = {} 143 150 for info in self._table_info(cursor, table_name): 144 151 indexes[info['name']] = {'primary_key': info['pk'] != 0, … … 157 164 indexes[name]['unique'] = True 158 165 return indexes 159 166 160 def get_primary_key_column(self, cursor, table_name):167 def get_primary_key_column(self, cursor, qname): 161 168 """ 162 169 Get the column name of the primary key for the given table. 
163 170 """ 171 qname = self.qname_converter(qname) 172 table_name = qname[1] 164 173 # Don't use PRAGMA because that causes issues with some transactions 165 174 cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"]) 166 175 results = cursor.fetchone()[0].strip() … … 180 189 'null_ok': not field[3], 181 190 'pk': field[5] # undocumented 182 191 } for field in cursor.fetchall()] 192 193 def identifier_converter(self, identifier): 194 return identifier 195 196 def qname_converter(self, qname, force_schema=False): 197 # For SQLite force_schema does nothing, as the default schema is 198 # None. 199 assert isinstance(qname, QName) 200 assert not (qname.schema and qname.db_format) 201 return QName(None, self.connection.ops.qualified_name(qname)[1:-1], 202 True) -
django/db/backends/sqlite3/creation.py
33 33 34 34 def sql_for_pending_references(self, model, style, pending_references): 35 35 "SQLite3 doesn't support constraints" 36 if model in pending_references: 37 del pending_references[model] 36 38 return [] 37 39 38 40 def sql_remove_table_constraints(self, model, references_to_delete, style): 39 41 "SQLite3 doesn't support constraints" 40 42 return [] 41 43 44 def sql_create_schema(self, schema, verbosity): 45 "SQLite3 doesn't support schemas" 46 return 47 42 48 def _get_test_db_name(self): 43 49 test_database_name = self.connection.settings_dict['TEST_NAME'] 44 50 if test_database_name and test_database_name != ':memory:': 45 51 return test_database_name 46 52 return ':memory:' 47 53 48 def _create_test_db(self, verbosity, autoclobber ):54 def _create_test_db(self, verbosity, autoclobber, schemas): 49 55 test_database_name = self._get_test_db_name() 50 56 if test_database_name != ':memory:': 51 57 # Erase the old test database … … 65 71 sys.exit(1) 66 72 return test_database_name 67 73 74 def _create_test_schemas(self, verbosity, schemas, cursor): 75 return [] 76 68 77 def _destroy_test_db(self, test_database_name, verbosity): 69 78 if test_database_name and test_database_name != ":memory:": 70 79 # Remove the SQLite database file … … 81 90 SQLite since the databases will be distinct despite having the same 82 91 TEST_NAME. See http://www.sqlite.org/inmemorydb.html 83 92 """ 84 settings_dict = self.connection.settings_dict85 93 test_dbname = self._get_test_db_name() 86 94 sig = [self.connection.settings_dict['NAME']] 87 95 if test_dbname == ':memory:': -
django/db/backends/mysql/introspection.py
1 from django.db import QName 1 2 from django.db.backends import BaseDatabaseIntrospection 2 3 from MySQLdb import ProgrammingError, OperationalError 3 4 from MySQLdb.constants import FIELD_TYPE … … 28 29 FIELD_TYPE.VAR_STRING: 'CharField', 29 30 } 30 31 31 def get_table_list(self, cursor): 32 "Returns a list of table names in the current database." 33 cursor.execute("SHOW TABLES") 34 return [row[0] for row in cursor.fetchall()] 32 def get_visible_tables_list(self, cursor): 33 "Returns a list of visible tables" 34 return self.get_qualified_tables_list(cursor, [self.connection.settings_dict['NAME']]) 35 35 36 def get_table_description(self, cursor, table_name): 36 def get_qualified_tables_list(self, cursor, schemas): 37 default_schema = self.connection.convert_schema(None) 38 if default_schema: 39 schemas.append(default_schema) 40 if not schemas: 41 return [] 42 param_list = ', '.join(['%s']*len(schemas)) 43 cursor.execute(""" 44 SELECT table_schema, table_name 45 FROM information_schema.tables 46 WHERE table_schema in (%s)""" % param_list, schemas) 47 return [QName(row[0], row[1], True) for row in cursor.fetchall()] 48 49 def get_table_description(self, cursor, qname): 37 50 "Returns a description of the table, with the DB-API cursor.description interface." 38 cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)) 51 qname = self.qname_converter(qname) 52 cursor.execute("SELECT * FROM %s LIMIT 1" 53 % self.connection.ops.qualified_name(qname)) 39 54 return cursor.description 40 55 41 def _name_to_index(self, cursor, table_name):56 def _name_to_index(self, cursor, qname): 42 57 """ 43 58 Returns a dictionary of {field_name: field_index} for the given table. 44 59 Indexes are 0-based. 
45 60 """ 46 return dict( [(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])61 return dict((d[0], i) for i, d in enumerate(self.get_table_description(cursor, qname))) 47 62 48 def get_relations(self, cursor, table_name):63 def get_relations(self, cursor, qname): 49 64 """ 50 65 Returns a dictionary of {field_index: (field_index_other_table, other_table)} 51 66 representing all relationships to the given table. Indexes are 0-based. 52 67 """ 53 my_field_dict = self._name_to_index(cursor, table_name)54 constraints = self.get_key_columns(cursor, table_name)68 my_field_dict = self._name_to_index(cursor, qname) 69 constraints = self.get_key_columns(cursor, qname) 55 70 relations = {} 56 71 for my_fieldname, other_table, other_field in constraints: 57 72 other_field_index = self._name_to_index(cursor, other_table)[other_field] … … 59 74 relations[my_field_index] = (other_field_index, other_table) 60 75 return relations 61 76 62 def get_key_columns(self, cursor, table_name):77 def get_key_columns(self, cursor, qname): 63 78 """ 64 Returns a list of (column_name, referenced_table_name, referenced_column_name) for all 65 key columns in given table. 79 Returns a list of 80 (column_name, 81 (reference_table_schema, referenced_table_name), 82 referenced_column_name) 83 for all key columns in given table. 
66 84 """ 67 85 key_columns = [] 86 qname = self.qname_converter(qname, force_schema=True) 68 87 try: 69 88 cursor.execute(""" 70 SELECT column_name, referenced_table_ name, referenced_column_name89 SELECT column_name, referenced_table_schema, referenced_table_name, referenced_column_name 71 90 FROM information_schema.key_column_usage 72 WHERE table_ name= %s73 AND table_ schema = DATABASE()91 WHERE table_schema = %s 92 AND table_name = %s 74 93 AND referenced_table_name IS NOT NULL 75 AND referenced_column_name IS NOT NULL""", [table_name]) 76 key_columns.extend(cursor.fetchall()) 94 AND referenced_column_name IS NOT NULL""", 95 [qname.schema, qname.table]) 96 for row in cursor.fetchall(): 97 key_columns.append((row[0], QName(row[1], row[2], True), 98 row[3])) 77 99 except (ProgrammingError, OperationalError): 78 100 # Fall back to "SHOW CREATE TABLE", for previous MySQL versions. 79 101 # Go through all constraints and save the equal matches. 80 cursor.execute("SHOW CREATE TABLE %s" % self.connection.ops.qu ote_name(table_name))102 cursor.execute("SHOW CREATE TABLE %s" % self.connection.ops.qualified_name(qname)) 81 103 for row in cursor.fetchall(): 82 104 pos = 0 83 105 while True: … … 85 107 if match == None: 86 108 break 87 109 pos = match.end() 88 key_columns.append(match.groups()) 110 groups = match.groups() 111 tblname = groups[1] 112 if '.' 
in tblname: 113 tblname = tblname.split('.') 114 else: 115 tblname = None, tblname 116 key_columns.append((groups[0], tblname, groups[2])) 89 117 return key_columns 90 118 91 def get_primary_key_column(self, cursor, table_name):119 def get_primary_key_column(self, cursor, qname): 92 120 """ 93 121 Returns the name of the primary key column for the given table 94 122 """ 95 for column in self.get_indexes(cursor, table_name).iteritems():123 for column in self.get_indexes(cursor, qname).iteritems(): 96 124 if column[1]['primary_key']: 97 125 return column[0] 98 126 return None 99 127 100 def get_indexes(self, cursor, table_name):128 def get_indexes(self, cursor, qname): 101 129 """ 102 130 Returns a dictionary of fieldname -> infodict for the given table, 103 131 where each infodict is in the format: 104 132 {'primary_key': boolean representing whether it's the primary key, 105 133 'unique': boolean representing whether it's a unique index} 106 134 """ 107 cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name)) 135 qname = self.qname_converter(qname) 136 cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.qualified_name(qname)) 108 137 indexes = {} 109 138 for row in cursor.fetchall(): 110 139 indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])} 111 140 return indexes 112 141 142 def get_schema_list(self, cursor): 143 cursor.execute("SHOW DATABASES") 144 return [r[0] for r in cursor.fetchall()] 145 146 def qname_converter(self, qname, force_schema=False): 147 assert isinstance(qname, QName) 148 if qname.db_format and (qname.schema or not force_schema): 149 return qname 150 schema = self.connection.convert_schema(qname.schema) 151 if not schema and force_schema: 152 schema = self.connection.settings_dict['NAME'] 153 return QName(schema, qname.table, True) -
django/db/backends/mysql/creation.py
1 from django.db import QName 1 2 from django.db.backends.creation import BaseDatabaseCreation 2 3 3 4 class DatabaseCreation(BaseDatabaseCreation): … … 58 59 style.SQL_KEYWORD('NOT NULL')) 59 60 ] 60 61 deferred = [ 61 (field.m2m_ db_table(), field.m2m_column_name(), opts.db_table,62 (field.m2m_qualified_table(), field.m2m_column_name(), opts.qualified_name, 62 63 opts.pk.column), 63 (field.m2m_ db_table(), field.m2m_reverse_name(),64 field.rel.to._meta. db_table, field.rel.to._meta.pk.column)64 (field.m2m_qualified_table(), field.m2m_reverse_name(), 65 field.rel.to._meta.qualified_name, field.rel.to._meta.pk.column) 65 66 ] 66 67 return table_output, deferred 68 69 def sql_destroy_schema(self, schema, style): 70 qn = self.connection.ops.quote_name 71 return "%s %s;" % (style.SQL_KEYWORD('DROP DATABASE'), qn(schema)) 72 73 def qualified_index_name(self, model, col): 74 """ 75 On MySQL we must use the db_schema prefixed to the index name as 76 indexes can not be placed into different schemas. 77 """ 78 from django.db.backends.util import truncate_name 79 schema = model._meta.db_schema or self.connection.schema 80 max_len = self.connection.ops.max_name_length() 81 schema_prefix = '' 82 if schema: 83 schema = self.connection.convert_schema(schema) 84 schema_prefix = truncate_name(schema, max_len / 2) + '_' 85 i_name = '%s%s_%s' % (schema_prefix, model._meta.db_table, self._digest(col)) 86 i_name = self.connection.ops.quote_name(truncate_name(i_name, max_len)) 87 return i_name 88 89 def qualified_name_for_ref(self, from_table, ref_table): 90 """ 91 MySQL does not have qualified name format for indexes, so make sure to 92 use qualified names if needed. 
93 """ 94 from_qn = self.connection.introspection.qname_converter(from_table) 95 to_qn = self.connection.introspection.qname_converter(ref_table) 96 if to_qn.schema is None: 97 to_qn = QName(self.connection.settings_dict['NAME'], 98 to_qn.table, to_qn.db_format) 99 return super(DatabaseCreation, self).qualified_name_for_ref(from_qn, to_qn) -
django/db/backends/mysql/base.py
15 15 from django.core.exceptions import ImproperlyConfigured 16 16 raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e) 17 17 18 from django.db.backends.util import truncate_name 19 18 20 # We want version (1, 2, 1, 'final', 2) or later. We can't just use 19 21 # lexicographic ordering in this check because then (1, 2, 1, 'gamma') 20 22 # inadvertently passes the version test. … … 187 189 "Confirm support for introspected foreign keys" 188 190 return self._mysql_storage_engine() != 'MyISAM' 189 191 192 def confirm(self): 193 super(DatabaseFeatures, self).confirm() 194 self.supports_foreign_keys != self.can_introspect_foreign_keys 195 190 196 class DatabaseOperations(BaseDatabaseOperations): 191 197 compiler_module = "django.db.backends.mysql.compiler" 192 198 … … 245 251 return name # Quoting once is enough. 246 252 return "`%s`" % name 247 253 254 def qualified_name(self, qname): 255 schema = qname.schema 256 if not qname.db_format: 257 schema = self.connection.convert_schema(schema) 258 if schema: 259 return "%s.%s" % (self.quote_name(schema), 260 self.quote_name(qname.table)) 261 else: 262 return self.quote_name(qname.table) 263 248 264 def random_function_sql(self): 249 265 return 'RAND()' 250 266 … … 255 271 if tables: 256 272 sql = ['SET FOREIGN_KEY_CHECKS = 0;'] 257 273 for table in tables: 258 sql.append('%s %s;' % (style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table)))) 274 sql.append('%s %s;' 275 % (style.SQL_KEYWORD('TRUNCATE'), 276 style.SQL_FIELD(self.qualified_name(table)))) 259 277 sql.append('SET FOREIGN_KEY_CHECKS = 1;') 260 278 261 279 # 'ALTER TABLE table AUTO_INCREMENT = 1;'... 
style SQL statements … … 263 281 sql.extend(["%s %s %s %s %s;" % \ 264 282 (style.SQL_KEYWORD('ALTER'), 265 283 style.SQL_KEYWORD('TABLE'), 266 style.SQL_TABLE(self.qu ote_name(sequence['table'])),284 style.SQL_TABLE(self.qualified_name((sequence['qname']))), 267 285 style.SQL_KEYWORD('AUTO_INCREMENT'), 268 286 style.SQL_FIELD('= 1'), 269 287 ) for sequence in sequences]) … … 347 365 self.creation = DatabaseCreation(self) 348 366 self.introspection = DatabaseIntrospection(self) 349 367 self.validation = DatabaseValidation(self) 368 369 def convert_schema(self, schema): 370 schema = schema or self.schema 371 if schema and self.test_schema_prefix: 372 return truncate_name('%s%s' % (self.test_schema_prefix, schema), 373 self.ops.max_name_length()) 374 return schema 350 375 351 376 def _valid_connection(self): 352 377 if self.connection is not None: … … 442 467 ALL IMMEDIATE") 443 468 """ 444 469 cursor = self.cursor() 470 qn3 = self.ops.qualified_name 445 471 if table_names is None: 446 table_names = self.introspection.get_ table_list(cursor)472 table_names = self.introspection.get_visible_tables_list(cursor) 447 473 for table_name in table_names: 448 474 primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) 449 475 if not primary_key_column_name: … … 451 477 key_columns = self.introspection.get_key_columns(cursor, table_name) 452 478 for column_name, referenced_table_name, referenced_column_name in key_columns: 453 479 cursor.execute(""" 454 SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s`as REFERRING455 LEFT JOIN `%s`as REFERRED480 SELECT REFERRING.`%s`, REFERRING.`%s` FROM %s as REFERRING 481 LEFT JOIN %s as REFERRED 456 482 ON (REFERRING.`%s` = REFERRED.`%s`) 457 483 WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL""" 458 % (primary_key_column_name, column_name, table_name, referenced_table_name, 459 column_name, referenced_column_name, column_name, referenced_column_name)) 484 % (primary_key_column_name, column_name, 
qn3(table_name), 485 qn3(referenced_table_name), column_name, 486 referenced_column_name, column_name, 487 referenced_column_name)) 460 488 for bad_row in cursor.fetchall(): 461 489 raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid " 462 490 "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s." 463 % (table_name , bad_row[0],464 table_name , column_name, bad_row[1],465 referenced_table_name , referenced_column_name))491 % (table_name[1], bad_row[0], 492 table_name[1], column_name, bad_row[1], 493 referenced_table_name[1], referenced_column_name)) -
django/db/backends/oracle/base.py
4 4 Requires cx_Oracle: http://cx-oracle.sourceforge.net/ 5 5 """ 6 6 7 8 7 import datetime 9 8 import decimal 10 9 import sys 11 10 import warnings 12 11 12 from django.db import QName 13 13 14 14 def _setup_environment(environ): 15 15 import platform … … 48 48 from django.conf import settings 49 49 from django.db import utils 50 50 from django.db.backends import * 51 from django.db.backends.util import truncate_name 51 52 from django.db.backends.signals import connection_created 52 53 from django.db.backends.oracle.client import DatabaseClient 53 54 from django.db.backends.oracle.creation import DatabaseCreation … … 87 88 class DatabaseOperations(BaseDatabaseOperations): 88 89 compiler_module = "django.db.backends.oracle.compiler" 89 90 90 def autoinc_sql(self, table, column):91 def autoinc_sql(self, qname, column): 91 92 # To simulate auto-incrementing primary keys in Oracle, we have to 92 93 # create a sequence and a trigger. 93 sq_name = self._get_sequence_name(table) 94 tr_name = self._get_trigger_name(table) 95 tbl_name = self.quote_name(table) 96 col_name = self.quote_name(column) 94 seq_name = self._get_sequence_name(qname) 95 schema = seq_name.schema.upper() 96 sname = self.connection.ops.qualified_name(seq_name).upper() 97 params = { 98 'sq_name': seq_name.table, 99 'schema': schema, 100 'qualified_sq_name': sname, 101 'tr_name': self._get_trigger_name(qname), 102 'tbl_name': self.qualified_name(qname), 103 'col_name' : self.quote_name(column), 104 } 97 105 sequence_sql = """ 98 106 DECLARE 99 107 i INTEGER; 100 108 BEGIN 101 SELECT COUNT(*) INTO i FROM USER_CATALOG 102 WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; 109 SELECT COUNT(*) INTO i FROM ALL_CATALOG 110 WHERE TABLE_NAME = '%(sq_name)s' AND OWNER = '%(schema)s' 111 AND TABLE_TYPE = 'SEQUENCE'; 103 112 IF i = 0 THEN 104 EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';113 EXECUTE IMMEDIATE 'CREATE SEQUENCE %(qualified_sq_name)s'; 105 114 END IF; 106 115 END; 107 /""" % locals()116 
/""" % params 108 117 trigger_sql = """ 109 CREATE OR REPLACE TRIGGER "%(tr_name)s"118 CREATE OR REPLACE TRIGGER %(tr_name)s 110 119 BEFORE INSERT ON %(tbl_name)s 111 120 FOR EACH ROW 112 121 WHEN (new.%(col_name)s IS NULL) 113 122 BEGIN 114 SELECT "%(sq_name)s".nextval123 SELECT %(qualified_sq_name)s.nextval 115 124 INTO :new.%(col_name)s FROM dual; 116 125 END; 117 /""" % locals()126 /""" % params 118 127 return sequence_sql, trigger_sql 119 128 120 129 def date_extract_sql(self, lookup_type, field_name): … … 196 205 def deferrable_sql(self): 197 206 return " DEFERRABLE INITIALLY DEFERRED" 198 207 199 def drop_sequence_sql(self, table): 200 return "DROP SEQUENCE %s;" % self.quote_name(self._get_sequence_name(table)) 208 def drop_sequence_sql(self, qname): 209 seq_name = self._get_sequence_name(qname) 210 qname = self.connection.ops.qualified_name(seq_name) 211 return "DROP SEQUENCE %s;" % qname 201 212 202 213 def fetch_returned_insert_id(self, cursor): 203 214 return long(cursor._insert_id_var.getvalue()) … … 213 224 # The DB API definition does not define this attribute. 
214 225 return cursor.statement 215 226 216 def last_insert_id(self, cursor, table_name, pk_name):217 sq_name = self._get_sequence_name( table_name)218 cursor.execute('SELECT "%s".currval FROM dual' % sq_name)227 def last_insert_id(self, cursor, qualified_name, pk_name): 228 sq_name = self._get_sequence_name(qualified_name) 229 cursor.execute('SELECT %s.currval FROM dual' % sq_name) 219 230 return cursor.fetchone()[0] 220 231 221 232 def lookup_cast(self, lookup_type): … … 247 258 self.max_name_length()) 248 259 return name.upper() 249 260 261 def qualified_name(self, qname): 262 assert isinstance(qname, QName) 263 schema = qname.schema 264 if not qname.db_format: 265 if not schema: 266 schema = self.connection.schema 267 schema = self.connection.convert_schema(schema) 268 if schema: 269 return "%s.%s" % (self.quote_name(schema), 270 self.quote_name(qname.table)) 271 else: 272 return self.quote_name(qname.table) 273 250 274 def random_function_sql(self): 251 275 return "DBMS_RANDOM.RANDOM" 252 276 277 253 278 def regex_lookup_9(self, lookup_type): 254 279 raise NotImplementedError("Regexes are not supported in Oracle before version 10g.") 255 280 … … 284 309 sql = ['%s %s %s;' % \ 285 310 (style.SQL_KEYWORD('DELETE'), 286 311 style.SQL_KEYWORD('FROM'), 287 style.SQL_FIELD(self.qu ote_name(table)))312 style.SQL_FIELD(self.qualified_name(table))) 288 313 for table in tables] 289 314 # Since we've just deleted all the rows, running our sequence 290 315 # ALTER code will reset the sequence to 0. 
291 316 for sequence_info in sequences: 292 sequence_name = self._get_sequence_name(sequence_info['table']) 293 table_name = self.quote_name(sequence_info['table']) 294 column_name = self.quote_name(sequence_info['column'] or 'id') 295 query = _get_sequence_reset_sql() % {'sequence': sequence_name, 296 'table': table_name, 297 'column': column_name} 298 sql.append(query) 317 q = self._sequence_reset_sql_for_col( 318 sequence_info['qname'], sequence_info['column'] or 'id') 319 sql.append(q) 299 320 return sql 300 321 else: 301 322 return [] 302 323 324 def _sequence_reset_sql_for_col(self, qname, column): 325 qname = self.connection.introspection.qname_converter(qname) 326 qn = self.connection.ops.qualified_name 327 table = qn(qname) 328 sequence_name = self._get_sequence_name(qname) 329 column_name = self.quote_name(column) 330 params = {'sequence': sequence_name.table, 'table': table, 331 'schema': sequence_name.schema.upper(), 'column': column_name} 332 query = _get_sequence_reset_sql(bool(params['schema'])) 333 return query % params 334 303 335 def sequence_reset_sql(self, style, model_list): 304 336 from django.db import models 305 337 output = [] 306 query = _get_sequence_reset_sql()307 338 for model in model_list: 308 339 for f in model._meta.local_fields: 309 340 if isinstance(f, models.AutoField): 310 table_name = self.quote_name(model._meta.db_table) 311 sequence_name = self._get_sequence_name(model._meta.db_table) 312 column_name = self.quote_name(f.column) 313 output.append(query % {'sequence': sequence_name, 314 'table': table_name, 315 'column': column_name}) 341 q = self._sequence_reset_sql_for_col( 342 model._meta.qualified_name, f.column) 343 output.append(q) 316 344 # Only one AutoField is allowed per model, so don't 317 345 # continue to loop 318 346 break 319 347 for f in model._meta.many_to_many: 320 348 if not f.rel.through: 321 table_name = self.quote_name(f.m2m_db_table()) 322 sequence_name = self._get_sequence_name(f.m2m_db_table()) 323 
column_name = self.quote_name('id') 324 output.append(query % {'sequence': sequence_name, 325 'table': table_name, 326 'column': column_name}) 349 q = self._sequence_reset_sql_for_col( 350 f.m2m_qualified_name(), 'id') 351 output.append(q) 327 352 return output 328 353 329 354 def start_transaction_sql(self): … … 377 402 raise NotImplementedError("Bit-wise or is not supported in Oracle.") 378 403 return super(DatabaseOperations, self).combine_expression(connector, sub_expressions) 379 404 380 def _get_sequence_name(self, table): 405 def _get_sequence_name(self, qname): 406 assert isinstance(qname, QName) 407 qname = self.connection.introspection.qname_converter(qname, 408 force_schema=True) 381 409 name_length = self.max_name_length() - 3 382 return '%s_SQ' % util.truncate_name(table, name_length).upper() 410 seq_name = '%s_SQ' % util.truncate_name(qname.table, 411 name_length).upper() 412 return QName(qname.schema, seq_name, True) 413 #return '%s_SQ' % util.truncate_name(name, name_length).upper() 383 414 384 def _get_trigger_name(self, table): 415 def _get_trigger_name(self, qname): 416 assert isinstance(qname, QName) 417 qname = self.connection.introspection.qname_converter(qname) 385 418 name_length = self.max_name_length() - 3 386 return '%s_TR' % util.truncate_name(table, name_length).upper() 419 trig_name = '%s_TR' % util.truncate_name(qname.table, 420 name_length).upper() 421 return self.qualified_name(QName(qname.schema, trig_name, True)) 387 422 388 423 def bulk_insert_sql(self, fields, num_values): 389 424 items_sql = "SELECT %s FROM DUAL" % ", ".join(["%s"] * len(fields)) … … 444 479 self.creation = DatabaseCreation(self) 445 480 self.introspection = DatabaseIntrospection(self) 446 481 self.validation = BaseDatabaseValidation(self) 482 483 def convert_schema(self, schema): 484 schema = schema or self.schema 485 if schema and self.test_schema_prefix: 486 return truncate_name('%s%s' % (self.test_schema_prefix, schema), 487 self.ops.max_name_length()) 488 
return schema 447 489 448 490 def check_constraints(self, table_names=None): 449 491 """ … … 476 518 conn_params = self.settings_dict['OPTIONS'].copy() 477 519 if 'use_returning_into' in conn_params: 478 520 del conn_params['use_returning_into'] 479 self.connection = Database.connect(conn_string, **conn_params) 521 try: 522 self.connection = Database.connect(conn_string, **conn_params) 523 except: 524 print conn_string 525 raise 480 526 cursor = FormatStylePlaceholderCursor(self.connection) 481 527 # Set oracle date to ansi date format. This only needs to execute 482 528 # once when we create a new connection. We also set the Territory … … 814 860 return s 815 861 816 862 817 def _get_sequence_reset_sql( ):863 def _get_sequence_reset_sql(with_schema=False): 818 864 # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. 819 return """ 865 if with_schema: 866 return """ 820 867 DECLARE 821 868 table_value integer; 822 869 seq_value integer; 823 870 BEGIN 824 871 SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; 872 SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM all_sequences 873 WHERE sequence_name = '%(sequence)s' AND sequence_owner = '%(schema)s'; 874 WHILE table_value > seq_value LOOP 875 SELECT "%(schema)s"."%(sequence)s".nextval INTO seq_value FROM dual; 876 END LOOP; 877 END; 878 /""" 879 else: 880 return """ 881 DECLARE 882 table_value integer; 883 seq_value integer; 884 BEGIN 885 SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; 825 886 SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences 826 887 WHERE sequence_name = '%(sequence)s'; 827 888 WHILE table_value > seq_value LOOP -
django/db/backends/oracle/introspection.py
1 from django.db import QName 1 2 from django.db.backends import BaseDatabaseIntrospection 2 3 import cx_Oracle 3 4 import re … … 37 38 return super(DatabaseIntrospection, self).get_field_type( 38 39 data_type, description) 39 40 40 def get_table_list(self, cursor): 41 "Returns a list of table names in the current database." 42 cursor.execute("SELECT TABLE_NAME FROM USER_TABLES") 43 return [row[0].lower() for row in cursor.fetchall()] 41 def get_visible_tables_list(self, cursor): 42 "Returns a list of visible tables" 43 return self.get_qualified_tables_list(cursor, [self.connection.settings_dict['USER']]) 44 44 45 def get_table_description(self, cursor, table_name): 45 def get_qualified_tables_list(self, cursor, schemas): 46 "Returns a list of table names in the given schemas list." 47 default_schema = self.connection.convert_schema(None) 48 if default_schema: 49 schemas.append(default_schema) 50 if not schemas: 51 return [] 52 param_list = ', '.join(['%s']*len(schemas)) 53 schemas = [s.upper() for s in schemas] 54 cursor.execute(""" 55 SELECT OWNER, TABLE_NAME 56 FROM ALL_TABLES WHERE OWNER in (%s)""" % param_list, schemas) 57 return [QName(row[0].lower(), row[1].lower(), True) 58 for row in cursor.fetchall()] 59 60 def get_table_description(self, cursor, qname): 46 61 "Returns a description of the table, with the DB-API cursor.description interface." 
47 cursor.execute("SELECT * FROM %s WHERE ROWNUM < 2" % self.connection.ops.quote_name(table_name)) 62 cursor.execute("SELECT * FROM %s WHERE ROWNUM < 2" 63 % self.connection.ops.qualified_name(qname)) 48 64 description = [] 49 65 for desc in cursor.description: 50 66 description.append((desc[0].lower(),) + desc[1:]) 51 67 return description 52 68 53 def table_name_converter(self, name): 54 "Table name comparison is case insensitive under Oracle" 69 def identifier_converter(self, name): 55 70 return name.lower() 56 71 57 def _name_to_index(self, cursor, table_name): 72 def qname_converter(self, qname, force_schema=False): 73 assert isinstance(qname, QName) 74 if qname.db_format and (qname.schema or not force_schema): 75 return qname 76 schema = self.connection.convert_schema(qname.schema) 77 if not schema and force_schema: 78 schema = self.connection.settings_dict['USER'] 79 return QName(schema, qname.table, True) 80 81 def _name_to_index(self, cursor, qname): 58 82 """ 59 83 Returns a dictionary of {field_name: field_index} for the given table. 60 84 Indexes are 0-based. 61 85 """ 62 return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])86 return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, qname))]) 63 87 64 def get_relations(self, cursor, table_name):88 def get_relations(self, cursor, qname): 65 89 """ 66 90 Returns a dictionary of {field_index: (field_index_other_table, other_table)} 67 91 representing all relationships to the given table. Indexes are 0-based. 
68 92 """ 69 table_name = table_name.upper() 93 qname = self.qname_converter(qname, force_schema=True) 94 schema, table = qname.schema.upper(), qname.table.upper() 70 95 cursor.execute(""" 71 SELECT ta.column_id - 1, tb.table_name, tb.column_id - 1 72 FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb, 73 user_tab_cols ta, user_tab_cols tb 74 WHERE user_constraints.table_name = %s AND 96 SELECT ta.column_id - 1, tb.table_name, tb.owner, tb.column_id - 1 97 FROM all_constraints, ALL_CONS_COLUMNS ca, ALL_CONS_COLUMNS cb, 98 all_tab_cols ta, all_tab_cols tb 99 WHERE all_constraints.table_name = %s AND 100 all_constraints.owner = %s AND 75 101 ta.table_name = %s AND 102 ta.owner = %s AND 76 103 ta.column_name = ca.column_name AND 77 104 ca.table_name = %s AND 78 user_constraints.constraint_name = ca.constraint_name AND 79 user_constraints.r_constraint_name = cb.constraint_name AND 105 ca.owner = %s AND 106 all_constraints.constraint_name = ca.constraint_name AND 107 all_constraints.r_constraint_name = cb.constraint_name AND 80 108 cb.table_name = tb.table_name AND 81 109 cb.column_name = tb.column_name AND 82 ca.position = cb.position""", [table_name, table_name, table_name]) 110 ca.position = cb.position""", [table, schema, table, schema, 111 table, schema]) 83 112 84 113 relations = {} 85 114 for row in cursor.fetchall(): 86 relations[row[0]] = (row[2], row[1].lower()) 115 relations[row[0]] = ( 116 row[3], QName(row[2].lower(), row[1].lower(), True)) 87 117 return relations 88 118 89 def get_indexes(self, cursor, table_name):119 def get_indexes(self, cursor, qname): 90 120 """ 91 121 Returns a dictionary of fieldname -> infodict for the given table, 92 122 where each infodict is in the format: … … 96 126 # This query retrieves each index on the given table, including the 97 127 # first associated field name 98 128 # "We were in the nick of time; you were in great peril!" 
129 qname = self.qname_converter(qname, force_schema=True) 130 schema, table = qname.schema.upper(), qname.table.upper() 131 # There can be multiple constraints for a given column, and we 132 # are interested if _any_ of them is unique or primary key, hence 133 # the group by + max. 99 134 sql = """\ 100 SELECT LOWER(all_tab_cols.column_name) AS column_name, 101 CASE user_constraints.constraint_type 102 WHEN 'P' THEN 1 ELSE 0 103 END AS is_primary_key, 104 CASE user_indexes.uniqueness 105 WHEN 'UNIQUE' THEN 1 ELSE 0 106 END AS is_unique 107 FROM all_tab_cols, user_cons_columns, user_constraints, user_ind_columns, user_indexes 108 WHERE all_tab_cols.column_name = user_cons_columns.column_name (+) 109 AND all_tab_cols.table_name = user_cons_columns.table_name (+) 110 AND user_cons_columns.constraint_name = user_constraints.constraint_name (+) 111 AND user_constraints.constraint_type (+) = 'P' 112 AND user_ind_columns.column_name (+) = all_tab_cols.column_name 113 AND user_ind_columns.table_name (+) = all_tab_cols.table_name 114 AND user_indexes.uniqueness (+) = 'UNIQUE' 115 AND user_indexes.index_name (+) = user_ind_columns.index_name 116 AND all_tab_cols.table_name = UPPER(%s) 135 SELECT column_name, max(is_primary_key), max(is_unique) 136 FROM ( 137 SELECT LOWER(all_tab_cols.column_name) AS column_name, 138 CASE all_constraints.constraint_type 139 WHEN 'P' THEN 1 ELSE 0 140 END AS is_primary_key, 141 CASE all_indexes.uniqueness 142 WHEN 'UNIQUE' THEN 1 ELSE 0 143 END AS is_unique 144 FROM all_tab_cols, all_cons_columns, all_constraints, all_ind_columns, all_indexes 145 WHERE all_tab_cols.column_name = all_cons_columns.column_name (+) 146 AND all_tab_cols.table_name = all_cons_columns.table_name (+) 147 AND all_tab_cols.owner = all_cons_columns.owner (+) 148 AND all_cons_columns.constraint_name = all_constraints.constraint_name (+) 149 AND all_cons_columns.owner = all_constraints.owner (+) 150 AND all_constraints.constraint_type (+) = 'P' 151 AND 
all_ind_columns.column_name (+) = all_tab_cols.column_name 152 AND all_ind_columns.table_name (+) = all_tab_cols.table_name 153 AND all_ind_columns.index_owner (+) = all_tab_cols.owner 154 AND all_indexes.uniqueness (+) = 'UNIQUE' 155 AND all_indexes.index_name (+) = all_ind_columns.index_name 156 AND all_indexes.table_owner (+) = all_ind_columns.index_owner 157 AND all_tab_cols.table_name = %s 158 AND all_tab_cols.owner = %s 159 ) 160 GROUP BY column_name 117 161 """ 118 cursor.execute(sql, [table _name])162 cursor.execute(sql, [table, schema]) 119 163 indexes = {} 120 164 for row in cursor.fetchall(): 121 165 indexes[row[0]] = {'primary_key': row[1], 'unique': row[2]} 122 166 return indexes 167 168 def get_schema_list(self, cursor): 169 cursor.execute("SELECT USERNAME FROM ALL_USERS") 170 return [r[0] for r in cursor.fetchall()] -
django/db/backends/oracle/creation.py
1 1 import sys 2 2 import time 3 from django.db import QName 3 4 from django.db.backends.creation import BaseDatabaseCreation 5 from django.core.management.color import no_style 4 6 5 7 TEST_DATABASE_PREFIX = 'test_' 6 8 PASSWORD = 'Im_a_lumberjack' … … 43 45 def __init__(self, connection): 44 46 super(DatabaseCreation, self).__init__(connection) 45 47 46 def _ create_test_db(self, verbosity=1, autoclobber=False):48 def _get_ddl_parameters(self): 47 49 TEST_NAME = self._test_database_name() 48 50 TEST_USER = self._test_database_user() 49 51 TEST_PASSWD = self._test_database_passwd() 50 52 TEST_TBLSPACE = self._test_database_tblspace() 51 53 TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp() 52 53 parameters = { 54 return { 54 55 'dbname': TEST_NAME, 55 56 'user': TEST_USER, 56 57 'password': TEST_PASSWD, … … 58 59 'tblspace_temp': TEST_TBLSPACE_TMP, 59 60 } 60 61 62 def _create_test_db(self, verbosity=1, autoclobber=False, schemas=[]): 63 parameters = self._get_ddl_parameters() 61 64 cursor = self.connection.cursor() 62 65 if self._test_database_create(): 63 66 try: … … 65 68 except Exception, e: 66 69 sys.stderr.write("Got an error creating the test database: %s\n" % e) 67 70 if not autoclobber: 68 confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME) 71 confirm = raw_input("It appears the test database, %(dbname)s, already " 72 "exists. Type 'yes' to delete it, or 'no' to cancel: " 73 % parameters) 69 74 if autoclobber or confirm == 'yes': 70 75 try: 71 76 if verbosity >= 1: … … 83 88 if verbosity >= 1: 84 89 print "Creating test user..." 85 90 try: 86 self._create_test_user(cursor, parameters, verbosity )91 self._create_test_user(cursor, parameters, verbosity, dba=bool(schemas)) 87 92 except Exception, e: 88 93 sys.stderr.write("Got an error creating the test user: %s\n" % e) 89 94 if not autoclobber: 90 confirm = raw_input("It appears the test user, %s, already exists. 
Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER) 95 confirm = raw_input("It appears the test user, %(user)s, already exists. " 96 "Type 'yes' to delete it, or 'no' to cancel: " 97 % parameters) 91 98 if autoclobber or confirm == 'yes': 92 99 try: 93 100 if verbosity >= 1: … … 95 102 self._destroy_test_user(cursor, parameters, verbosity) 96 103 if verbosity >= 1: 97 104 print "Creating test user..." 98 self._create_test_user(cursor, parameters, verbosity )105 self._create_test_user(cursor, parameters, verbosity, dba=bool(schemas)) 99 106 except Exception, e: 100 107 sys.stderr.write("Got an error recreating the test user: %s\n" % e) 101 108 sys.exit(2) … … 105 112 106 113 self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER'] 107 114 self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD'] 108 self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict['USER'] = TEST_USER109 self.connection.settings_dict['PASSWORD'] = TEST_PASSWD115 self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict['USER'] = parameters['user'] 116 self.connection.settings_dict['PASSWORD'] = parameters['password'] 110 117 111 118 return self.connection.settings_dict['NAME'] 112 119 … … 115 122 Destroy a test database, prompting the user for confirmation if the 116 123 database already exists. Returns the name of the test database created. 
117 124 """ 118 TEST_NAME = self._test_database_name() 119 TEST_USER = self._test_database_user() 120 TEST_PASSWD = self._test_database_passwd() 121 TEST_TBLSPACE = self._test_database_tblspace() 122 TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp() 125 parameters = self._get_ddl_parameters() 123 126 124 127 self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER'] 125 128 self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] 126 129 127 parameters = {128 'dbname': TEST_NAME,129 'user': TEST_USER,130 'password': TEST_PASSWD,131 'tblspace': TEST_TBLSPACE,132 'tblspace_temp': TEST_TBLSPACE_TMP,133 }134 135 130 cursor = self.connection.cursor() 136 131 time.sleep(1) # To avoid "database is being accessed by other users" errors. 132 if self._test_database_create(): 133 if verbosity >= 1: 134 print 'Destroying test database tables...' 135 self._execute_test_db_destruction(cursor, parameters, verbosity) 137 136 if self._test_user_create(): 138 137 if verbosity >= 1: 139 138 print 'Destroying test user...' 
140 139 self._destroy_test_user(cursor, parameters, verbosity) 141 if self._test_database_create():142 if verbosity >= 1:143 print 'Destroying test database tables...'144 self._execute_test_db_destruction(cursor, parameters, verbosity)145 140 self.connection.close() 146 141 147 142 def _execute_test_db_creation(self, cursor, parameters, verbosity): … … 159 154 ] 160 155 self._execute_statements(cursor, statements, parameters, verbosity) 161 156 162 def _create_test_user(self, cursor, parameters, verbosity ):157 def _create_test_user(self, cursor, parameters, verbosity, dba=False): 163 158 if verbosity >= 2: 164 159 print "_create_test_user(): username = %s" % parameters['user'] 160 parameters = parameters.copy() 161 parameters['dba'] = ', DBA' if dba else '' 162 165 163 statements = [ 166 164 """CREATE USER %(user)s 167 165 IDENTIFIED BY %(password)s 168 166 DEFAULT TABLESPACE %(tblspace)s 169 167 TEMPORARY TABLESPACE %(tblspace_temp)s 170 168 """, 171 """GRANT CONNECT, RESOURCE TO %(user)s""",169 """GRANT CONNECT, RESOURCE %(dba)s TO %(user)s""", 172 170 ] 173 171 self._execute_statements(cursor, statements, parameters, verbosity) 174 172 … … 186 184 print "_destroy_test_user(): user=%s" % parameters['user'] 187 185 print "Be patient. This can take some time..." 
188 186 statements = [ 189 'DROP USER %(user)s CASCADE',187 self.sql_destroy_schema(parameters['user'], style=None) 190 188 ] 191 189 self._execute_statements(cursor, statements, parameters, verbosity) 192 190 191 def sql_destroy_schema(self, schema, style): 192 return "DROP USER %s CASCADE" % schema 193 194 193 195 def _execute_statements(self, cursor, statements, parameters, verbosity): 194 196 for template in statements: 195 197 stmt = template % parameters … … 272 274 273 275 def set_autocommit(self): 274 276 self.connection.connection.autocommit = True 277 278 def _create_test_schemas(self, verbosity, schemas, autoclobber): 279 if not self._test_user_create(): 280 return [] 281 cursor = self.connection.cursor() 282 parameters = self._get_ddl_parameters() 283 parameters['authorization'] = parameters['user'] 284 conv = self.connection.introspection.identifier_converter 285 existing_schemas = [conv(s) for s in self.connection.introspection.get_schema_list(cursor)] 286 conflicts = [conv(s) for s in existing_schemas if conv(s) in schemas] 287 if conflicts: 288 print 'The following users already exists: %s' % ', '.join(conflicts) 289 if not autoclobber: 290 confirm = raw_input( 291 "Type 'yes' if you would like to try deleting these users " 292 "or 'no' to cancel: ") 293 if autoclobber or confirm == 'yes': 294 for schema in conflicts: 295 parameters['user'] = schema 296 if verbosity >= 1: 297 print "Destroying user %s" % schema 298 self._destroy_test_user(cursor, parameters, verbosity) 299 existing_schemas.remove(schema) 300 else: 301 print "Tests cancelled." 
302 sys.exit(1) 303 304 to_create = [s for s in schemas if s not in existing_schemas] 305 for schema in to_create: 306 parameters['user'] = schema 307 if verbosity >= 1: 308 print "Creating user %s" % schema 309 self._create_test_user(cursor, parameters, verbosity) 310 return to_create 311 312 def needs_separate_conn(self, from_qname, to_qname): 313 conv = self.connection.introspection.qname_converter 314 def_schema = conv(QName(None, None, False), force_schema=True).schema 315 from_qname = conv(from_qname, force_schema=True) 316 to_qname = conv(to_qname, force_schema=True) 317 return (def_schema != from_qname.schema 318 or to_qname.schema != from_qname.schema) 319 320 def sql_for_inline_foreign_key_references(self, field, known_models, style): 321 """ 322 Return the SQL snippet defining the foreign key reference for a field. 323 324 Oracle doesn't let you do cross-schema foreign keys, except if you 325 are connected to the "from" schema. Don't ask why. 326 """ 327 if self.needs_separate_conn(field.model._meta.qualified_name, 328 field.rel.to._meta.qualified_name): 329 return [], True 330 return super(DatabaseCreation, self).sql_for_inline_foreign_key_references(field, known_models, style) 331 332 def sql_for_pending_references(self, model, style, pending_references, 333 second_pass=False): 334 """ 335 Sad fact of life: On Oracle it is impossible to do cross-schema 336 references unless you explisitly grant REFERENCES on the referenced 337 table, and in addition the reference is made from the schema 338 containing the altered table (the one getting the new constraint). 339 To make this even nicer, it is impossible to do the GRANT using the 340 same user we are giving the REFERENCES right, as you can't GRANT 341 yourself. 
342 343 The solution we are using is to do the pending cross-schema references 344 in two stages after all tables have been created: 345 1) Connect as the foreign key's target table owner, and grant 346 REFERENCES to all users needing to do foreign keys. 347 2) Connect as the source table's owner, and create the foreign 348 keys. 349 To support this arrangement, we will create only non-cross-schema 350 references unless we are explicitly told by the second_pass flag 351 that it is safe to do the cross schema references. 352 353 It is possible to grant REFERENCES to public (but it seems other roles 354 will not work), but as we need to anyways do this multi-connection 355 dance it seems better to do the grants explicitly only when needed. 356 """ 357 if second_pass: 358 return super(DatabaseCreation, self).sql_for_pending_references( 359 model, style, pending_references) 360 # Split the "safe" and "unsafe" references apart, and call 361 # the super() method for the safe set. 362 cross_schema_refs = [] 363 single_schema_refs = [] 364 if model in pending_references: 365 for rel_class, f in pending_references[model]: 366 if self.needs_separate_conn(rel_class._meta.qualified_name, 367 model._meta.qualified_name): 368 cross_schema_refs.append((rel_class, f)) 369 else: 370 single_schema_refs.append((rel_class, f)) 371 sql = [] 372 if single_schema_refs: 373 pending_references[model] = single_schema_refs 374 sql = super(DatabaseCreation, self).sql_for_pending_references( 375 model, style, pending_references) 376 if cross_schema_refs: 377 pending_references[model] = cross_schema_refs 378 return sql 379 380 def post_create_pending_references(self, pending_references, as_sql=False): 381 # Build a dictionary: from_schema -> [(model, refs)...] 
382 references_to_schema = {} 383 sql = [] 384 conv = self.connection.introspection.qname_converter 385 for model, refs in pending_references.items(): 386 schema = conv(model._meta.qualified_name, force_schema=True).schema 387 if schema not in references_to_schema: 388 references_to_schema[schema] = [] 389 references_to_schema[schema].append((model, refs)) 390 # Pass 1: give grants. 391 for schema, all_refs in references_to_schema.items(): 392 grant_to = set() 393 for model, refs in all_refs: 394 for ref in refs: 395 to_user = conv(ref[0]._meta.qualified_name, 396 force_schema=True).schema 397 if to_user != schema: 398 grant_to.add((model, to_user)) 399 sql.extend(self._grant_references(schema, grant_to, as_sql)) 400 # Prepare for pass 2. This time we must connect as the user 401 # of the altered table's schema. So, first build a dictionary of 402 # from_schema -> [{model: [refs]}], that is, build a 403 # pending_references for each schema separately. 404 references_from_schema = {} 405 for model, refs in pending_references.items(): 406 for ref in refs: 407 schema = conv(ref[0]._meta.qualified_name, 408 force_schema=True).schema 409 if schema not in references_from_schema: 410 references_from_schema[schema] = {} 411 if model not in references_from_schema[schema]: 412 references_from_schema[schema][model] = [] 413 references_from_schema[schema][model].append(ref) 414 # Pass 2: create the actual references 415 for schema, ref_dict in references_from_schema.items(): 416 per_schema_sql = ['-- Connect as user "%s"' % schema] if as_sql else [] 417 for model, refs in ref_dict.items(): 418 ref_sql = self.sql_for_pending_references(model, no_style(), 419 ref_dict, second_pass=True) 420 per_schema_sql.extend(ref_sql) 421 if not as_sql: 422 self._run_sql_as_user(schema, per_schema_sql) 423 sql.extend(per_schema_sql) 424 return sql 425 426 def _grant_references(self, schema, grant_to, as_sql): 427 sql = ['-- Connect as user "%s"' % schema] if as_sql else [] 428 qn = 
self.connection.ops.quote_name 429 for model, user in grant_to: 430 sql.append('GRANT REFERENCES ON %s TO %s' 431 % (qn(model._meta.db_table), qn(user))) 432 if not as_sql: 433 self._run_sql_as_user(schema, sql) 434 return sql 435 436 def _run_sql_as_user(self, user, sql): 437 if not sql: 438 return 439 self.connection.close() 440 try: 441 old_settings = self.connection.settings_dict.copy() 442 self.connection.settings_dict['USER'] = user 443 cursor = self.connection.cursor() 444 for q in sql: 445 cursor.execute(q) 446 finally: 447 self.connection.close() 448 self.connection.settings_dict = old_settings -
django/db/backends/__init__.py
7 7 from contextlib import contextmanager 8 8 9 9 from django.conf import settings 10 from django.db import DEFAULT_DB_ALIAS 10 from django.db import DEFAULT_DB_ALIAS, QName 11 11 from django.db.backends import util 12 12 from django.db.transaction import TransactionManagementError 13 13 from django.utils.importlib import import_module … … 44 44 45 45 def __ne__(self, other): 46 46 return not self == other 47 48 def _get_schema(self): 49 return self.settings_dict['SCHEMA'] 50 schema = property(_get_schema) 47 51 52 def _get_test_schema_prefix(self): 53 return self.settings_dict['TEST_SCHEMA_PREFIX'] 54 test_schema_prefix = property(_get_test_schema_prefix) 55 56 def convert_schema(self, schema): 57 return schema or self.schema 58 48 59 def _commit(self): 49 60 if self.connection is not None: 50 61 return self.connection.commit() … … 311 322 def make_debug_cursor(self, cursor): 312 323 return util.CursorDebugWrapper(cursor, self) 313 324 325 def qname(self, model): 326 """ 327 Given a model class or instance, returns its current database table 328 name in schema qualified format. 329 """ 330 return self.ops.qualified_name(model._meta.qualified_name) 331 314 332 class BaseDatabaseFeatures(object): 315 333 allows_group_by_pk = False 316 334 # True if django.db.backend.utils.typecast_timestamp is used on values … … 404 422 _confirmed = False 405 423 supports_transactions = None 406 424 supports_stddev = None 425 supports_foreign_keys = True 407 426 can_introspect_foreign_keys = None 408 427 409 428 # Support for the DISTINCT ON clause 410 429 can_distinct_on_fields = False 411 430 431 # If the database has databases and schemas as different concepts 432 # or plain fakes schemas, it is safe to skip conflicts checking in 433 # testing on that database. 
434 namespaced_schemas = False 435 412 436 def __init__(self, connection): 413 437 self.connection = connection 414 438 … … 461 485 self.connection = connection 462 486 self._cache = None 463 487 464 def autoinc_sql(self, table, column):488 def autoinc_sql(self, qualified_name, column): 465 489 """ 466 490 Returns any SQL needed to support auto-incrementing primary keys, or 467 491 None if no SQL is necessary. … … 513 537 """ 514 538 return "DROP CONSTRAINT" 515 539 516 def drop_sequence_sql(self, table):540 def drop_sequence_sql(self, qualified_name): 517 541 """ 518 542 Returns any SQL necessary to drop the sequence for the given table. 519 543 Returns None if no SQL is necessary. … … 594 618 595 619 return smart_unicode(sql) % u_params 596 620 597 def last_insert_id(self, cursor, table_name, pk_name):621 def last_insert_id(self, cursor, qualified_name, pk_name): 598 622 """ 599 623 Given a cursor object that has just performed an INSERT statement into 600 624 a table that has an auto-incrementing ID, returns the newly created ID. … … 673 697 """ 674 698 raise NotImplementedError() 675 699 700 def qualified_name(self, qualified_name, from_model=False): 701 """ 702 Formats the given schema, table_name tuple into database's 703 qualified and quoted name format. The schema can be None. 704 705 We need to know if the name if from an existing database 706 table, or from a model. The reason is that some backends 707 do modifications to the name (schema-prefix the table name) 708 at runtime, and we must not do that repeatedly. Hence, if 709 the name comes from the DB and is already schema-prefixed, 710 then we must not schema-prefix it again. 711 """ 712 raise NotImplementedError 713 676 714 def random_function_sql(self): 677 715 """ 678 716 Returns a SQL expression that returns a random value. 
… … 718 756 """ 719 757 return '' 720 758 721 def sql_flush(self, style, tables, sequences ):759 def sql_flush(self, style, tables, sequences, from_db): 722 760 """ 723 761 Returns a list of SQL statements required to remove all data from 724 762 the given database tables (without actually removing the tables … … 726 764 727 765 The `style` argument is a Style object as returned by either 728 766 color_style() or no_style() in django.core.management.color. 767 768 The from_db argument tells if the names are coming from database 769 or from model._meta, needed for schema support. 729 770 """ 730 771 raise NotImplementedError() 731 772 … … 883 924 distinguish between a FloatField and IntegerField, for example.""" 884 925 return self.data_types_reverse[data_type] 885 926 886 def table_name_converter(self, name): 887 """Apply a conversion to the name for the purposes of comparison. 927 def qname_converter(self, qname, force_schema=False): 928 """ 929 Apply a conversion to the name for the purposes of comparison. 888 930 889 931 The default table name converter is for case sensitive comparison. 932 933 The given name must be a QName. If force_schema is set, then backends 934 should try to append a default schema name to the given name if 935 applicable for the backend. 890 936 """ 891 return name937 return qname 892 938 939 def identifier_converter(self, identifier): 940 """ 941 On some backends we need to do a little acrobaty to convert the names 942 from the DB and names from Models into consistent format. For example, 943 the return types from DB might be upper-case, but we need them 944 lower-case for comparisons. This method can be used to convert 945 identifiers into consistent format. 946 """ 947 return identifier 948 949 893 950 def table_names(self): 894 "Returns a list of names of all tables that existin the database."951 "Returns a list of names of all tables that are visible in the database." 
895 952 cursor = self.connection.cursor() 896 return self.get_ table_list(cursor)953 return self.get_visible_tables_list(cursor) 897 954 955 def get_visible_tables_list(self, cursoe): 956 """ 957 Returns all visible (in "search path") tables from the database. 958 """ 959 return [] 960 961 def qualified_names(self, schemas=None): 962 """ 963 Returns qualified table names for all schemas Django is using. 964 """ 965 cursor = self.connection.cursor() 966 if schemas is None: 967 return self.get_visible_tables_list(cursor) 968 else: 969 return 970 971 def all_qualified_names(self): 972 """ 973 Gets all table names from the database. Note that it is intentional 974 that visible tables appear both as unqualified and qualified tables. 975 """ 976 cursor = self.connection.cursor() 977 nonqualified_tables = self.get_visible_tables_list(cursor) 978 schemas = self.connection.creation.get_schemas() 979 schemas = [self.connection.convert_schema(s) for s in schemas] 980 qualified_tables = self.get_qualified_tables_list(cursor, schemas) 981 return set([QName(None, t, from_db) for _, t, from_db 982 in nonqualified_tables] 983 + qualified_tables) 984 985 def get_qualified_tables_list(self, cursor, schemas): 986 """ 987 Returns schema qualified names (as pair schema, tbl_name) of all 988 tables in the given schemas. 989 """ 990 return [] 991 992 def get_schema_list(self, cursor): 993 "Returns a list of schemas that exist in the database" 994 return [] 995 996 997 def schema_names(self): 998 "Returns a list of schemas that exist in the database" 999 cursor = self.connection.cursor() 1000 return self.get_schema_list(cursor) 1001 898 1002 def django_table_names(self, only_existing=False): 899 1003 """ 900 Returns a list of all table names that have associated Django models and901 are in INSTALLED_APPS.1004 Returns a list of all table's qualified names that have associated 1005 Django models and are in INSTALLED_APPS. 
902 1006 903 If only_existing is True, the resulting list will only include the tables904 t hat actually exist in the database.1007 If only_existing is True, the resulting list will only include the 1008 tables that actually exist in the database. 905 1009 """ 906 1010 from django.db import models, router 907 1011 tables = set() … … 911 1015 continue 912 1016 if not router.allow_syncdb(self.connection.alias, model): 913 1017 continue 914 tables.add(model._meta.db_table) 915 tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many]) 1018 tables.add(model._meta.qualified_name) 1019 tables.update([f.m2m_qualified_name() 1020 for f in model._meta.local_many_to_many]) 916 1021 tables = list(tables) 917 1022 if only_existing: 918 existing_tables = self.table_names() 919 tables = [ 920 t 921 for t in tables 922 if self.table_name_converter(t) in existing_tables 923 ] 924 return tables 1023 found_tables = [] 1024 existing_tables = self.all_qualified_names() 1025 found_tables.extend([ 1026 t for t in tables 1027 if self.qname_converter(t) in existing_tables 1028 ]) 1029 return found_tables 1030 else: 1031 return tables 925 1032 926 1033 def installed_models(self, tables): 927 "Returns a set of all models represented by the provided list of table names." 1034 """ 1035 Returns a set of all models represented by the provided list of table names. 1036 1037 The given tables are assumed to be pre-converted. 1038 """ 928 1039 from django.db import models, router 929 1040 all_models = [] 930 1041 for app in models.get_apps(): 931 1042 for model in models.get_models(app): 932 1043 if router.allow_syncdb(self.connection.alias, model): 933 1044 all_models.append(model) 934 tables = map(self.table_name_converter, tables)935 1045 return set([ 936 1046 m for m in all_models 937 if self. 
table_name_converter(m._meta.db_table) in tables1047 if self.qname_converter(m._meta.qualified_name) in tables 938 1048 ]) 939 1049 940 1050 def sequence_list(self): 941 "Returns a list of information about all DB sequences for all models in all apps." 1051 """ 1052 Returns a list of information about all DB sequences for all models 1053 in all apps. 1054 """ 942 1055 from django.db import models, router 943 1056 944 1057 apps = models.get_apps() … … 952 1065 continue 953 1066 for f in model._meta.local_fields: 954 1067 if isinstance(f, models.AutoField): 955 sequence_list.append({'table': model._meta.db_table, 'column': f.column}) 1068 qname = self.qname_converter(model._meta.qualified_name) 1069 sequence_list.append({'qname': qname, 'column': f.column}) 956 1070 break # Only one AutoField is allowed per model, so don't bother continuing. 957 1071 958 1072 for f in model._meta.local_many_to_many: 959 1073 # If this is an m2m using an intermediate table, 960 1074 # we don't need to reset the sequence. 961 1075 if f.rel.through is None: 962 sequence_list.append({'table': f.m2m_db_table(), 'column': None}) 1076 qname = self.qname_converter(f.m2m_qualified_name()) 1077 sequence_list.append({'qname': qname, 1078 'column': None}) 963 1079 964 1080 return sequence_list 965 1081 -
django/core/management/commands/syncdb.py
55 55 db = options.get('database') 56 56 connection = connections[db] 57 57 cursor = connection.cursor() 58 58 converter = connection.introspection.qname_converter 59 # We might fetch the same table multiple times - once as qualified and 60 # once as visible table (None, t). That is wanted, so that we can easily 61 # see if a model with schema = None is installed, as well as if model with 62 # locked schema is installed. 63 tables = connection.introspection.all_qualified_names() 64 59 65 # Get a list of already installed *models* so that references work right. 60 tables = connection.introspection.table_names()61 66 seen_models = connection.introspection.installed_models(tables) 62 67 created_models = set() 63 68 pending_references = {} … … 71 76 ] 72 77 def model_installed(model): 73 78 opts = model._meta 74 converter = connection.introspection.table_name_converter 75 return not ((converter(opts.db_table) in tables) or 76 (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) 79 return not ((converter(opts.qualified_name) in tables) or 80 (opts.auto_created and converter(opts.auto_created._meta.qualified_name) in tables)) 77 81 78 82 manifest = SortedDict( 79 83 (app_name, filter(model_installed, model_list)) … … 83 87 # Create the tables for each model 84 88 if verbosity >= 1: 85 89 print "Creating tables ..." 90 seen_schemas = connection.introspection.schema_names() 91 seen_schemas = set([connection.introspection.identifier_converter(s) 92 for s in seen_schemas]) 93 86 94 for app_name, model_list in manifest.items(): 87 95 for model in model_list: 88 96 # Create the model's database table, if it doesn't already exist. 
89 97 if verbosity >= 3: 90 98 print "Processing %s.%s model" % (app_name, model._meta.object_name) 91 sql, references = connection.creation.sql_create_model(model, self.style, seen_models) 99 sql = [] 100 schema = connection.convert_schema(model._meta.qualified_name[0]) 101 if schema and schema not in seen_schemas: 102 q = connection.creation.sql_create_schema(schema, self.style) 103 if q: 104 sql.append(q) 105 seen_schemas.add(schema) 106 table_sql, references = connection.creation.sql_create_model(model, self.style, seen_models) 107 sql.extend(table_sql) 92 108 seen_models.add(model) 93 109 created_models.add(model) 94 110 for refto, refs in references.items(): 95 111 pending_references.setdefault(refto, []).extend(refs) 96 112 if refto in seen_models: 97 sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references)) 98 sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references)) 113 ref_sql = connection.creation.sql_for_pending_references( 114 refto, self.style, pending_references) 115 if ref_sql: 116 sql.extend(ref_sql) 117 ref_sql = sql.extend(connection.creation.sql_for_pending_references( 118 model, self.style, pending_references)) 119 if ref_sql: 120 sql.extend(ref_sql) 99 121 if verbosity >= 1 and sql: 100 print "Creating table %s" % model._meta.db_table 122 if model._meta.db_schema: 123 print "Creating table %s.%s" % model._meta.qualified_name 124 else: 125 print "Creating table %s" % model._meta.db_table 101 126 for statement in sql: 102 127 cursor.execute(statement) 103 tables.a ppend(connection.introspection.table_name_converter(model._meta.db_table))128 tables.add(connection.introspection.qname_converter(model._meta.qualified_name)) 104 129 105 106 130 transaction.commit_unless_managed(using=db) 131 # We need to see if there are still some pending references left: this 132 # is possible on backends where we must do cross-schema references 133 # using different connections (hence 
also outside the above 134 # transaction) 135 if pending_references: 136 # Pass the references to connection-specific handler. 137 connection.creation.post_create_pending_references(pending_references) 107 138 108 # Send the post_syncdb signal, so individual apps can do whatever they need109 # to do at this point.139 # Send the post_syncdb signal, so individual apps can do whatever they 140 # need to do at this point. 110 141 emit_post_sync_signal(created_models, verbosity, interactive, db) 111 142 112 143 # The connection may have been closed by a syncdb handler. -
django/core/management/commands/loaddata.py
218 218 219 219 # Since we disabled constraint checks, we must manually check for 220 220 # any invalid keys that might have been added 221 table_names = [model._meta.db_table for model in models]222 connection.check_constraints(table_names= table_names)221 qualified_names = [model._meta.qualified_name for model in models] 222 connection.check_constraints(table_names=qualified_names) 223 223 224 224 except (SystemExit, KeyboardInterrupt): 225 225 raise -
django/core/management/commands/inspectdb.py
27 27 def handle_inspection(self, options): 28 28 connection = connections[options.get('database')] 29 29 30 table2model = lambda table_name: table_name.title().replace('_', '').replace(' ', '').replace('-', '')30 table2model = lambda qname: qname[1].title().replace('_', '').replace(' ', '').replace('-', '') 31 31 32 32 cursor = connection.cursor() 33 33 yield "# This is an auto-generated Django model module." … … 41 41 yield '' 42 42 yield 'from %s import models' % self.db_module 43 43 yield '' 44 for table_name in connection.introspection.get_table_list(cursor): 45 yield 'class %s(models.Model):' % table2model(table_name) 44 inspect = connection.introspection 45 for qname in inspect.get_visible_tables_list(cursor): 46 yield 'class %s(models.Model):' % table2model(qname) 46 47 try: 47 relations = connection.introspection.get_relations(cursor, table_name)48 relations = inspect.get_relations(cursor, qname) 48 49 except NotImplementedError: 49 50 relations = {} 50 51 try: 51 indexes = connection.introspection.get_indexes(cursor, table_name)52 indexes = inspect.get_indexes(cursor, qname) 52 53 except NotImplementedError: 53 54 indexes = {} 54 for i, row in enumerate( connection.introspection.get_table_description(cursor, table_name)):55 for i, row in enumerate(inspect.get_table_description(cursor, qname)): 55 56 column_name = row[0] 56 57 att_name = column_name.lower() 57 58 comment_notes = [] # Holds Field notes, to be displayed in a Python comment. … … 82 83 comment_notes.append('Field name made lowercase.') 83 84 84 85 if i in relations: 85 rel_to = relations[i][1] == table_name and "'self'" or table2model(relations[i][1])86 rel_to = relations[i][1] == qname and "'self'" or table2model(relations[i][1]) 86 87 field_type = 'ForeignKey(%s' % rel_to 87 88 if att_name.endswith('_id'): 88 89 att_name = att_name[:-3] … … 91 92 else: 92 93 # Calling `get_field_type` to get the field type string and any 93 94 # additional paramters and notes. 
94 field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)95 field_type, field_params, field_notes = self.get_field_type(connection, qname, row) 95 96 extra_params.update(field_params) 96 97 comment_notes.extend(field_notes) 97 98 … … 118 119 extra_params['blank'] = True 119 120 if not field_type in ('TextField(', 'CharField('): 120 121 extra_params['null'] = True 121 122 122 field_desc = '%s = models.%s' % (att_name, field_type) 123 123 if extra_params: 124 124 if not field_desc.endswith('('): … … 128 128 if comment_notes: 129 129 field_desc += ' # ' + ' '.join(comment_notes) 130 130 yield ' %s' % field_desc 131 for meta_line in self.get_meta( table_name):131 for meta_line in self.get_meta(qname): 132 132 yield meta_line 133 133 134 def get_field_type(self, connection, table_name, row):134 def get_field_type(self, connection, qname, row): 135 135 """ 136 136 Given the database connection, the table name, and the cursor row 137 137 description, this routine will return the given field type name, as … … 162 162 163 163 return field_type, field_params, field_notes 164 164 165 def get_meta(self, table_name):165 def get_meta(self, qname): 166 166 """ 167 167 Return a sequence comprising the lines of code necessary 168 168 to construct the inner Meta class for the model corresponding 169 169 to the given database table name. 170 170 """ 171 171 return [' class Meta:', 172 ' db_table = %r' % table_name, 172 ' db_table = %r' % qname[1], 173 ' db_schema = %r' % qname[0] or 'None', 173 174 ''] -
django/core/management/validation.py
19 19 validates all models of all installed apps. Writes errors, if any, to outfile. 20 20 Returns number of errors. 21 21 """ 22 from django.conf import settings23 22 from django.db import models, connection 24 23 from django.db.models.loading import get_app_errors 25 24 from django.db.models.fields.related import RelatedObject … … 32 31 33 32 for cls in models.get_models(app): 34 33 opts = cls._meta 35 34 36 35 # Do field-specific validation. 37 36 for f in opts.local_fields: 38 37 if f.name == 'id' and not f.primary_key and opts.pk.name == 'id': -
django/core/management/sql.py
3 3 4 4 from django.conf import settings 5 5 from django.core.management.base import CommandError 6 from django.db import models 6 from django.db import models, QName 7 7 from django.db.models import get_models 8 8 9 9 def sql_create(app, style, connection): 10 10 "Returns a list of the CREATE TABLE SQL statements for the given app." 11 12 11 if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy': 13 12 # This must be the "dummy" database backend, which means the user 14 13 # hasn't set ENGINE for the database. … … 23 22 # we can be conservative). 24 23 app_models = models.get_models(app, include_auto_created=True) 25 24 final_output = [] 26 tables = connection.introspection. table_names()25 tables = connection.introspection.all_qualified_names() 27 26 known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models]) 28 27 pending_references = {} 29 28 30 29 for model in app_models: 30 schema = connection.convert_schema(model._meta.db_schema) 31 if schema: 32 output = connection.creation.sql_create_schema(schema, style) 33 if output: 34 final_output.append(output) 31 35 output, references = connection.creation.sql_create_model(model, style, known_models) 32 36 final_output.extend(output) 33 37 for refto, refs in references.items(): … … 63 67 64 68 # Figure out which tables already exist 65 69 if cursor: 66 table_names = connection.introspection. get_table_list(cursor)70 table_names = connection.introspection.all_qualified_names(converted=True) 67 71 else: 68 72 table_names = [] 69 73 … … 75 79 references_to_delete = {} 76 80 app_models = models.get_models(app, include_auto_created=True) 77 81 for model in app_models: 78 if cursor and connection.introspection. 
table_name_converter(model._meta.db_table) in table_names:82 if cursor and connection.introspection.qname_converter(model._meta.qualified_name) in table_names: 79 83 # The table exists, so it needs to be dropped 80 84 opts = model._meta 81 85 for f in opts.local_fields: … … 85 89 to_delete.add(model) 86 90 87 91 for model in app_models: 88 if connection.introspection. table_name_converter(model._meta.db_table) in table_names:92 if connection.introspection.qname_converter(model._meta.qualified_name) in table_names: 89 93 output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style)) 90 94 91 95 # Close database connection explicitly, in case this output is being piped … … 106 110 if only_django: 107 111 tables = connection.introspection.django_table_names(only_existing=True) 108 112 else: 109 tables = connection.introspection.table_names() 113 tables = connection.introspection.all_qualified_names() 114 if [t for t in tables if not isinstance(t, QName)]: 115 import ipdb; ipdb.set_trace() 110 116 statements = connection.ops.sql_flush( 111 117 style, tables, connection.introspection.sequence_list() 112 118 ) … … 145 151 if opts.managed: 146 152 post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')] 147 153 for f in post_sql_fields: 148 output.extend(f.post_create_sql(style, model._meta. db_table))154 output.extend(f.post_create_sql(style, model._meta.qualified_name)) 149 155 150 156 # Some backends can't execute more than one SQL statement at a time, 151 157 # so split into separate statements. -
django/contrib/contenttypes/generic.py
173 173 def m2m_db_table(self): 174 174 return self.rel.to._meta.db_table 175 175 176 def m2m_qualified_name(self): 177 return self.rel.to._meta.qualified_name 178 179 def m2m_db_schema(self): 180 return self.rel.to._meta.db_schema 181 176 182 def m2m_column_name(self): 177 183 return self.object_id_field_name 178 184 -
django/test/simple.py
271 271 dependencies = {} 272 272 for alias in connections: 273 273 connection = connections[alias] 274 if not connection.settings_dict['TEST_SCHEMA_PREFIX'] is None: 275 connection.settings_dict['TEST_SCHEMA_PREFIX'] = '%s_' % alias 274 276 if connection.settings_dict['TEST_MIRROR']: 275 277 # If the database is marked as a test mirror, save 276 278 # the alias. … … 301 303 test_databases.items(), dependencies): 302 304 # Actually create the database for the first connection 303 305 connection = connections[aliases[0]] 304 old_names.append((connection, db_name, True)) 305 test_db_name = connection.creation.create_test_db( 306 test_db_name, created_schemas = connection.creation.create_test_db( 306 307 self.verbosity, autoclobber=not self.interactive) 308 old_names.append((connection, db_name, True, created_schemas)) 307 309 for alias in aliases[1:]: 308 310 connection = connections[alias] 309 311 if db_name: 310 old_names.append((connection, db_name, False ))312 old_names.append((connection, db_name, False, [])) 311 313 connection.settings_dict['NAME'] = test_db_name 312 314 else: 313 315 # If settings_dict['NAME'] isn't defined, we have a backend 314 316 # where the name isn't important -- e.g., SQLite, which 315 317 # uses :memory:. Force create the database instead of 316 318 # assuming it's a duplicate. 317 old_names.append((connection, db_name, True ))319 old_names.append((connection, db_name, True, [])) 318 320 connection.creation.create_test_db( 319 321 self.verbosity, autoclobber=not self.interactive) 320 322 … … 335 337 Destroys all the non-mirror databases. 
336 338 """ 337 339 old_names, mirrors = old_config 338 for connection, old_name, destroy in old_names:340 for connection, old_name, destroy, created_schemas in old_names: 339 341 if destroy: 340 connection.creation.destroy_test_db(old_name, self.verbosity)342 connection.creation.destroy_test_db(old_name, created_schemas, self.verbosity) 341 343 342 344 def teardown_test_environment(self, **kwargs): 343 345 unittest.removeHandler() -
tests/modeltests/raw_query/tests.py
2 2 3 3 from datetime import date 4 4 5 from django.db.models.sql.query import InvalidQuery 5 from django.db.models.query_utils import InvalidQuery 6 from django.db import connection 6 7 from django.test import TestCase 7 8 8 9 from .models import Author, Book, Coffee, Reviewer, FriendlyAuthor … … 59 60 """ 60 61 Basic test of raw query with a simple database query 61 62 """ 62 query = "SELECT * FROM raw_query_author"63 query = "SELECT * FROM %s" % connection.qname(Author) 63 64 authors = Author.objects.all() 64 65 self.assertSuccessfulRawQuery(Author, query, authors) 65 66 … … 68 69 Raw queries are lazy: they aren't actually executed until they're 69 70 iterated over. 70 71 """ 71 q = Author.objects.raw('SELECT * FROM raw_query_author')72 q = Author.objects.raw('SELECT * FROM %s' % connection.qname(Author)) 72 73 self.assertTrue(q.query.cursor is None) 73 74 list(q) 74 75 self.assertTrue(q.query.cursor is not None) … … 77 78 """ 78 79 Test of a simple raw query against a model containing a foreign key 79 80 """ 80 query = "SELECT * FROM raw_query_book"81 query = "SELECT * FROM %s" % connection.qname(Book) 81 82 books = Book.objects.all() 82 83 self.assertSuccessfulRawQuery(Book, query, books) 83 84 … … 86 87 Test of a simple raw query against a model containing a field with 87 88 db_column defined. 
88 89 """ 89 query = "SELECT * FROM raw_query_coffee"90 query = "SELECT * FROM %s" % connection.qname(Coffee) 90 91 coffees = Coffee.objects.all() 91 92 self.assertSuccessfulRawQuery(Coffee, query, coffees) 92 93 … … 102 103 ) 103 104 104 105 for select in selects: 105 query = "SELECT %s FROM raw_query_author" % select106 query = "SELECT %s FROM %s" % (select, connection.qname(Author)) 106 107 authors = Author.objects.all() 107 108 self.assertSuccessfulRawQuery(Author, query, authors) 108 109 … … 111 112 Test of raw query's optional ability to translate unexpected result 112 113 column names to specific model fields 113 114 """ 114 query = "SELECT first_name AS first, last_name AS last, dob, id FROM raw_query_author" 115 query = ("SELECT first_name AS first, last_name AS last, dob, id FROM %s" 116 % connection.qname(Author)) 115 117 translations = {'first': 'first_name', 'last': 'last_name'} 116 118 authors = Author.objects.all() 117 119 self.assertSuccessfulRawQuery(Author, query, authors, translations=translations) … … 120 122 """ 121 123 Test passing optional query parameters 122 124 """ 123 query = "SELECT * FROM raw_query_author WHERE first_name = %s"125 query = "SELECT * FROM %s WHERE first_name = %%s" % connection.qname(Author) 124 126 author = Author.objects.all()[2] 125 127 params = [author.first_name] 126 128 results = list(Author.objects.raw(query, params=params)) … … 132 134 """ 133 135 Test of a simple raw query against a model containing a m2m field 134 136 """ 135 query = "SELECT * FROM raw_query_reviewer"137 query = "SELECT * FROM %s" % connection.qname(Reviewer) 136 138 reviewers = Reviewer.objects.all() 137 139 self.assertSuccessfulRawQuery(Reviewer, query, reviewers) 138 140 … … 140 142 """ 141 143 Test to insure that extra translations are ignored. 
142 144 """ 143 query = "SELECT * FROM raw_query_author"145 query = "SELECT * FROM %s" % connection.qname(Author) 144 146 translations = {'something': 'else'} 145 147 authors = Author.objects.all() 146 148 self.assertSuccessfulRawQuery(Author, query, authors, translations=translations) 147 149 148 150 def testMissingFields(self): 149 query = "SELECT id, first_name, dob FROM raw_query_author"151 query = "SELECT id, first_name, dob FROM %s" % connection.qname(Author) 150 152 for author in Author.objects.raw(query): 151 153 self.assertNotEqual(author.first_name, None) 152 154 # last_name isn't given, but it will be retrieved on demand 153 155 self.assertNotEqual(author.last_name, None) 154 156 155 157 def testMissingFieldsWithoutPK(self): 156 query = "SELECT first_name, dob FROM raw_query_author"158 query = "SELECT first_name, dob FROM %s" % connection.qname(Author) 157 159 try: 158 160 list(Author.objects.raw(query)) 159 161 self.fail('Query without primary key should fail') … … 161 163 pass 162 164 163 165 def testAnnotations(self): 164 query = "SELECT a.*, count(b.id) as book_count FROM raw_query_author a LEFT JOIN raw_query_book b ON a.id = b.author_id GROUP BY a.id, a.first_name, a.last_name, a.dob ORDER BY a.id"166 query = "SELECT a.*, count(b.id) as book_count FROM %s a LEFT JOIN %s b ON a.id = b.author_id GROUP BY a.id, a.first_name, a.last_name, a.dob ORDER BY a.id" % (connection.qname(Author), connection.qname(Book)) 165 167 expected_annotations = ( 166 168 ('book_count', 3), 167 169 ('book_count', 0), … … 172 174 self.assertSuccessfulRawQuery(Author, query, authors, expected_annotations) 173 175 174 176 def testWhiteSpaceQuery(self): 175 query = " SELECT * FROM raw_query_author"177 query = " SELECT * FROM %s" % connection.qname(Author) 176 178 authors = Author.objects.all() 177 179 self.assertSuccessfulRawQuery(Author, query, authors) 178 180 179 181 def testMultipleIterations(self): 180 query = "SELECT * FROM raw_query_author"182 query = "SELECT * FROM %s" 
% connection.qname(Author) 181 183 normal_authors = Author.objects.all() 182 184 raw_authors = Author.objects.raw(query) 183 185 … … 197 199 198 200 def testGetItem(self): 199 201 # Indexing on RawQuerySets 200 query = "SELECT * FROM raw_query_author ORDER BY id ASC"202 query = "SELECT * FROM %s ORDER BY id ASC" % connection.qname(Author) 201 203 third_author = Author.objects.raw(query)[2] 202 204 self.assertEqual(third_author.first_name, 'Bob') 203 205 … … 211 213 # Wesley was bron 212 214 f = FriendlyAuthor.objects.create(first_name="Wesley", last_name="Chun", 213 215 dob=date(1962, 10, 28)) 214 query = "SELECT * FROM raw_query_friendlyauthor"216 query = "SELECT * FROM %s" % connection.qname(FriendlyAuthor) 215 217 self.assertEqual( 216 218 [o.pk for o in FriendlyAuthor.objects.raw(query)], [f.pk] 217 219 ) 218 220 219 221 def test_query_count(self): 220 222 self.assertNumQueries(1, 221 list, Author.objects.raw("SELECT * FROM raw_query_author")223 list, Author.objects.raw("SELECT * FROM %s" % connection.qname(Author)) 222 224 ) -
tests/modeltests/timezones/tests.py
264 264 dt = datetime.datetime(2011, 9, 1, 13, 20, 30) 265 265 event = Event.objects.create(dt=dt) 266 266 self.assertQuerysetEqual( 267 Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]), 267 Event.objects.raw('SELECT * FROM %s WHERE dt = %%s' 268 % connection.qname(Event), 269 [dt]), 268 270 [event], 269 271 transform=lambda d: d) 270 272 … … 476 478 dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) 477 479 event = Event.objects.create(dt=dt) 478 480 self.assertQuerysetEqual( 479 Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]), 481 Event.objects.raw('SELECT * FROM %s WHERE dt = %%s' 482 % connection.qname(Event), [dt]), 480 483 [event], 481 484 transform=lambda d: d) 482 485 -
tests/modeltests/proxy_models/tests.py
3 3 from django.contrib.contenttypes.models import ContentType 4 4 from django.core import management 5 5 from django.core.exceptions import FieldError 6 from django.db import models, DEFAULT_DB_ALIAS 6 from django.db import models, DEFAULT_DB_ALIAS, transaction, IntegrityError 7 7 from django.db.models import signals 8 from django.test import TestCase 8 from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature 9 9 10 10 11 11 from .models import (MyPerson, Person, StatusPerson, LowerStatusPerson, … … 326 326 management.call_command('loaddata', 'mypeople.json', verbosity=0, commit=False) 327 327 p = MyPerson.objects.get(pk=100) 328 328 self.assertEqual(p.name, 'Elvis Presley') 329 330 class TransactionalProxyModelTests(TransactionTestCase): 331 @skipUnlessDBFeature('supports_foreign_keys') 332 def test_proxy_fk(self): 333 """ 334 Test that the DB contains proper foreign keys for proxy model references. 335 """ 336 @transaction.commit_on_success 337 def create_failing_pk(): 338 t = TrackerUser.objects.create(status='bar') 339 Improvement.objects.create(summary='foof', version='foof', 340 reporter_id=1, associated_bug_id=1, 341 assignee=t) 342 self.assertRaises(IntegrityError, create_failing_pk) -
tests/modeltests/select_for_update/tests.py
36 36 # issuing a SELECT ... FOR UPDATE will block. 37 37 new_connections = ConnectionHandler(settings.DATABASES) 38 38 self.new_connection = new_connections[DEFAULT_DB_ALIAS] 39 40 39 # We need to set settings.DEBUG to True so we can capture 41 40 # the output SQL to examine. 42 41 self._old_debug = settings.DEBUG … … 62 61 # end_blocking_transaction() should be called. 63 62 self.cursor = self.new_connection.cursor() 64 63 sql = 'SELECT * FROM %(db_table)s %(for_update)s;' % { 65 'db_table': Person._meta.db_table,64 'db_table': self.new_connection.qname(Person), 66 65 'for_update': self.new_connection.ops.for_update_sql(), 67 66 } 68 67 self.cursor.execute(sql, ()) 69 result =self.cursor.fetchone()68 self.cursor.fetchone() 70 69 71 70 def end_blocking_transaction(self): 72 71 # Roll back the blocking transaction. … … 241 240 list( 242 241 Person.objects.raw( 243 242 'SELECT * FROM %s %s' % ( 244 Person._meta.db_table,243 connection.qname(Person), 245 244 connection.ops.for_update_sql(nowait=True) 246 245 ) 247 246 ) -
tests/modeltests/unmanaged_models/tests.py
48 48 """ 49 49 The intermediary table between two unmanaged models should not be created. 50 50 """ 51 table = Unmanaged2._meta.get_field('mm').m2m_db_table() 52 tables = connection.introspection.table_names() 53 self.assertTrue(table not in tables, "Table '%s' should not exist, but it does." % table) 51 conv = connection.introspection.qname_converter 52 table = conv(Unmanaged2.mm.through._meta.qualified_name) 53 tables = connection.introspection.all_qualified_names() 54 self.assertTrue(table not in tables, "Table '%s' should not exist, but it does." % table[1]) 54 55 55 56 def test_many_to_many_between_unmanaged_and_managed(self): 56 57 """ 57 58 An intermediary table between a managed and an unmanaged model should be created. 58 59 """ 59 table = Managed1._meta.get_field('mm').m2m_db_table() 60 tables = connection.introspection.table_names() 61 self.assertTrue(table in tables, "Table '%s' does not exist." % table) 60 conv = connection.introspection.qname_converter 61 table = conv(Managed1.mm.through._meta.qualified_name) 62 tables = connection.introspection.all_qualified_names() 63 self.assertTrue(table in tables, "Table '%s' does not exist." % table[1]) -
tests/modeltests/many_to_one/tests.py
4 4 from datetime import datetime 5 5 6 6 from django.core.exceptions import MultipleObjectsReturned 7 from django.db import connection 7 8 from django.test import TestCase 8 9 from django.utils.translation import ugettext_lazy 9 10 … … 169 170 # The automatically joined table has a predictable name. 170 171 self.assertQuerysetEqual( 171 172 Article.objects.filter(reporter__first_name__exact='John').extra( 172 where=[" many_to_one_reporter.last_name='Smith'"]),173 where=["%s.last_name='Smith'" % connection.qname(Reporter)]), 173 174 [ 174 175 "<Article: John's second story>", 175 176 "<Article: This is a test>", … … 177 178 # ... and should work fine with the unicode that comes out of forms.Form.cleaned_data 178 179 self.assertQuerysetEqual( 179 180 Article.objects.filter(reporter__first_name__exact='John' 180 ).extra(where=["many_to_one_reporter.last_name='%s'" % u'Smith']), 181 ).extra(where=["%s.last_name='%s'" % 182 (connection.qname(Reporter), u'Smith')]), 181 183 [ 182 184 "<Article: John's second story>", 183 185 "<Article: This is a test>", -
tests/modeltests/prefetch_related/tests.py
372 372 l = [a.authorwithage for a in Author.objects.prefetch_related('authorwithage')] 373 373 374 374 # Regression for #18090: the prefetching query must include an IN clause. 375 self.assertIn('authorwithage', connection.queries[-1]['sql']) 376 self.assertIn(' IN ', connection.queries[-1]['sql']) 375 executed_sql = connection.queries[-1]['sql'].lower() 376 self.assertIn('authorwithage', executed_sql) 377 self.assertIn(' in ', executed_sql) 377 378 378 379 self.assertEqual(l, [a.authorwithage for a in Author.objects.all()]) 379 380 -
tests/modeltests/select_related/tests.py
1 1 from __future__ import absolute_import 2 2 3 from django.db import connection 3 4 from django.test import TestCase 4 5 5 6 from .models import Domain, Kingdom, Phylum, Klass, Order, Family, Genus, Species … … 120 121 121 122 def test_select_related_with_extra(self): 122 123 s = Species.objects.all().select_related(depth=1)\ 123 .extra(select={'a': ' select_related_species.id + 10'})[0]124 .extra(select={'a': '%s.id + 10' % connection.qname(Species)})[0] 124 125 self.assertEqual(s.id + 10, s.a) 125 126 126 127 def test_certain_fields(self): -
tests/modeltests/custom_methods/models.py
31 31 cursor = connection.cursor() 32 32 cursor.execute(""" 33 33 SELECT id, headline, pub_date 34 FROM custom_methods_article 35 WHERE pub_date = %s 36 AND id != %s""", [connection.ops.value_to_db_date(self.pub_date), 37 self.id]) 34 FROM %s 35 WHERE pub_date = %%s 36 AND id != %%s""" % connection.qname(self), 37 [connection.ops.value_to_db_date(self.pub_date), 38 self.id]) 38 39 return [self.__class__(*row) for row in cursor.fetchall()] -
tests/modeltests/transactions/tests.py
164 164 class TransactionRollbackTests(TransactionTestCase): 165 165 def execute_bad_sql(self): 166 166 cursor = connection.cursor() 167 cursor.execute("INSERT INTO transactions_reporter (first_name, last_name) VALUES ('Douglas', 'Adams');") 167 tbl = connection.qname(Reporter) 168 cursor.execute("INSERT INTO %s (first_name, last_name) VALUES ('Douglas', 'Adams');" % tbl) 168 169 transaction.set_dirty() 169 170 170 171 @skipUnlessDBFeature('requires_rollback_on_dirty_transaction') … … 305 306 with self.assertRaises(IntegrityError): 306 307 with transaction.commit_on_success(): 307 308 cursor = connection.cursor() 308 cursor.execute("INSERT INTO transactions_reporter (first_name, last_name) VALUES ('Douglas', 'Adams');") 309 tbl = connection.qname(Reporter) 310 cursor.execute("INSERT INTO %s (first_name, last_name) VALUES ('Douglas', 'Adams');" % tbl) 309 311 transaction.set_dirty() 310 312 transaction.rollback() -
tests/regressiontests/transactions_regress/tests.py
25 25 def raw_sql(): 26 26 "Write a record using raw sql under a commit_on_success decorator" 27 27 cursor = connection.cursor() 28 cursor.execute("INSERT into transactions_regress_mod (id,fld) values (17,18)") 28 tbl = connection.qname(Mod) 29 cursor.execute("INSERT into %s (id,fld) values (17,18)" % tbl) 29 30 30 31 raw_sql() 31 32 # Rollback so that if the decorator didn't commit, the record is unwritten … … 116 117 be committed. 117 118 """ 118 119 cursor = connection.cursor() 119 cursor.execute("INSERT into transactions_regress_mod (id,fld) values (1,2)") 120 tbl = connection.qname(Mod) 121 cursor.execute("INSERT into %s (id,fld) values (1,2)" % tbl) 120 122 transaction.rollback() 121 cursor.execute("INSERT into transactions_regress_mod (id,fld) values (1,2)")123 cursor.execute("INSERT into %s (id,fld) values (1,2)" % tbl) 122 124 123 125 reuse_cursor_ref() 124 126 # Rollback so that if the decorator didn't commit, the record is unwritten -
tests/regressiontests/queries/tests.py
6 6 7 7 from django.conf import settings 8 8 from django.core.exceptions import FieldError 9 from django.contrib.sites.models import Site 9 10 from django.db import DatabaseError, connection, connections, DEFAULT_DB_ALIAS 10 11 from django.db.models import Count 11 12 from django.db.models.query import Q, ITER_CHUNK_SIZE, EmptyQuerySet … … 387 388 388 389 def test_ticket2496(self): 389 390 self.assertQuerysetEqual( 390 Item.objects.extra(tables=[ 'queries_author']).select_related().order_by('name')[:1],391 Item.objects.extra(tables=[Author._meta.qualified_name]).select_related().order_by('name')[:1], 391 392 ['<Item: four>'] 392 393 ) 393 394 … … 507 508 self.assertEqual(d, {'a': u'one', 'b': u'two'}) 508 509 509 510 # Order by the number of tags attached to an item. 510 l = Item.objects.extra(select={'count': 'select count(*) from queries_item_tags where queries_item_tags.item_id = queries_item.id'}).order_by('-count') 511 l = Item.objects.extra(select={ 512 'count':'select count(*) from %s where %s.item_id = %s.id' % 513 (connection.qname(Item.tags.through), connection.qname(Item.tags.through), 514 connection.qname(Item)) 515 }).order_by('-count') 511 516 self.assertEqual([o.count for o in l], [2, 2, 1, 0]) 512 517 513 518 def test_ticket6154(self): … … 577 582 578 583 def test_ticket7098(self): 579 584 # Make sure semi-deprecated ordering by related models syntax still 580 # works. 581 self.assertValueQuerysetEqual( 582 Item.objects.values('note__note').order_by('queries_note.note', 'id'), 583 [{'note__note': u'n2'}, {'note__note': u'n3'}, {'note__note': u'n3'}, {'note__note': u'n3'}] 584 ) 585 # works. 586 # Skip this test if schema support is in effect - there is little point to fix the 587 # deprecated .order_by() notation to support schemas. 588 if not connection.schema: 589 self.assertValueQuerysetEqual( 590 # Need to remove the quotes from the table name for this test... 
591 Item.objects.values('note__note').order_by('%s.note' % connection.qname(Note)[1:-1], 'id'), 592 [{'note__note': u'n2'}, {'note__note': u'n3'}, {'note__note': u'n3'}, {'note__note': u'n3'}] 593 ) 585 594 586 595 def test_ticket7096(self): 587 596 # Make sure exclude() with multiple conditions continues to work. … … 1216 1225 # Ordering of extra() pieces is possible, too and you can mix extra 1217 1226 # fields and model fields in the ordering. 1218 1227 self.assertQuerysetEqual( 1219 Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']), 1228 Ranking.objects.extra(tables=[Site._meta.qualified_name], 1229 order_by=['-%s.id' % connection.qname(Site), 'rank']), 1220 1230 ['<Ranking: 1: a3>', '<Ranking: 2: a2>', '<Ranking: 3: a1>'] 1221 1231 ) 1222 1232 … … 1251 1261 1252 1262 def test_ticket7045(self): 1253 1263 # Extra tables used to crash SQL construction on the second use. 1254 qs = Ranking.objects.extra(tables=[ 'django_site'])1264 qs = Ranking.objects.extra(tables=[connection.qname(Site)]) 1255 1265 qs.query.get_compiler(qs.db).as_sql() 1256 1266 # test passes if this doesn't raise an exception. 1257 1267 qs.query.get_compiler(qs.db).as_sql() -
tests/regressiontests/delete_regress/tests.py
17 17 def setUp(self): 18 18 # Create a second connection to the default database 19 19 conn_settings = settings.DATABASES[DEFAULT_DB_ALIAS] 20 # TODO: there must be a better way to do this copying. .deepcopy() 21 # perhaps? 20 22 self.conn2 = backend.DatabaseWrapper({ 21 23 'HOST': conn_settings['HOST'], 22 24 'NAME': conn_settings['NAME'], … … 25 27 'PORT': conn_settings['PORT'], 26 28 'USER': conn_settings['USER'], 27 29 'TIME_ZONE': settings.TIME_ZONE, 30 'SCHEMA': conn_settings['SCHEMA'], 31 'TEST_SCHEMA_PREFIX': conn_settings['TEST_SCHEMA_PREFIX'], 32 'TEST_SCHEMAS': conn_settings['TEST_SCHEMAS'], 28 33 }) 29 34 30 35 # Put both DB connections into managed transaction mode … … 55 60 56 61 # Delete something using connection 2. 57 62 cursor2 = self.conn2.cursor() 58 cursor2.execute('DELETE from delete_regress_book WHERE id=1')63 cursor2.execute('DELETE from %s WHERE id=1' % self.conn2.qname(Book)) 59 64 self.conn2._commit() 60 65 61 66 # Now perform a queryset delete that covers the object -
tests/regressiontests/backends/tests.py
9 9 from django.core.management.color import no_style 10 10 from django.core.exceptions import ImproperlyConfigured 11 11 from django.db import (backend, connection, connections, DEFAULT_DB_ALIAS, 12 IntegrityError, transaction )12 IntegrityError, transaction, QName) 13 13 from django.db.backends.signals import connection_created 14 14 from django.db.backends.postgresql_psycopg2 import version as pg_version 15 15 from django.db.utils import ConnectionHandler, DatabaseError, load_backend … … 129 129 "An executemany call with too many/not enough parameters will raise an exception (Refs #12612)" 130 130 cursor = connection.cursor() 131 131 query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % ( 132 connection.introspection. table_name_converter('backends_square'),132 connection.introspection.identifier_converter('backends_square'), 133 133 connection.ops.quote_name('root'), 134 134 connection.ops.quote_name('square') 135 135 )) … … 169 169 VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ 170 170 VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through 171 171 tables = [ 172 VLM._meta. db_table,173 VLM_m2m._meta. db_table,172 VLM._meta.qualified_name, 173 VLM_m2m._meta.qualified_name 174 174 ] 175 175 sequences = [ 176 176 { 177 177 'column': VLM._meta.pk.column, 178 ' table': VLM._meta.db_table178 'qname': QName(None, VLM._meta.db_table, False), 179 179 }, 180 180 ] 181 181 cursor = connection.cursor() … … 313 313 def create_squares_with_executemany(self, args): 314 314 cursor = connection.cursor() 315 315 opts = models.Square._meta 316 tbl = connection. 
introspection.table_name_converter(opts.db_table)316 tbl = connection.qname(models.Square) 317 317 f1 = connection.ops.quote_name(opts.get_field('root').column) 318 318 f2 = connection.ops.quote_name(opts.get_field('square').column) 319 319 query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2) … … 358 358 opts2 = models.Person._meta 359 359 f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name') 360 360 query2 = ('SELECT %s, %s FROM %s ORDER BY %s' 361 % (qn(f3.column), qn(f4.column), connection. introspection.table_name_converter(opts2.db_table),361 % (qn(f3.column), qn(f4.column), connection.qname(models.Person), 362 362 qn(f3.column))) 363 363 cursor = connection.cursor() 364 364 cursor.execute(query2) … … 375 375 def test_duplicate_table_error(self): 376 376 """ Test that creating an existing table returns a DatabaseError """ 377 377 cursor = connection.cursor() 378 query = 'CREATE TABLE %s (id INTEGER);' % models.Article._meta.db_table378 query = 'CREATE TABLE %s (id INTEGER);' % connection.qname(models.Article) 379 379 with self.assertRaises(DatabaseError): 380 380 cursor.execute(query) 381 381 -
tests/regressiontests/inspectdb/tests.py
1 from __future__ import absolute_import 2 1 3 from StringIO import StringIO 2 4 3 5 from django.core.management import call_command … … 2 4 from django.test import TestCase, skipUnlessDBFeature 5 from .models import People 3 6 … … 8 11 9 12 @skipUnlessDBFeature('can_introspect_foreign_keys') 10 13 def test_attribute_name_not_python_keyword(self): 14 from django.db import connection 15 _, tbl, _ = connection.introspection.qname_converter(People._meta.qualified_name) 16 mname = ''.join(t.title() for t in tbl.split('_')) 11 17 out = StringIO() 12 18 call_command('inspectdb', stdout=out) 13 19 error_message = "inspectdb generated an attribute name which is a python keyword" 14 self.assertNotIn("from = models.ForeignKey( InspectdbPeople)", out.getvalue(), msg=error_message)15 self.assertIn("from_field = models.ForeignKey( InspectdbPeople)", out.getvalue())16 self.assertIn("people_pk = models.ForeignKey( InspectdbPeople, primary_key=True)",20 self.assertNotIn("from = models.ForeignKey(%s)" % mname, out.getvalue(), msg=error_message) 21 self.assertIn("from_field = models.ForeignKey(%s)" % mname, out.getvalue()) 22 self.assertIn("people_pk = models.ForeignKey(%s, primary_key=True)" % mname, 17 23 out.getvalue()) 18 self.assertIn("people_unique = models.ForeignKey( InspectdbPeople, unique=True)",24 self.assertIn("people_unique = models.ForeignKey(%s, unique=True)" % mname, 19 25 out.getvalue()) 20 26 out.close() 21 27 -
tests/regressiontests/extra_regress/tests.py
3 3 import datetime 4 4 5 5 from django.contrib.auth.models import User 6 from django.db import connection 6 7 from django.test import TestCase 7 8 from django.utils.datastructures import SortedDict 8 9 … … 42 43 # Queryset to match most recent revision: 43 44 qs = RevisionableModel.objects.extra( 44 45 where=["%(table)s.id IN (SELECT MAX(rev.id) FROM %(table)s rev GROUP BY rev.base_id)" % { 45 'table': RevisionableModel._meta.db_table,46 'table': connection.qname(RevisionableModel), 46 47 }] 47 48 ) 48 49 -
tests/regressiontests/aggregation_regress/tests.py
6 6 from operator import attrgetter 7 7 8 8 from django.core.exceptions import FieldError 9 from django.db import connection 9 10 from django.db.models import Count, Max, Avg, Sum, StdDev, Variance, F, Q 10 11 from django.test import TestCase, Approximate, skipUnlessDBFeature 11 12 … … 69 70 #oracle doesn't support subqueries in group by clause 70 71 shortest_book_sql = """ 71 72 SELECT name 72 FROM aggregation_regress_bookb73 WHERE b.publisher_id = aggregation_regress_publisher.id73 FROM %s b 74 WHERE b.publisher_id = %s.id 74 75 ORDER BY b.pages 75 76 LIMIT 1 76 """ 77 """ % (connection.qname(Book), connection.qname(Book)) 77 78 # tests that this query does not raise a DatabaseError due to the full 78 79 # subselect being (erroneously) added to the GROUP BY parameters 79 80 qs = Publisher.objects.extra(select={ -
tests/regressiontests/admin_scripts/tests.py
3 3 advertised - especially with regards to the handling of the DJANGO_SETTINGS_MODULE 4 4 and default settings.py files. 5 5 """ 6 7 6 import os 8 7 import re 9 8 import shutil … … 13 12 14 13 from django import conf, bin, get_version 15 14 from django.conf import settings 15 from django.db import connection 16 16 from django.test.simple import DjangoTestSuiteRunner 17 17 from django.utils import unittest 18 18 from django.test import LiveServerTestCase 19 19 20 from .models import Article 21 20 22 test_dir = os.path.dirname(os.path.dirname(__file__)) 21 expected_query_re = re.compile(r'CREATE TABLE [`"]admin_scripts_article[`"]', re.IGNORECASE)22 23 23 24 24 class AdminScriptTestCase(unittest.TestCase): 25 25 def write_settings(self, filename, apps=None, is_dir=False, sdict=None): 26 26 test_dir = os.path.dirname(os.path.dirname(__file__)) … … 844 844 """ 845 845 def setUp(self): 846 846 self.write_settings('alternate_settings.py') 847 tblname = connection.qname(Article) 848 self.expected_query_re = re.compile(r'CREATE TABLE %s' % tblname, re.IGNORECASE) 847 849 848 850 def tearDown(self): 849 851 self.remove_settings('alternate_settings.py') … … 859 861 "alternate: manage.py builtin commands work with settings provided as argument" 860 862 args = ['sqlall', '--settings=alternate_settings', 'admin_scripts'] 861 863 out, err = self.run_manage(args) 862 self.assertRegexpMatches(out, expected_query_re)864 self.assertRegexpMatches(out, self.expected_query_re) 863 865 self.assertNoOutput(err) 864 866 865 867 def test_builtin_with_environment(self): 866 868 "alternate: manage.py builtin commands work if settings are provided in the environment" 867 869 args = ['sqlall', 'admin_scripts'] 868 870 out, err = self.run_manage(args, 'alternate_settings') 869 self.assertRegexpMatches(out, expected_query_re)871 self.assertRegexpMatches(out, self.expected_query_re) 870 872 self.assertNoOutput(err) 871 873 872 874 def test_builtin_with_bad_settings(self): -
tests/regressiontests/introspection/tests.py
2 2 3 3 from functools import update_wrapper 4 4 5 from django.db import connection 5 from django.conf import settings 6 from django.db import connection, QName 6 7 from django.test import TestCase, skipUnlessDBFeature, skipIfDBFeature 7 8 8 9 from .models import Reporter, Article … … 39 40 __metaclass__ = IgnoreNotimplementedError 40 41 41 42 def test_table_names(self): 42 tl = connection.introspection.table_names() 43 self.assertTrue(Reporter._meta.db_table in tl, 43 conv = connection.introspection.qname_converter 44 tl = connection.introspection.all_qualified_names() 45 self.assertTrue(conv(Reporter._meta.qualified_name) in tl, 44 46 "'%s' isn't in table_list()." % Reporter._meta.db_table) 45 self.assertTrue( Article._meta.db_tablein tl,47 self.assertTrue(conv(Article._meta.qualified_name) in tl, 46 48 "'%s' isn't in table_list()." % Article._meta.db_table) 47 49 48 50 def test_django_table_names(self): 49 51 cursor = connection.cursor() 50 cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);') 52 tblname = connection.ops.qualified_name( 53 QName(None, 'django_ixn_test_table', False)) 54 cursor.execute('CREATE TABLE %s (id INTEGER);' % tblname) 51 55 tl = connection.introspection.django_table_names() 52 cursor.execute("DROP TABLE django_ixn_test_table;")53 self.assertTrue( 'django_ixn_testcase_table'not in tl,56 cursor.execute("DROP TABLE %s;" % tblname) 57 self.assertTrue(tblname not in tl, 54 58 "django_table_names() returned a non-Django table") 55 59 56 60 def test_django_table_names_retval_type(self): 57 61 # Ticket #15216 58 62 cursor = connection.cursor() 59 cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);') 63 tblname = connection.ops.qualified_name( 64 QName(None, 'django_ixn_test_table', False)) 65 cursor.execute('CREATE TABLE %s (id INTEGER);' % tblname) 60 66 61 67 tl = connection.introspection.django_table_names(only_existing=True) 62 68 self.assertIs(type(tl), list) … … 65 71 self.assertIs(type(tl), list) 66 72 67 73 def 
test_installed_models(self): 68 tables = [Article._meta.db_table, Reporter._meta.db_table] 74 conv = connection.introspection.qname_converter 75 tables = [conv(Article._meta.qualified_name), 76 conv(Reporter._meta.qualified_name)] 69 77 models = connection.introspection.installed_models(tables) 70 78 self.assertEqual(models, set([Article, Reporter])) 71 79 72 80 def test_sequence_list(self): 73 81 sequences = connection.introspection.sequence_list() 74 expected = {'table': Reporter._meta.db_table, 'column': 'id'} 82 qname = connection.introspection.qname_converter(Reporter._meta.qualified_name) 83 expected = {'qname': qname, 'column': 'id'} 75 84 self.assertTrue(expected in sequences, 76 85 'Reporter sequence not found in sequence_list()') 77 86 78 87 def test_get_table_description_names(self): 79 88 cursor = connection.cursor() 80 desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) 89 tbl = connection.introspection.qname_converter(Reporter._meta.qualified_name) 90 desc = connection.introspection.get_table_description(cursor, tbl) 81 91 self.assertEqual([r[0] for r in desc], 82 92 [f.column for f in Reporter._meta.fields]) 83 93 84 94 def test_get_table_description_types(self): 85 95 cursor = connection.cursor() 86 desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) 96 tbl = connection.introspection.qname_converter(Reporter._meta.qualified_name) 97 desc = connection.introspection.get_table_description(cursor, tbl) 87 98 self.assertEqual( 88 99 [datatype(r[1], r) for r in desc], 89 100 ['IntegerField', 'CharField', 'CharField', 'CharField', 'BigIntegerField'] … … 95 106 @skipIfDBFeature('interprets_empty_strings_as_nulls') 96 107 def test_get_table_description_nullable(self): 97 108 cursor = connection.cursor() 98 desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) 109 tbl = connection.introspection.qname_converter(Reporter._meta.qualified_name) 110 desc = 
connection.introspection.get_table_description(cursor, tbl) 99 111 self.assertEqual( 100 112 [r[6] for r in desc], 101 113 [False, False, False, False, True] … … 105 117 @skipUnlessDBFeature('has_real_datatype') 106 118 def test_postgresql_real_type(self): 107 119 cursor = connection.cursor() 108 cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);") 109 desc = connection.introspection.get_table_description(cursor, 'django_ixn_real_test_table') 110 cursor.execute('DROP TABLE django_ixn_real_test_table;') 120 tblname = connection.ops.qualified_name( 121 QName(None, 'django_ixn_real_test_table', False)) 122 cursor.execute("CREATE TABLE %s (number REAL);" % tblname) 123 desc = connection.introspection.get_table_description( 124 cursor, QName(None, 'django_ixn_real_test_table', False)) 125 cursor.execute('DROP TABLE %s;' % tblname) 111 126 self.assertEqual(datatype(desc[0][1], desc[0]), 'FloatField') 112 127 113 128 def test_get_relations(self): 114 129 cursor = connection.cursor() 115 relations = connection.introspection.get_relations(cursor, Article._meta.db_table) 130 tbl = connection.introspection.qname_converter(Article._meta.qualified_name) 131 relations = connection.introspection.get_relations(cursor, tbl) 132 rep_tbl = connection.introspection.qname_converter(Reporter._meta.qualified_name) 116 133 117 134 # Older versions of MySQL don't have the chops to report on this stuff, 118 135 # so just skip it if no relations come back. If they do, though, we 119 136 # should test that the response is correct. 120 137 if relations: 121 138 # That's {field_index: (field_index_other_table, other_table)} 122 self.assertEqual(relations, {3: (0, Reporter._meta.db_table)}) 139 # We have a small problem here: the Reporter model is installed 140 # into the default schema (qualified_name[0] == None). The 141 # relation introspection is going to see it in that schema, but we 142 # do not know what that schema is. So, test everything except the 143 # schema. 
144 # TODO: this testing logic is UGLY! 145 schema = connection.convert_schema(Reporter._meta.qualified_name[0]) 146 self.assertTrue(3 in relations) 147 relations[3] = (relations[3][0], (schema, relations[3][1][1], True)) 148 self.assertEqual(relations, {3: (0, rep_tbl)}) 123 149 124 150 def test_get_key_columns(self): 125 151 cursor = connection.cursor() 126 key_columns = connection.introspection.get_key_columns(cursor, Article._meta.db_table) 127 self.assertEqual(key_columns, [(u'reporter_id', Reporter._meta.db_table, u'id')]) 152 rep_tbl = connection.introspection.qname_converter(Reporter._meta.qualified_name, force_schema=True) 153 key_columns = connection.introspection.get_key_columns(cursor, Article._meta.qualified_name) 154 self.assertEqual(key_columns, [(u'reporter_id', rep_tbl, u'id')]) 128 155 129 156 def test_get_primary_key_column(self): 130 157 cursor = connection.cursor() 131 primary_key_column = connection.introspection.get_primary_key_column(cursor, Article._meta. db_table)158 primary_key_column = connection.introspection.get_primary_key_column(cursor, Article._meta.qualified_name) 132 159 self.assertEqual(primary_key_column, u'id') 133 160 134 161 def test_get_indexes(self): 135 162 cursor = connection.cursor() 136 indexes = connection.introspection.get_indexes(cursor, Article._meta.db_table) 163 tbl = connection.introspection.qname_converter(Article._meta.qualified_name) 164 indexes = connection.introspection.get_indexes(cursor, tbl) 137 165 self.assertEqual(indexes['reporter_id'], {'unique': False, 'primary_key': False}) 138 166 139 167 -
docs/topics/db/models.txt
636 636 verbose_name_plural = "oxen" 637 637 638 638 Model metadata is "anything that's not a field", such as ordering options 639 (:attr:`~Options.ordering`), database table name (:attr:`~Options.db_table`), or 639 (:attr:`~Options.ordering`), database table name (:attr:`~Options.db_table`), 640 or custom schema for the tables (:attr:`~Options.db_schema`), or 640 641 human-readable singular and plural names (:attr:`~Options.verbose_name` and 641 642 :attr:`~Options.verbose_name_plural`). None are required, and adding ``class 642 643 Meta`` to a model is completely optional. -
docs/ref/models/options.txt
67 67 the table name via ``db_table``, particularly if you are using the MySQL 68 68 backend. See the :ref:`MySQL notes <mysql-notes>` for more details. 69 69 70 .. _db_schema: 70 71 72 ``db_schema`` 73 ------------- 74 75 .. attribute:: Options.db_schema 76 77 .. versionadded:: 1.5 78 79 The name of the database schema to use for the model. If the backend 80 doesn't support multiple schemas, this option is ignored. 81 82 If this is used then Django will place the table in the given schema. 83 Usually this means turning the pair db_schema, db_table into a fully 84 qualified name. For example: "db_schema"."db_table". 85 86 71 87 ``db_tablespace`` 72 88 ----------------- 73 89 -
docs/ref/settings.txt
512 512 
 513 513 The username to use when connecting to the database. Not used with SQLite. 
 514 514 
 515 .. setting:: SCHEMA 
 516 
 517 SCHEMA 
 518 ~~~~~~ 
 519 
 520 .. versionadded:: 1.5 
 521 
 522 Default: ``''`` (Empty string) 
 523 
 524 The name of the database schema to use for models. If the backend 
 525 doesn't support multiple schemas, this option is ignored. An empty 
 526 string means that schema qualified table names are not used by default. 
 527 
 528 If this is used then Django will prefix any table names with the schema name. 
 529 The schema can be overridden on a per-model basis, for details see 
 530 :ref:`db_schema`. 
 531 
 515 532 .. setting:: TEST_CHARSET 
 516 533 
 517 534 TEST_CHARSET