Refs #27656 -- Updated django.db docstring verbs according to PEP 257.
parent d6e26e5b7c
commit 60e52a047e
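
The change applies the PEP 257 guideline that the first line of a docstring should be written in the imperative mood ("Return ...") rather than as a description ("Returns ..."). A minimal sketch of the convention, using a hypothetical method that is not taken verbatim from the diff below:

    class Example:
        def get_connection_params(self):
            """Return a dict of connection parameters."""
            # PEP 257: phrase the summary line as a command ("Return ..."),
            # not as a description ("Returns ...").
            return {}

    # Only the wording of the docstring changes; behavior is untouched.
    print(Example().get_connection_params.__doc__)

The diff below applies the same rewording across the base database backend classes.
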
@@ -22,9 +22,7 @@ NO_DB_ALIAS = '__no_db__'


 class BaseDatabaseWrapper:
-    """
-    Represents a database connection.
-    """
+    """Represent a database connection."""
     # Mapping of Field objects to their column types.
     data_types = {}
     # Mapping of Field objects to their SQL suffix such as AUTOINCREMENT.
@@ -112,7 +110,7 @@ class BaseDatabaseWrapper:
         """
         Time zone for datetimes stored as naive values in the database.

-        Returns a tzinfo object or None.
+        Return a tzinfo object or None.

         This is only needed when time zone support is enabled and the database
         doesn't support time zones. (When the database supports time zones,
@@ -154,25 +152,25 @@ class BaseDatabaseWrapper:
     # ##### Backend-specific methods for creating connections and cursors #####

     def get_connection_params(self):
-        """Returns a dict of parameters suitable for get_new_connection."""
+        """Return a dict of parameters suitable for get_new_connection."""
         raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_connection_params() method')

     def get_new_connection(self, conn_params):
-        """Opens a connection to the database."""
+        """Open a connection to the database."""
         raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_new_connection() method')

     def init_connection_state(self):
-        """Initializes the database connection settings."""
+        """Initialize the database connection settings."""
         raise NotImplementedError('subclasses of BaseDatabaseWrapper may require an init_connection_state() method')

     def create_cursor(self, name=None):
-        """Creates a cursor. Assumes that a connection is established."""
+        """Create a cursor. Assume that a connection is established."""
         raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a create_cursor() method')

     # ##### Backend-specific methods for creating connections #####

     def connect(self):
-        """Connects to the database. Assumes that the connection is closed."""
+        """Connect to the database. Assume that the connection is closed."""
         # Check for invalid configurations.
         self.check_settings()
         # In case the previous connection was closed while in an atomic block
@@ -205,9 +203,7 @@ class BaseDatabaseWrapper:
                     "handles time zones conversions natively." % self.alias)

     def ensure_connection(self):
-        """
-        Guarantees that a connection to the database is established.
-        """
+        """Guarantee that a connection to the database is established."""
         if self.connection is None:
             with self.wrap_database_errors:
                 self.connect()
@@ -248,15 +244,11 @@ class BaseDatabaseWrapper:
     # ##### Generic wrappers for PEP-249 connection methods #####

     def cursor(self):
-        """
-        Creates a cursor, opening a connection if necessary.
-        """
+        """Create a cursor, opening a connection if necessary."""
         return self._cursor()

     def commit(self):
-        """
-        Commits a transaction and resets the dirty flag.
-        """
+        """Commit a transaction and reset the dirty flag."""
         self.validate_thread_sharing()
         self.validate_no_atomic_block()
         self._commit()
@@ -265,9 +257,7 @@ class BaseDatabaseWrapper:
         self.run_commit_hooks_on_set_autocommit_on = True

     def rollback(self):
-        """
-        Rolls back a transaction and resets the dirty flag.
-        """
+        """Roll back a transaction and reset the dirty flag."""
         self.validate_thread_sharing()
         self.validate_no_atomic_block()
         self._rollback()
@@ -277,9 +267,7 @@ class BaseDatabaseWrapper:
         self.run_on_commit = []

     def close(self):
-        """
-        Closes the connection to the database.
-        """
+        """Close the connection to the database."""
         self.validate_thread_sharing()
         self.run_on_commit = []

@@ -319,9 +307,9 @@ class BaseDatabaseWrapper:

     def savepoint(self):
         """
-        Creates a savepoint inside the current transaction. Returns an
+        Create a savepoint inside the current transaction. Return an
         identifier for the savepoint that will be used for the subsequent
-        rollback or commit. Does nothing if savepoints are not supported.
+        rollback or commit. Do nothing if savepoints are not supported.
         """
         if not self._savepoint_allowed():
             return
@@ -339,7 +327,7 @@ class BaseDatabaseWrapper:

     def savepoint_rollback(self, sid):
         """
-        Rolls back to a savepoint. Does nothing if savepoints are not supported.
+        Roll back to a savepoint. Do nothing if savepoints are not supported.
         """
         if not self._savepoint_allowed():
             return
@@ -354,7 +342,7 @@ class BaseDatabaseWrapper:

     def savepoint_commit(self, sid):
         """
-        Releases a savepoint. Does nothing if savepoints are not supported.
+        Release a savepoint. Do nothing if savepoints are not supported.
         """
         if not self._savepoint_allowed():
             return
@@ -364,7 +352,7 @@ class BaseDatabaseWrapper:

     def clean_savepoints(self):
         """
-        Resets the counter used to generate unique savepoint ids in this thread.
+        Reset the counter used to generate unique savepoint ids in this thread.
         """
         self.savepoint_state = 0

@@ -379,9 +367,7 @@ class BaseDatabaseWrapper:
     # ##### Generic transaction management methods #####

     def get_autocommit(self):
-        """
-        Check the autocommit state.
-        """
+        """Get the autocommit state."""
         self.ensure_connection()
         return self.autocommit

@@ -417,9 +403,7 @@ class BaseDatabaseWrapper:
             self.run_commit_hooks_on_set_autocommit_on = False

     def get_rollback(self):
-        """
-        Get the "needs rollback" flag -- for *advanced use* only.
-        """
+        """Get the "needs rollback" flag -- for *advanced use* only."""
         if not self.in_atomic_block:
             raise TransactionManagementError(
                 "The rollback flag doesn't work outside of an 'atomic' block.")
@@ -435,9 +419,7 @@ class BaseDatabaseWrapper:
         self.needs_rollback = rollback

     def validate_no_atomic_block(self):
-        """
-        Raise an error if an atomic block is active.
-        """
+        """Raise an error if an atomic block is active."""
         if self.in_atomic_block:
             raise TransactionManagementError(
                 "This is forbidden when an 'atomic' block is active.")
@@ -453,7 +435,7 @@ class BaseDatabaseWrapper:
     @contextmanager
     def constraint_checks_disabled(self):
         """
-        Context manager that disables foreign key constraint checking.
+        Disable foreign key constraint checking.
         """
         disabled = self.disable_constraint_checking()
         try:
@@ -489,9 +471,9 @@ class BaseDatabaseWrapper:

     def is_usable(self):
         """
-        Tests if the database connection is usable.
+        Test if the database connection is usable.

-        This function may assume that self.connection is not None.
+        This method may assume that self.connection is not None.

         Actual implementations should take care not to raise exceptions
         as that may prevent Django from recycling unusable connections.
@@ -501,7 +483,7 @@ class BaseDatabaseWrapper:

     def close_if_unusable_or_obsolete(self):
         """
-        Closes the current connection if unrecoverable errors have occurred,
+        Close the current connection if unrecoverable errors have occurred
         or if it outlived its maximum age.
         """
         if self.connection is not None:
@@ -528,10 +510,10 @@ class BaseDatabaseWrapper:

     def validate_thread_sharing(self):
         """
-        Validates that the connection isn't accessed by another thread than the
+        Validate that the connection isn't accessed by another thread than the
         one which originally created it, unless the connection was explicitly
         authorized to be shared between threads (via the `allow_thread_sharing`
-        property). Raises an exception if the validation fails.
+        property). Raise an exception if the validation fails.
         """
         if not (self.allow_thread_sharing or self._thread_ident == _thread.get_ident()):
             raise DatabaseError(
@@ -567,15 +549,11 @@ class BaseDatabaseWrapper:
         return self.cursor()

     def make_debug_cursor(self, cursor):
-        """
-        Creates a cursor that logs all queries in self.queries_log.
-        """
+        """Create a cursor that logs all queries in self.queries_log."""
         return utils.CursorDebugWrapper(cursor, self)

     def make_cursor(self, cursor):
-        """
-        Creates a cursor without debug logging.
-        """
+        """Create a cursor without debug logging."""
         return utils.CursorWrapper(cursor, self)

     @contextmanager
@@ -585,7 +563,7 @@ class BaseDatabaseWrapper:
         if it opened one, closes it to avoid leaving a dangling connection.
         This is useful for operations outside of the request-response cycle.

-        Provides a cursor: with self.temporary_connection() as cursor: ...
+        Provide a cursor: with self.temporary_connection() as cursor: ...
         """
         must_close = self.connection is None
         cursor = self.cursor()
@@ -599,8 +577,8 @@ class BaseDatabaseWrapper:
     @property
     def _nodb_connection(self):
         """
-        Return an alternative connection to be used when there is no need to access
-        the main database, specifically for test db creation/deletion.
+        Return an alternative connection to be used when there is no need to
+        access the main database, specifically for test db creation/deletion.
         This also prevents the production database from being exposed to
         potential child threads while (or after) the test database is destroyed.
         Refs #10868, #17786, #16969.
@@ -624,7 +602,7 @@ class BaseDatabaseWrapper:

     def schema_editor(self, *args, **kwargs):
         """
-        Returns a new instance of this backend's SchemaEditor.
+        Return a new instance of this backend's SchemaEditor.
         """
         if self.SchemaEditorClass is None:
             raise NotImplementedError(
@@ -1,8 +1,5 @@
 class BaseDatabaseClient:
-    """
-    This class encapsulates all backend-specific methods for opening a
-    client shell.
-    """
+    """Encapsulate backend-specific methods for opening a client shell."""
     # This should be a string representing the name of the executable
     # (e.g., "psql"). Subclasses must override this.
     executable_name = None
@@ -13,8 +13,8 @@ TEST_DATABASE_PREFIX = 'test_'

 class BaseDatabaseCreation:
     """
-    This class encapsulates all backend-specific differences that pertain to
-    creation and destruction of the test database.
+    Encapsulate backend-specific differences pertaining to creation and
+    destruction of the test database.
     """
     def __init__(self, connection):
         self.connection = connection
@@ -28,8 +28,8 @@ class BaseDatabaseCreation:

     def create_test_db(self, verbosity=1, autoclobber=False, serialize=True, keepdb=False):
         """
-        Creates a test database, prompting the user for confirmation if the
-        database already exists. Returns the name of the test database created.
+        Create a test database, prompting the user for confirmation if the
+        database already exists. Return the name of the test database created.
         """
         # Don't import django.core.management if it isn't needed.
         from django.core.management import call_command
@@ -84,14 +84,14 @@ class BaseDatabaseCreation:

     def set_as_test_mirror(self, primary_settings_dict):
         """
-        Set this database up to be used in testing as a mirror of a primary database
-        whose settings are given
+        Set this database up to be used in testing as a mirror of a primary
+        database whose settings are given.
         """
         self.connection.settings_dict['NAME'] = primary_settings_dict['NAME']

     def serialize_db_to_string(self):
         """
-        Serializes all data in the database into a JSON string.
+        Serialize all data in the database into a JSON string.
         Designed only for test runner usage; will not handle large
         amounts of data.
         """
@@ -121,8 +121,8 @@ class BaseDatabaseCreation:

     def deserialize_db_from_string(self, data):
         """
-        Reloads the database with data from a string generated by
-        the serialize_db_to_string method.
+        Reload the database with data from a string generated by
+        the serialize_db_to_string() method.
         """
         data = StringIO(data)
         for obj in serializers.deserialize("json", data, using=self.connection.alias):
@@ -139,7 +139,7 @@ class BaseDatabaseCreation:

     def _get_test_db_name(self):
         """
-        Internal implementation - returns the name of the test DB that will be
+        Internal implementation - return the name of the test DB that will be
         created. Only useful when called from create_test_db() and
         _create_test_db() and when no external munging is done with the 'NAME'
         settings.
@@ -150,7 +150,7 @@ class BaseDatabaseCreation:

     def _create_test_db(self, verbosity, autoclobber, keepdb=False):
         """
-        Internal implementation - creates the test db tables.
+        Internal implementation - create the test db tables.
         """
         suffix = self.sql_table_creation_suffix()

@@ -285,7 +285,7 @@ class BaseDatabaseCreation:

     def test_db_signature(self):
         """
-        Returns a tuple with elements of self.connection.settings_dict (a
+        Return a tuple with elements of self.connection.settings_dict (a
         DATABASES setting value) that uniquely identify a database
         accordingly to the RDBMS particularities.
         """
@@ -8,24 +8,25 @@ FieldInfo = namedtuple('FieldInfo', 'name type_code display_size internal_size p


 class BaseDatabaseIntrospection:
-    """
-    This class encapsulates all backend-specific introspection utilities
-    """
+    """Encapsulate backend-specific introspection utilities."""
     data_types_reverse = {}

     def __init__(self, connection):
         self.connection = connection

     def get_field_type(self, data_type, description):
-        """Hook for a database backend to use the cursor description to
+        """
+        Hook for a database backend to use the cursor description to
         match a Django field type to a database column.

         For Oracle, the column data_type on its own is insufficient to
-        distinguish between a FloatField and IntegerField, for example."""
+        distinguish between a FloatField and IntegerField, for example.
+        """
         return self.data_types_reverse[data_type]

     def table_name_converter(self, name):
-        """Apply a conversion to the name for the purposes of comparison.
+        """
+        Apply a conversion to the name for the purposes of comparison.

         The default table name converter is for case sensitive comparison.
         """
@@ -35,16 +36,16 @@ class BaseDatabaseIntrospection:
         """
         Apply a conversion to the column name for the purposes of comparison.

-        Uses table_name_converter() by default.
+        Use table_name_converter() by default.
         """
         return self.table_name_converter(name)

     def table_names(self, cursor=None, include_views=False):
         """
-        Returns a list of names of all tables that exist in the database.
-        The returned table list is sorted by Python's default sorting. We
-        do NOT use database's ORDER BY here to avoid subtle differences
-        in sorting order between databases.
+        Return a list of names of all tables that exist in the database.
+        Sort the returned table list by Python's default sorting. Do NOT use
+        the database's ORDER BY here to avoid subtle differences in sorting
+        order between databases.
         """
         def get_names(cursor):
             return sorted(ti.name for ti in self.get_table_list(cursor)
@@ -56,18 +57,17 @@ class BaseDatabaseIntrospection:

     def get_table_list(self, cursor):
         """
-        Returns an unsorted list of TableInfo named tuples of all tables and
+        Return an unsorted list of TableInfo named tuples of all tables and
         views that exist in the database.
         """
         raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_table_list() method')

     def django_table_names(self, only_existing=False, include_views=True):
         """
-        Returns a list of all table names that have associated Django models and
+        Return a list of all table names that have associated Django models and
         are in INSTALLED_APPS.

-        If only_existing is True, the resulting list will only include the tables
-        that actually exist in the database.
+        If only_existing is True, include only the tables in the database.
         """
         from django.apps import apps
         from django.db import router
@@ -92,7 +92,10 @@ class BaseDatabaseIntrospection:
         return tables

     def installed_models(self, tables):
-        "Returns a set of all models represented by the provided list of table names."
+        """
+        Return a set of all models represented by the provided list of table
+        names.
+        """
         from django.apps import apps
         from django.db import router
         all_models = []
@@ -105,7 +108,10 @@ class BaseDatabaseIntrospection:
         }

     def sequence_list(self):
-        "Returns a list of information about all DB sequences for all models in all apps."
+        """
+        Return a list of information about all DB sequences for all models in
+        all apps.
+        """
         from django.apps import apps
         from django.db import models, router

@@ -132,14 +138,15 @@ class BaseDatabaseIntrospection:

     def get_key_columns(self, cursor, table_name):
         """
-        Backends can override this to return a list of (column_name, referenced_table_name,
-        referenced_column_name) for all key columns in given table.
+        Backends can override this to return a list of:
+            (column_name, referenced_table_name, referenced_column_name)
+        for all key columns in given table.
         """
         raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_key_columns() method')

     def get_primary_key_column(self, cursor, table_name):
         """
-        Returns the name of the primary key column for the given table.
+        Return the name of the primary key column for the given table.
         """
         for constraint in self.get_constraints(cursor, table_name).values():
             if constraint['primary_key']:
@@ -149,7 +156,7 @@ class BaseDatabaseIntrospection:
     def get_indexes(self, cursor, table_name):
         """
         Deprecated in Django 1.11, use get_constraints instead.
-        Returns a dictionary of indexed fieldname -> infodict for the given
+        Return a dictionary of indexed fieldname -> infodict for the given
         table, where each infodict is in the format:
             {'primary_key': boolean representing whether it's the primary key,
              'unique': boolean representing whether it's a unique index}
@@ -160,10 +167,10 @@ class BaseDatabaseIntrospection:

     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints or keys (unique, pk, fk, check, index)
+        Retrieve any constraints or keys (unique, pk, fk, check, index)
         across one or more columns.

-        Returns a dict mapping constraint names to their attributes,
+        Return a dict mapping constraint names to their attributes,
         where attributes is a dict with keys:
          * columns: List of columns this covers
          * primary_key: True if primary key, False otherwise
@@ -12,9 +12,8 @@ from django.utils.encoding import force_text

 class BaseDatabaseOperations:
     """
-    This class encapsulates all backend-specific differences, such as the way
-    a backend performs ordering or calculates the ID of a recently-inserted
-    row.
+    Encapsulate backend-specific differences, such as the way a backend
+    performs ordering or calculates the ID of a recently-inserted row.
     """
     compiler_module = "django.db.models.sql.compiler"

@@ -39,7 +38,7 @@ class BaseDatabaseOperations:

     def autoinc_sql(self, table, column):
         """
-        Returns any SQL needed to support auto-incrementing primary keys, or
+        Return any SQL needed to support auto-incrementing primary keys, or
         None if no SQL is necessary.

         This SQL is executed when a table is created.
@@ -48,7 +47,7 @@ class BaseDatabaseOperations:

     def bulk_batch_size(self, fields, objs):
         """
-        Returns the maximum allowed batch size for the backend. The fields
+        Return the maximum allowed batch size for the backend. The fields
         are the fields going to be inserted in the batch, the objs contains
         all the objects to be inserted.
         """
@@ -56,7 +55,7 @@ class BaseDatabaseOperations:

     def cache_key_culling_sql(self):
         """
-        Returns an SQL query that retrieves the first cache key greater than the
+        Return an SQL query that retrieves the first cache key greater than the
         n smallest.

         This is used by the 'db' cache backend to determine where to start
@@ -66,28 +65,28 @@ class BaseDatabaseOperations:

     def unification_cast_sql(self, output_field):
         """
-        Given a field instance, returns the SQL necessary to cast the result of
-        a union to that type. Note that the resulting string should contain a
-        '%s' placeholder for the expression being cast.
+        Given a field instance, return the SQL that casts the result of a union
+        to that type. The resulting string should contain a '%s' placeholder
+        for the expression being cast.
         """
         return '%s'

     def date_extract_sql(self, lookup_type, field_name):
         """
-        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
+        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
         extracts a value from the given date field field_name.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')

     def date_interval_sql(self, timedelta):
         """
-        Implements the date interval functionality for expressions
+        Implement the date interval functionality for expressions.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')

     def date_trunc_sql(self, lookup_type, field_name):
         """
-        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
+        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
         truncates the given date field field_name to a date object with only
         the given specificity.
         """
@@ -95,13 +94,13 @@ class BaseDatabaseOperations:

     def datetime_cast_date_sql(self, field_name, tzname):
         """
-        Returns the SQL necessary to cast a datetime value to date value.
+        Return the SQL to cast a datetime value to date value.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_date() method')

     def datetime_cast_time_sql(self, field_name, tzname):
         """
-        Returns the SQL necessary to cast a datetime value to time value.
+        Return the SQL to cast a datetime value to time value.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method')

@@ -123,7 +122,7 @@ class BaseDatabaseOperations:

     def time_trunc_sql(self, lookup_type, field_name):
         """
-        Given a lookup_type of 'hour', 'minute' or 'second', returns the SQL
+        Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
         that truncates the given time field field_name to a time object with
         only the given specificity.
         """
@@ -131,23 +130,23 @@ class BaseDatabaseOperations:

     def time_extract_sql(self, lookup_type, field_name):
         """
-        Given a lookup_type of 'hour', 'minute' or 'second', returns the SQL
+        Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
         that extracts a value from the given time field field_name.
         """
         return self.date_extract_sql(lookup_type, field_name)

     def deferrable_sql(self):
         """
-        Returns the SQL necessary to make a constraint "initially deferred"
-        during a CREATE TABLE statement.
+        Return the SQL to make a constraint "initially deferred" during a
+        CREATE TABLE statement.
         """
         return ''

     def distinct_sql(self, fields):
         """
-        Returns an SQL DISTINCT clause which removes duplicate rows from the
-        result set. If any fields are given, only the given fields are being
-        checked for duplicates.
+        Return an SQL DISTINCT clause which removes duplicate rows from the
+        result set. If any fields are given, only check the given fields for
+        duplicates.
         """
         if fields:
             raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
@@ -157,31 +156,30 @@ class BaseDatabaseOperations:
     def fetch_returned_insert_id(self, cursor):
         """
         Given a cursor object that has just performed an INSERT...RETURNING
-        statement into a table that has an auto-incrementing ID, returns the
+        statement into a table that has an auto-incrementing ID, return the
         newly created ID.
         """
         return cursor.fetchone()[0]

     def field_cast_sql(self, db_type, internal_type):
         """
-        Given a column type (e.g. 'BLOB', 'VARCHAR'), and an internal type
-        (e.g. 'GenericIPAddressField'), returns the SQL necessary to cast it
-        before using it in a WHERE statement. Note that the resulting string
-        should contain a '%s' placeholder for the column being searched against.
+        Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
+        (e.g. 'GenericIPAddressField'), return the SQL to cast it before using
+        it in a WHERE statement. The resulting string should contain a '%s'
+        placeholder for the column being searched against.
         """
         return '%s'

     def force_no_ordering(self):
         """
-        Returns a list used in the "ORDER BY" clause to force no ordering at
-        all. Returning an empty list means that nothing will be included in the
-        ordering.
+        Return a list used in the "ORDER BY" clause to force no ordering at
+        all. Return an empty list to include nothing in the ordering.
         """
         return []

     def for_update_sql(self, nowait=False, skip_locked=False):
         """
-        Returns the FOR UPDATE SQL clause to lock rows for an update operation.
+        Return the FOR UPDATE SQL clause to lock rows for an update operation.
         """
         if nowait:
             return 'FOR UPDATE NOWAIT'
@@ -192,10 +190,10 @@ class BaseDatabaseOperations:

     def last_executed_query(self, cursor, sql, params):
         """
-        Returns a string of the query last executed by the given cursor, with
+        Return a string of the query last executed by the given cursor, with
         placeholders replaced with actual values.

-        `sql` is the raw query containing placeholders, and `params` is the
+        `sql` is the raw query containing placeholders and `params` is the
         sequence of parameters. These are used by default, but this method
         exists for database backends to provide a better implementation
         according to their own quoting schemes.
@@ -215,52 +213,51 @@ class BaseDatabaseOperations:
     def last_insert_id(self, cursor, table_name, pk_name):
         """
         Given a cursor object that has just performed an INSERT statement into
-        a table that has an auto-incrementing ID, returns the newly created ID.
+        a table that has an auto-incrementing ID, return the newly created ID.

-        This method also receives the table name and the name of the primary-key
-        column.
+        `pk_name` is the name of the primary-key column.
         """
         return cursor.lastrowid

     def lookup_cast(self, lookup_type, internal_type=None):
         """
-        Returns the string to use in a query when performing lookups
-        ("contains", "like", etc.). The resulting string should contain a '%s'
-        placeholder for the column being searched against.
+        Return the string to use in a query when performing lookups
+        ("contains", "like", etc.). It should contain a '%s' placeholder for
+        the column being searched against.
         """
         return "%s"

     def max_in_list_size(self):
         """
-        Returns the maximum number of items that can be passed in a single 'IN'
+        Return the maximum number of items that can be passed in a single 'IN'
         list condition, or None if the backend does not impose a limit.
         """
         return None

     def max_name_length(self):
         """
-        Returns the maximum length of table and column names, or None if there
+        Return the maximum length of table and column names, or None if there
         is no limit.
         """
         return None

     def no_limit_value(self):
         """
-        Returns the value to use for the LIMIT when we are wanting "LIMIT
-        infinity". Returns None if the limit clause can be omitted in this case.
+        Return the value to use for the LIMIT when we are wanting "LIMIT
+        infinity". Return None if the limit clause can be omitted in this case.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')

     def pk_default_value(self):
         """
-        Returns the value to use during an INSERT statement to specify that
+        Return the value to use during an INSERT statement to specify that
         the field should use its default value.
         """
         return 'DEFAULT'

     def prepare_sql_script(self, sql):
         """
-        Takes an SQL script that may contain multiple lines and returns a list
+        Take an SQL script that may contain multiple lines and return a list
         of statements to feed to successive cursor.execute() calls.

         Since few databases are able to process raw SQL scripts in a single
@@ -280,23 +277,23 @@ class BaseDatabaseOperations:

     def process_clob(self, value):
         """
-        Returns the value of a CLOB column, for backends that return a locator
+        Return the value of a CLOB column, for backends that return a locator
         object that requires additional processing.
         """
         return value

     def return_insert_id(self):
         """
-        For backends that support returning the last insert ID as part
-        of an insert query, this method returns the SQL and params to
-        append to the INSERT query. The returned fragment should
-        contain a format string to hold the appropriate column.
+        For backends that support returning the last insert ID as part of an
+        insert query, return the SQL and params to append to the INSERT query.
+        The returned fragment should contain a format string to hold the
+        appropriate column.
         """
         pass

     def compiler(self, compiler_name):
         """
-        Returns the SQLCompiler class corresponding to the given name,
+        Return the SQLCompiler class corresponding to the given name,
         in the namespace corresponding to the `compiler_module` attribute
         on this backend.
         """
@@ -306,31 +303,29 @@ class BaseDatabaseOperations:

     def quote_name(self, name):
         """
-        Returns a quoted version of the given table, index or column name. Does
+        Return a quoted version of the given table, index, or column name. Do
         not quote the given name if it's already been quoted.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')

     def random_function_sql(self):
-        """
-        Returns an SQL expression that returns a random value.
-        """
+        """Return an SQL expression that returns a random value."""
         return 'RANDOM()'

     def regex_lookup(self, lookup_type):
         """
-        Returns the string to use in a query when performing regular expression
-        lookups (using "regex" or "iregex"). The resulting string should
-        contain a '%s' placeholder for the column being searched against.
+        Return the string to use in a query when performing regular expression
+        lookups (using "regex" or "iregex"). It should contain a '%s'
+        placeholder for the column being searched against.

-        If the feature is not supported (or part of it is not supported), a
-        NotImplementedError exception can be raised.
+        If the feature is not supported (or part of it is not supported), raise
+        NotImplementedError.
         """
         raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')

     def savepoint_create_sql(self, sid):
         """
-        Returns the SQL for starting a new savepoint. Only required if the
+        Return the SQL for starting a new savepoint. Only required if the
         "uses_savepoints" feature is True. The "sid" parameter is a string
         for the savepoint id.
         """
@@ -338,32 +333,30 @@ class BaseDatabaseOperations:

     def savepoint_commit_sql(self, sid):
         """
-        Returns the SQL for committing the given savepoint.
+        Return the SQL for committing the given savepoint.
         """
         return "RELEASE SAVEPOINT %s" % self.quote_name(sid)

     def savepoint_rollback_sql(self, sid):
         """
-        Returns the SQL for rolling back the given savepoint.
+        Return the SQL for rolling back the given savepoint.
         """
         return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)

     def set_time_zone_sql(self):
         """
-        Returns the SQL that will set the connection's time zone.
+        Return the SQL that will set the connection's time zone.

-        Returns '' if the backend doesn't support time zones.
+        Return '' if the backend doesn't support time zones.
         """
         return ''

     def sql_flush(self, style, tables, sequences, allow_cascade=False):
         """
-        Returns a list of SQL statements required to remove all data from
+        Return a list of SQL statements required to remove all data from
         the given database tables (without actually removing the tables
-        themselves).
-
-        The returned value also includes SQL statements required to reset DB
-        sequences passed in :param sequences:.
+        themselves) and the SQL statements required to reset the sequences
+        passed in `sequences`.

         The `style` argument is a Style object as returned by either
         color_style() or no_style() in django.core.management.color.
@@ -376,8 +369,8 @@ class BaseDatabaseOperations:

     def sequence_reset_by_name_sql(self, style, sequences):
         """
-        Returns a list of the SQL statements required to reset sequences
-        passed in :param sequences:.
+        Return a list of the SQL statements required to reset sequences
+        passed in `sequences`.

         The `style` argument is a Style object as returned by either
         color_style() or no_style() in django.core.management.color.
@@ -386,7 +379,7 @@ class BaseDatabaseOperations:

     def sequence_reset_sql(self, style, model_list):
         """
-        Returns a list of the SQL statements required to reset sequences for
+        Return a list of the SQL statements required to reset sequences for
         the given models.

         The `style` argument is a Style object as returned by either
@@ -395,32 +388,28 @@ class BaseDatabaseOperations:
         return []  # No sequence reset required by default.

     def start_transaction_sql(self):
-        """
-        Returns the SQL statement required to start a transaction.
-        """
+        """Return the SQL statement required to start a transaction."""
         return "BEGIN;"

     def end_transaction_sql(self, success=True):
-        """
-        Returns the SQL statement required to end a transaction.
-        """
+        """Return the SQL statement required to end a transaction."""
         if not success:
             return "ROLLBACK;"
         return "COMMIT;"

     def tablespace_sql(self, tablespace, inline=False):
         """
-        Returns the SQL that will be used in a query to define the tablespace.
+        Return the SQL that will be used in a query to define the tablespace.

-        Returns '' if the backend doesn't support tablespaces.
+        Return '' if the backend doesn't support tablespaces.

-        If inline is True, the SQL is appended to a row; otherwise it's appended
-        to the entire CREATE TABLE or CREATE INDEX statement.
+        If `inline` is True, append the SQL to a row; otherwise append it to
+        the entire CREATE TABLE or CREATE INDEX statement.
         """
         return ''

     def prep_for_like_query(self, x):
-        """Prepares a value for use in a LIKE query."""
+        """Prepare a value for use in a LIKE query."""
         return force_text(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")

     # Same as prep_for_like_query(), but called for "iexact" matches, which
@@ -430,14 +419,14 @@ class BaseDatabaseOperations:
     def validate_autopk_value(self, value):
         """
         Certain backends do not accept some values for "serial" fields
-        (for example zero in MySQL). This method will raise a ValueError
-        if the value is invalid, otherwise returns validated value.
+        (for example zero in MySQL). Raise a ValueError if the value is
+        invalid, otherwise return the validated value.
         """
         return value
 
     def adapt_unknown_value(self, value):
         """
-        Transforms a value to something compatible with the backend driver.
+        Transform a value to something compatible with the backend driver.
 
         This method only depends on the type of the value. It's designed for
         cases where the target type isn't known, such as .raw() SQL queries.
@@ -456,7 +445,7 @@ class BaseDatabaseOperations:
 
     def adapt_datefield_value(self, value):
         """
-        Transforms a date value to an object compatible with what is expected
+        Transform a date value to an object compatible with what is expected
         by the backend driver for date columns.
         """
         if value is None:
@@ -465,7 +454,7 @@ class BaseDatabaseOperations:
 
     def adapt_datetimefield_value(self, value):
         """
-        Transforms a datetime value to an object compatible with what is expected
+        Transform a datetime value to an object compatible with what is expected
         by the backend driver for datetime columns.
         """
         if value is None:
@@ -474,7 +463,7 @@ class BaseDatabaseOperations:
 
     def adapt_timefield_value(self, value):
         """
-        Transforms a time value to an object compatible with what is expected
+        Transform a time value to an object compatible with what is expected
         by the backend driver for time columns.
         """
         if value is None:
@@ -485,21 +474,21 @@ class BaseDatabaseOperations:
 
     def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
         """
-        Transforms a decimal.Decimal value to an object compatible with what is
+        Transform a decimal.Decimal value to an object compatible with what is
         expected by the backend driver for decimal (numeric) columns.
         """
         return utils.format_number(value, max_digits, decimal_places)
 
     def adapt_ipaddressfield_value(self, value):
         """
-        Transforms a string representation of an IP address into the expected
+        Transform a string representation of an IP address into the expected
         type for the backend driver.
         """
         return value or None
 
     def year_lookup_bounds_for_date_field(self, value):
         """
-        Returns a two-elements list with the lower and upper bound to be used
+        Return a two-elements list with the lower and upper bound to be used
         with a BETWEEN operator to query a DateField value using a year
         lookup.
 
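The year-lookup docstring above describes a pair of BETWEEN bounds; a simplified sketch of that contract for a DateField (ignoring the adapt_datefield_value() step the real backends apply):

    import datetime

    def year_lookup_bounds_for_date_field(value):
        # Lower and upper bounds for `WHERE date_col BETWEEN %s AND %s`.
        return [datetime.date(value, 1, 1), datetime.date(value, 12, 31)]

    assert year_lookup_bounds_for_date_field(2017) == [
        datetime.date(2017, 1, 1),
        datetime.date(2017, 12, 31),
    ]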
@@ -513,7 +502,7 @@ class BaseDatabaseOperations:
 
     def year_lookup_bounds_for_datetime_field(self, value):
         """
-        Returns a two-elements list with the lower and upper bound to be used
+        Return a two-elements list with the lower and upper bound to be used
         with a BETWEEN operator to query a DateTimeField value using a year
         lookup.
 
@@ -531,7 +520,7 @@ class BaseDatabaseOperations:
 
     def get_db_converters(self, expression):
         """
-        Get a list of functions needed to convert field data.
+        Return a list of functions needed to convert field data.
 
         Some field types on some backends do not provide data in the correct
         format, this is the hook for converter functions.
@@ -556,10 +545,11 @@ class BaseDatabaseOperations:
         pass
 
     def combine_expression(self, connector, sub_expressions):
-        """Combine a list of subexpressions into a single expression, using
+        """
+        Combine a list of subexpressions into a single expression, using
         the provided connecting operator. This is required because operators
         can vary between backends (e.g., Oracle with %% and &) and between
-        subexpression types (e.g., date expressions)
+        subexpression types (e.g., date expressions).
         """
         conn = ' %s ' % connector
         return conn.join(sub_expressions)
@@ -575,7 +565,8 @@ class BaseDatabaseOperations:
         return '%s'
 
     def modify_insert_params(self, placeholder, params):
-        """Allow modification of insert parameters. Needed for Oracle Spatial
+        """
+        Allow modification of insert parameters. Needed for Oracle Spatial
         backend due to #10888.
         """
         return params
@@ -583,7 +574,7 @@ class BaseDatabaseOperations:
     def integer_field_range(self, internal_type):
         """
         Given an integer field internal type (e.g. 'PositiveIntegerField'),
-        returns a tuple of the (min_value, max_value) form representing the
+        return a tuple of the (min_value, max_value) form representing the
         range of the column type bound to the field.
         """
         return self.integer_field_ranges[internal_type]
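combine_expression() above is small enough to demonstrate directly; this sketch mirrors the base implementation shown in the hunk:

    def combine_expression(connector, sub_expressions):
        # Join SQL fragments with the connector, padded with spaces.
        conn = ' %s ' % connector
        return conn.join(sub_expressions)

    assert combine_expression('+', ['"price"', '%s']) == '"price" + %s'
    assert combine_expression('AND', ['a = 1', 'b = 2']) == 'a = 1 AND b = 2'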
@@ -11,8 +11,8 @@ logger = logging.getLogger('django.db.backends.schema')
 
 
 def _related_non_m2m_objects(old_field, new_field):
-    # Filters out m2m objects from reverse relations.
-    # Returns (old_relation, new_relation) tuples.
+    # Filter out m2m objects from reverse relations.
+    # Return (old_relation, new_relation) tuples.
     return zip(
         (obj for obj in old_field.model._meta.related_objects if not obj.field.many_to_many),
         (obj for obj in new_field.model._meta.related_objects if not obj.field.many_to_many)
@@ -21,17 +21,9 @@ def _related_non_m2m_objects(old_field, new_field):
 
 class BaseDatabaseSchemaEditor:
     """
-    This class (and its subclasses) are responsible for emitting schema-changing
+    This class and its subclasses are responsible for emitting schema-changing
     statements to the databases - model creation/removal/alteration, field
     renaming, index fiddling, and so on.
-
-    It is intended to eventually completely replace DatabaseCreation.
-
-    This class should be used by creating an instance for each set of schema
-    changes (e.g. a migration file), and by first calling start(),
-    then the relevant actions, and then commit(). This is necessary to allow
-    things like circular foreign key references - FKs will only be created once
-    commit() is called.
     """
 
     # Overrideable SQL templates
@@ -96,9 +88,7 @@ class BaseDatabaseSchemaEditor:
     # Core utility functions
 
     def execute(self, sql, params=()):
-        """
-        Executes the given SQL statement, with optional parameters.
-        """
+        """Execute the given SQL statement, with optional parameters."""
         # Don't perform the transactional DDL check if SQL is being collected
         # as it's not going to be executed anyway.
         if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl:
@@ -124,7 +114,7 @@ class BaseDatabaseSchemaEditor:
     @classmethod
     def _digest(cls, *args):
         """
-        Generates a 32-bit digest of a set of arguments that can be used to
+        Generate a 32-bit digest of a set of arguments that can be used to
         shorten identifying names.
         """
         h = hashlib.md5()
@@ -136,8 +126,8 @@ class BaseDatabaseSchemaEditor:
 
     def column_sql(self, model, field, include_default=False):
         """
-        Takes a field and returns its column definition.
-        The field must already have had set_attributes_from_name called.
+        Take a field and return its column definition.
+        The field must already have had set_attributes_from_name() called.
         """
         # Get the column's type and use that as the basis of the SQL
         db_params = field.db_parameters(connection=self.connection)
@@ -199,9 +189,7 @@ class BaseDatabaseSchemaEditor:
         )
 
     def effective_default(self, field):
-        """
-        Returns a field's effective database default value
-        """
+        """Return a field's effective database default value."""
         if field.has_default():
             default = field.get_default()
         elif not field.null and field.blank and field.empty_strings_allowed:
@@ -230,7 +218,7 @@ class BaseDatabaseSchemaEditor:
 
     def quote_value(self, value):
         """
-        Returns a quoted version of the value so it's safe to use in an SQL
+        Return a quoted version of the value so it's safe to use in an SQL
         string. This is not safe against injection from user code; it is
         intended only for use in making SQL scripts or preparing default values
         for particularly tricky backends (defaults are not user-defined, though,
@@ -242,8 +230,8 @@ class BaseDatabaseSchemaEditor:
 
     def create_model(self, model):
         """
-        Takes a model and creates a table for it in the database.
-        Will also create any accompanying indexes or unique constraints.
+        Create a table and any accompanying indexes or unique constraints for
+        the given `model`.
         """
         # Create column SQL, add FK deferreds if needed
         column_sqls = []
@@ -310,9 +298,7 @@ class BaseDatabaseSchemaEditor:
                 self.create_model(field.remote_field.through)
 
     def delete_model(self, model):
-        """
-        Deletes a model from the database.
-        """
+        """Delete a model from the database."""
         # Handle auto-created intermediary models
         for field in model._meta.local_many_to_many:
             if field.remote_field.through._meta.auto_created:
@@ -324,22 +310,18 @@ class BaseDatabaseSchemaEditor:
         })
 
     def add_index(self, model, index):
-        """
-        Add an index on a model.
-        """
+        """Add an index on a model."""
         self.execute(index.create_sql(model, self))
 
     def remove_index(self, model, index):
-        """
-        Remove an index from a model.
-        """
+        """Remove an index from a model."""
        self.execute(index.remove_sql(model, self))
 
     def alter_unique_together(self, model, old_unique_together, new_unique_together):
         """
-        Deals with a model changing its unique_together.
-        Note: The input unique_togethers must be doubly-nested, not the single-
-        nested ["foo", "bar"] format.
+        Deal with a model changing its unique_together. The input
+        unique_togethers must be doubly-nested, not the single-nested
+        ["foo", "bar"] format.
         """
         olds = set(tuple(fields) for fields in old_unique_together)
         news = set(tuple(fields) for fields in new_unique_together)
@@ -353,9 +335,9 @@ class BaseDatabaseSchemaEditor:
 
     def alter_index_together(self, model, old_index_together, new_index_together):
         """
-        Deals with a model changing its index_together.
-        Note: The input index_togethers must be doubly-nested, not the single-
-        nested ["foo", "bar"] format.
+        Deal with a model changing its index_together. The input
+        index_togethers must be doubly-nested, not the single-nested
+        ["foo", "bar"] format.
         """
         olds = set(tuple(fields) for fields in old_index_together)
         news = set(tuple(fields) for fields in new_index_together)
@@ -379,9 +361,7 @@ class BaseDatabaseSchemaEditor:
             self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
 
     def alter_db_table(self, model, old_db_table, new_db_table):
-        """
-        Renames the table a model points to.
-        """
+        """Rename the table a model points to."""
         if (old_db_table == new_db_table or
             (self.connection.features.ignores_table_name_case and
                 old_db_table.lower() == new_db_table.lower())):
@@ -392,9 +372,7 @@ class BaseDatabaseSchemaEditor:
         })
 
     def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
-        """
-        Moves a model's table between tablespaces
-        """
+        """Move a model's table between tablespaces."""
         self.execute(self.sql_retablespace_table % {
             "table": self.quote_name(model._meta.db_table),
             "old_tablespace": self.quote_name(old_db_tablespace),
@@ -403,9 +381,8 @@ class BaseDatabaseSchemaEditor:
 
     def add_field(self, model, field):
         """
-        Creates a field on a model.
-        Usually involves adding a column, but may involve adding a
-        table instead (for M2M fields)
+        Create a field on a model. Usually involves adding a column, but may
+        involve adding a table instead (for M2M fields).
         """
         # Special-case implicit M2M tables
         if field.many_to_many and field.remote_field.through._meta.auto_created:
@@ -447,7 +424,7 @@ class BaseDatabaseSchemaEditor:
 
     def remove_field(self, model, field):
         """
-        Removes a field from a model. Usually involves deleting a column,
+        Remove a field from a model. Usually involves deleting a column,
         but for M2Ms may involve deleting a table.
         """
         # Special-case implicit M2M tables
@@ -473,11 +450,11 @@ class BaseDatabaseSchemaEditor:
 
     def alter_field(self, model, old_field, new_field, strict=False):
         """
-        Allows a field's type, uniqueness, nullability, default, column,
-        constraints etc. to be modified.
-        Requires a copy of the old field as well so we can only perform
-        changes that are required.
-        If strict is true, raises errors if the old column does not match old_field precisely.
+        Allow a field's type, uniqueness, nullability, default, column,
+        constraints, etc. to be modified.
+        `old_field` is required to compute the necessary changes.
+        If `strict` is True, raise errors if the old column does not match
+        `old_field` precisely.
         """
         # Ensure this field is even column-based
         old_db_params = old_field.db_parameters(connection=self.connection)
@@ -514,8 +491,7 @@ class BaseDatabaseSchemaEditor:
 
     def _alter_field(self, model, old_field, new_field, old_type, new_type,
                      old_db_params, new_db_params, strict=False):
-        """Actually perform a "physical" (non-ManyToMany) field update."""
-
+        """Perform a "physical" (non-ManyToMany) field update."""
         # Drop any FK constraints, we'll remake them later
         fks_dropped = set()
         if old_field.remote_field and old_field.db_constraint:
@@ -797,9 +773,9 @@ class BaseDatabaseSchemaEditor:
         for cases when a creation type is different to an alteration type
         (e.g. SERIAL in PostgreSQL, PostGIS fields).
 
-        Should return two things; an SQL fragment of (sql, params) to insert
-        into an ALTER TABLE statement, and a list of extra (sql, params) tuples
-        to run once the field is altered.
+        Return a two-tuple of: an SQL fragment of (sql, params) to insert into
+        an ALTER TABLE statement and a list of extra (sql, params) tuples to
+        run once the field is altered.
         """
         return (
             (
@@ -813,9 +789,7 @@ class BaseDatabaseSchemaEditor:
         )
 
     def _alter_many_to_many(self, model, old_field, new_field, strict):
-        """
-        Alters M2Ms to repoint their to= endpoints.
-        """
+        """Alter M2Ms to repoint their to= endpoints."""
         # Rename the through table
         if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table:
             self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table,
@@ -837,7 +811,7 @@ class BaseDatabaseSchemaEditor:
 
     def _create_index_name(self, model, column_names, suffix=""):
         """
-        Generates a unique name for an index/unique constraint.
+        Generate a unique name for an index/unique constraint.
 
         The name is divided into 3 parts: the table name, the column names,
         and a unique digest and suffix.
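_create_index_name() is described above as "table name, column names, and a unique digest and suffix"; a hedged, illustrative sketch of that naming scheme (not the exact Django implementation, and the 30-character limit is only an example):

    import hashlib

    def create_index_name(table, columns, suffix="_idx", max_length=30):
        # A short digest keeps names unique even after truncation to the
        # backend's identifier length limit.
        digest = hashlib.md5("_".join([table] + list(columns)).encode()).hexdigest()[:8]
        name = "%s_%s_%s%s" % (table, "_".join(columns), digest, suffix)
        return name[:max_length]

    print(create_index_name("shop_order", ["customer_id", "created"]))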
@@ -895,8 +869,8 @@ class BaseDatabaseSchemaEditor:
 
     def _model_indexes_sql(self, model):
         """
-        Return all index SQL statements (field indexes, index_together,
-        Meta.indexes) for the specified model, as a list.
+        Return a list of all index SQL statements (field indexes,
+        index_together, Meta.indexes) for the specified model.
         """
         if not model._meta.managed or model._meta.proxy or model._meta.swapped:
             return []
@@ -967,9 +941,7 @@ class BaseDatabaseSchemaEditor:
     def _constraint_names(self, model, column_names=None, unique=None,
                           primary_key=None, index=None, foreign_key=None,
                           check=None):
-        """
-        Returns all constraint names matching the columns and conditions
-        """
+        """Return all constraint names matching the columns and conditions."""
         if column_names is not None:
             column_names = [
                 self.connection.introspection.column_name_converter(name)
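For orientation, this is how a schema editor is typically driven from application code; the model and field below are hypothetical and exist only for the example:

    from django.db import connection, models

    class Author(models.Model):  # hypothetical model
        name = models.CharField(max_length=100)

        class Meta:
            app_label = 'example'

    birthday = models.DateField(null=True)
    birthday.set_attributes_from_name('birthday')  # required before add_field()

    # One editor per set of schema changes; deferred SQL (e.g. FK constraints)
    # is flushed when the block exits.
    with connection.schema_editor() as editor:
        editor.create_model(Author)
        editor.add_field(Author, birthday)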
@@ -1,7 +1,5 @@
 class BaseDatabaseValidation:
-    """
-    This class encapsulates all backend-specific validation.
-    """
+    """Encapsulate backend-specific validation."""
     def __init__(self, connection):
         self.connection = connection
 

@@ -3,7 +3,7 @@ Dummy database backend for Django.
 
 Django uses this if the database ENGINE setting is empty (None or empty string).
 
-Each of these API functions, except connection.close(), raises
+Each of these API functions, except connection.close(), raise
 ImproperlyConfigured.
 """
 
@@ -51,10 +51,10 @@ server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
 
 class CursorWrapper:
     """
-    A thin wrapper around MySQLdb's normal cursor class so that we can catch
-    particular exception instances and reraise them with the right types.
+    A thin wrapper around MySQLdb's normal cursor class that catches particular
+    exception instances and reraises them with the correct types.
 
-    Implemented as a wrapper, rather than a subclass, so that we aren't stuck
+    Implemented as a wrapper, rather than a subclass, so that it isn't stuck
     to the particular underlying representation returned by Connection.cursor().
     """
     codes_for_integrityerror = (1048,)
@@ -269,8 +269,9 @@ class DatabaseWrapper(BaseDatabaseWrapper):
 
     def disable_constraint_checking(self):
         """
-        Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
-        to indicate constraint checks need to be re-enabled.
+        Disable foreign key checks, primarily for use in adding rows with
+        forward references. Always return True to indicate constraint checks
+        need to be re-enabled.
         """
         self.cursor().execute('SET foreign_key_checks=0')
         return True
@@ -289,14 +290,14 @@ class DatabaseWrapper(BaseDatabaseWrapper):
 
     def check_constraints(self, table_names=None):
         """
-        Checks each table name in `table_names` for rows with invalid foreign
+        Check each table name in `table_names` for rows with invalid foreign
         key references. This method is intended to be used in conjunction with
         `disable_constraint_checking()` and `enable_constraint_checking()`, to
         determine if rows with invalid references were entered while constraint
         checks were off.
 
-        Raises an IntegrityError on the first invalid foreign key reference
-        encountered (if any) and provides detailed information about the
+        Raise an IntegrityError on the first invalid foreign key reference
+        encountered (if any) and provide detailed information about the
         invalid reference in the error message.
 
         Backends can override this method if they can more directly apply
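The check_constraints() docstring above describes a disable/insert/re-enable/check cycle; a hedged sketch of that workflow (the loader helper and table names are hypothetical):

    from django.db import IntegrityError, connection

    connection.disable_constraint_checking()
    try:
        load_rows_with_forward_references()  # hypothetical loader
    finally:
        connection.enable_constraint_checking()

    try:
        connection.check_constraints(table_names=['app_book', 'app_author'])
    except IntegrityError as exc:
        print('invalid foreign key reference:', exc)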
@@ -50,16 +50,15 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         return field_type
 
     def get_table_list(self, cursor):
-        """
-        Returns a list of table and view names in the current database.
-        """
+        """Return a list of table and view names in the current database."""
         cursor.execute("SHOW FULL TABLES")
         return [TableInfo(row[0], {'BASE TABLE': 't', 'VIEW': 'v'}.get(row[1]))
                 for row in cursor.fetchall()]
 
     def get_table_description(self, cursor, table_name):
         """
-        Returns a description of the table, with the DB-API cursor.description interface."
+        Return a description of the table with the DB-API cursor.description
+        interface."
         """
         # information_schema database gives more accurate results for some figures:
         # - varchar length returned by cursor.description is an internal length,
@@ -99,7 +98,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_relations(self, cursor, table_name):
         """
-        Returns a dictionary of {field_name: (field_name_other_table, other_table)}
+        Return a dictionary of {field_name: (field_name_other_table, other_table)}
         representing all relationships to the given table.
         """
         constraints = self.get_key_columns(cursor, table_name)
@@ -110,8 +109,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_key_columns(self, cursor, table_name):
         """
-        Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
-        key columns in given table.
+        Return a list of (column_name, referenced_table_name, referenced_column_name)
+        for all key columns in the given table.
         """
         key_columns = []
         cursor.execute("""
@@ -153,7 +152,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_storage_engine(self, cursor, table_name):
         """
-        Retrieves the storage engine for a given table. Returns the default
+        Retrieve the storage engine for a given table. Return the default
         storage engine if the table doesn't exist.
         """
         cursor.execute(
@@ -167,7 +166,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
+        Retrieve any constraints or keys (unique, pk, fk, check, index) across
+        one or more columns.
         """
         constraints = {}
         # Get the actual constraint names and columns
@@ -277,8 +277,8 @@ class DatabaseWrapper(BaseDatabaseWrapper):
 
     def check_constraints(self, table_names=None):
         """
-        To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they
-        are returned to deferred.
+        Check constraints by setting them to immediate. Return them to deferred
+        afterward.
         """
         self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
         self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')

@@ -4,7 +4,7 @@ from django.db.models.sql import compiler
 class SQLCompiler(compiler.SQLCompiler):
     def as_sql(self, with_limits=True, with_col_aliases=False):
         """
-        Creates the SQL for this query. Returns the SQL string and list
+        Create the SQL for this query. Return the SQL string and list
         of parameters. This is overridden from the original Query class
         to handle the additional SQL Oracle requires to emulate LIMIT
         and OFFSET.
@@ -103,11 +103,12 @@ class DatabaseCreation(BaseDatabaseCreation):
 
     def _switch_to_test_user(self, parameters):
         """
-        Oracle doesn't have the concept of separate databases under the same user.
-        Thus, we use a separate user (see _create_test_db). This method is used
-        to switch to that user. We will need the main user again for clean-up when
-        we end testing, so we keep its credentials in SAVED_USER/SAVED_PASSWORD
-        entries in the settings dict.
+        Switch to the user that's used for creating the test database.
+
+        Oracle doesn't have the concept of separate databases under the same
+        user, so a separate user is used; see _create_test_db(). The main user
+        is also needed for cleanup when testing is completed, so save its
+        credentials in the SAVED_USER/SAVED_PASSWORD key in the settings dict.
         """
         real_settings = settings.DATABASES[self.connection.alias]
         real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = \
@@ -122,8 +123,8 @@ class DatabaseCreation(BaseDatabaseCreation):
 
     def set_as_test_mirror(self, primary_settings_dict):
         """
-        Set this database up to be used in testing as a mirror of a primary database
-        whose settings are given
+        Set this database up to be used in testing as a mirror of a primary
+        database whose settings are given.
         """
         self.connection.settings_dict['USER'] = primary_settings_dict['USER']
         self.connection.settings_dict['PASSWORD'] = primary_settings_dict['PASSWORD']
@@ -166,7 +167,7 @@ class DatabaseCreation(BaseDatabaseCreation):
     def _destroy_test_db(self, test_database_name, verbosity=1):
         """
         Destroy a test database, prompting the user for confirmation if the
-        database already exists. Returns the name of the test database created.
+        database already exists. Return the name of the test database created.
         """
         self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER']
         self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD']
@@ -292,9 +293,8 @@ class DatabaseCreation(BaseDatabaseCreation):
 
     def _test_settings_get(self, key, default=None, prefixed=None):
         """
-        Return a value from the test settings dict,
-        or a given default,
-        or a prefixed entry from the main settings dict
+        Return a value from the test settings dict, or a given default, or a
+        prefixed entry from the main settings dict.
         """
         settings_dict = self.connection.settings_dict
         val = settings_dict['TEST'].get(key, default)
@@ -345,9 +345,9 @@ class DatabaseCreation(BaseDatabaseCreation):
 
     def _get_test_db_name(self):
         """
-        We need to return the 'production' DB name to get the test DB creation
-        machinery to work. This isn't a great deal in this case because DB
-        names as handled by Django haven't real counterparts in Oracle.
+        Return the 'production' DB name to get the test DB creation machinery
+        to work. This isn't a great deal in this case because DB names as
+        handled by Django don't have real counterparts in Oracle.
         """
         return self.connection.settings_dict['NAME']
 
@@ -44,15 +44,16 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         return super().get_field_type(data_type, description)
 
     def get_table_list(self, cursor):
-        """
-        Returns a list of table and view names in the current database.
-        """
+        """Return a list of table and view names in the current database."""
         cursor.execute("SELECT TABLE_NAME, 't' FROM USER_TABLES UNION ALL "
                        "SELECT VIEW_NAME, 'v' FROM USER_VIEWS")
         return [TableInfo(row[0].lower(), row[1]) for row in cursor.fetchall()]
 
     def get_table_description(self, cursor, table_name):
-        "Returns a description of the table, with the DB-API cursor.description interface."
+        """
+        Return a description of the table with the DB-API cursor.description
+        interface.
+        """
         # user_tab_columns gives data default for columns
         cursor.execute("""
             SELECT
@@ -81,19 +82,19 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         return description
 
     def table_name_converter(self, name):
-        "Table name comparison is case insensitive under Oracle"
+        """Table name comparison is case insensitive under Oracle."""
         return name.lower()
 
     def _name_to_index(self, cursor, table_name):
         """
-        Returns a dictionary of {field_name: field_index} for the given table.
+        Return a dictionary of {field_name: field_index} for the given table.
         Indexes are 0-based.
         """
         return {d[0]: i for i, d in enumerate(self.get_table_description(cursor, table_name))}
 
     def get_relations(self, cursor, table_name):
         """
-        Returns a dictionary of {field_name: (field_name_other_table, other_table)}
+        Return a dictionary of {field_name: (field_name_other_table, other_table)}
         representing all relationships to the given table.
         """
         table_name = table_name.upper()
@@ -164,7 +165,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
+        Retrieve any constraints or keys (unique, pk, fk, check, index) across
+        one or more columns.
         """
         constraints = {}
         # Loop over the constraints, getting PKs, uniques, and checks
@@ -105,7 +105,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
     def normalize_name(self, name):
         """
         Get the properly shortened and uppercased identifier as returned by
-        quote_name(), but without the actual quotes.
+        quote_name() but without the quotes.
         """
         nn = self.quote_name(name)
         if nn[0] == '"' and nn[-1] == '"':
@@ -113,9 +113,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
         return nn
 
     def _generate_temp_name(self, for_name):
-        """
-        Generates temporary names for workarounds that need temp columns
-        """
+        """Generate temporary names for workarounds that need temp columns."""
         suffix = hex(hash(for_name)).upper()[1:]
         return self.normalize_name(for_name + "_" + suffix)
 
@@ -230,8 +230,8 @@ class DatabaseWrapper(BaseDatabaseWrapper):
 
     def check_constraints(self, table_names=None):
         """
-        To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they
-        are returned to deferred.
+        Check constraints by setting them to immediate. Return them to deferred
+        afterward.
         """
         self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
         self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
@@ -53,9 +53,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         return field_type
 
     def get_table_list(self, cursor):
-        """
-        Returns a list of table and view names in the current database.
-        """
+        """Return a list of table and view names in the current database."""
         cursor.execute("""
             SELECT c.relname, c.relkind
             FROM pg_catalog.pg_class c
@@ -68,7 +66,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 if row[0] not in self.ignored_tables]
 
     def get_table_description(self, cursor, table_name):
-        "Returns a description of the table, with the DB-API cursor.description interface."
+        """
+        Return a description of the table with the DB-API cursor.description
+        interface.
+        """
         # As cursor.description does not return reliably the nullable property,
         # we have to query the information_schema (#7783)
         cursor.execute("""
@@ -87,7 +88,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_relations(self, cursor, table_name):
         """
-        Returns a dictionary of {field_name: (field_name_other_table, other_table)}
+        Return a dictionary of {field_name: (field_name_other_table, other_table)}
         representing all relationships to the given table.
         """
         cursor.execute("""
@@ -202,16 +202,15 @@ class DatabaseOperations(BaseDatabaseOperations):
 
     def max_name_length(self):
         """
-        Returns the maximum length of an identifier.
+        Return the maximum length of an identifier.
 
-        Note that the maximum length of an identifier is 63 by default, but can
-        be changed by recompiling PostgreSQL after editing the NAMEDATALEN
-        macro in src/include/pg_config_manual.h .
+        The maximum length of an identifier is 63 by default, but can be
+        changed by recompiling PostgreSQL after editing the NAMEDATALEN
+        macro in src/include/pg_config_manual.h.
 
-        This implementation simply returns 63, but can easily be overridden by a
-        custom database backend that inherits most of its behavior from this one.
+        This implementation returns 63, but can be overridden by a custom
+        database backend that inherits most of its behavior from this one.
         """
-
         return 63
 
     def distinct_sql(self, fields):
@@ -52,9 +52,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
         return None
 
     def _alter_column_type_sql(self, table, old_field, new_field, new_type):
-        """
-        Makes ALTER TYPE with SERIAL make sense.
-        """
+        """Make ALTER TYPE with SERIAL make sense."""
         if new_type.lower() in ("serial", "bigserial"):
             column = new_field.column
             sequence_name = "%s_%s_seq" % (table, column)
@@ -1,5 +1,5 @@
 """
-Extracts the version of the PostgreSQL server.
+Extract the version of the PostgreSQL server.
 """
 
 import re
@@ -24,7 +24,7 @@ def _parse_version(text):
 
 def get_version(connection):
     """
-    Returns an integer representing the major, minor and revision number of the
+    Return an integer representing the major, minor, and revision number of the
     server. Format is the one used for the return value of libpq
     PQServerVersion()/``server_version`` connection attribute (available in
     newer psycopg2 versions.)
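get_version() above returns the PQServerVersion()-style integer; a small sketch of how such a value decomposes (the 9.6.3 value is only an example):

    def decompose(version):
        # e.g. 90603 -> (9, 6, 3), matching libpq's server_version encoding.
        return version // 10000, (version % 10000) // 100, version % 100

    assert decompose(90603) == (9, 6, 3)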
@@ -27,9 +27,8 @@ from .schema import DatabaseSchemaEditor  # isort:skip
 
 
 def decoder(conv_func):
-    """ The Python sqlite3 interface returns always byte strings.
-    This function converts the received value to a regular string before
-    passing it to the receiver function.
+    """
+    Convert bytestrings from Python's sqlite3 interface to a regular string.
     """
     return lambda s: conv_func(s.decode())
 
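decoder() above wraps a converter so it receives text instead of bytes; a hedged sketch of how such a wrapper is registered with the standard sqlite3 module (the "decimal" converter name is only an example):

    import decimal
    import sqlite3

    def decoder(conv_func):
        # sqlite3 hands converters bytestrings; decode before converting.
        return lambda s: conv_func(s.decode())

    sqlite3.register_converter("decimal", decoder(decimal.Decimal))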
@@ -215,14 +214,14 @@ class DatabaseWrapper(BaseDatabaseWrapper):
 
     def check_constraints(self, table_names=None):
         """
-        Checks each table name in `table_names` for rows with invalid foreign
+        Check each table name in `table_names` for rows with invalid foreign
         key references. This method is intended to be used in conjunction with
         `disable_constraint_checking()` and `enable_constraint_checking()`, to
         determine if rows with invalid references were entered while constraint
         checks were off.
 
-        Raises an IntegrityError on the first invalid foreign key reference
-        encountered (if any) and provides detailed information about the
+        Raise an IntegrityError on the first invalid foreign key reference
+        encountered (if any) and provide detailed information about the
         invalid reference in the error message.
 
         Backends can override this method if they can more directly apply
@@ -99,7 +99,7 @@ class DatabaseCreation(BaseDatabaseCreation):
 
     def test_db_signature(self):
         """
-        Returns a tuple that uniquely identifies a test database.
+        Return a tuple that uniquely identifies a test database.
 
         This takes into account the special cases of ":memory:" and "" for
         SQLite since the databases will be distinct despite having the same
@@ -53,12 +53,12 @@ class DatabaseFeatures(BaseDatabaseFeatures):
 
     @cached_property
     def supports_stddev(self):
-        """Confirm support for STDDEV and related stats functions
+        """
+        Confirm support for STDDEV and related stats functions.
 
         SQLite supports STDDEV as an extension package; so
         connection.ops.check_expression_support() can't unilaterally
-        rule out support for STDDEV. We need to manually check
-        whether the call works.
+        rule out support for STDDEV. Manually check whether the call works.
         """
         with self.connection.cursor() as cursor:
             cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
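supports_stddev above probes the database rather than assuming; a hedged sketch of the same probe outside Django, using the standard sqlite3 module:

    import sqlite3

    def supports_stddev(conn):
        # STDDEV only exists if an extension provides it; try it and see.
        try:
            conn.execute('CREATE TABLE STDDEV_TEST (X INT)')
            conn.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
            return True
        except sqlite3.OperationalError:
            return False
        finally:
            conn.execute('DROP TABLE IF EXISTS STDDEV_TEST')

    print(supports_stddev(sqlite3.connect(':memory:')))  # False on stock SQLite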
@@ -58,9 +58,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
     data_types_reverse = FlexibleFieldLookupDict()
 
     def get_table_list(self, cursor):
-        """
-        Returns a list of table and view names in the current database.
-        """
+        """Return a list of table and view names in the current database."""
         # Skip the sqlite_sequence system table used for autoincrement key
         # generation.
         cursor.execute("""
@@ -70,7 +68,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         return [TableInfo(row[0], row[1][0]) for row in cursor.fetchall()]
 
     def get_table_description(self, cursor, table_name):
-        "Returns a description of the table, with the DB-API cursor.description interface."
+        """
+        Return a description of the table with the DB-API cursor.description
+        interface.
+        """
         return [
             FieldInfo(
                 info['name'],
@@ -156,8 +157,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_key_columns(self, cursor, table_name):
         """
-        Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
-        key columns in given table.
+        Return a list of (column_name, referenced_table_name, referenced_column_name)
+        for all key columns in given table.
         """
         key_columns = []
 
@@ -207,9 +208,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         return indexes
 
     def get_primary_key_column(self, cursor, table_name):
-        """
-        Get the column name of the primary key for the given table.
-        """
+        """Return the column name of the primary key for the given table."""
         # Don't use PRAGMA because that causes issues with some transactions
         cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
         row = cursor.fetchone()
@@ -238,7 +237,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
+        Retrieve any constraints or keys (unique, pk, fk, check, index) across
+        one or more columns.
         """
         constraints = {}
         # Get the index info
@@ -43,31 +43,24 @@ class DatabaseOperations(BaseDatabaseOperations):
                     pass
 
     def date_extract_sql(self, lookup_type, field_name):
-        # sqlite doesn't support extract, so we fake it with the user-defined
-        # function django_date_extract that's registered in connect(). Note that
-        # single quotes are used because this is a string (and could otherwise
-        # cause a collision with a field name).
+        """
+        Support EXTRACT with a user-defined function django_date_extract()
+        that's registered in connect(). Use single quotes because this is a
+        string and could otherwise cause a collision with a field name.
+        """
         return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name)
 
     def date_interval_sql(self, timedelta):
         return "'%s'" % duration_string(timedelta), []
 
     def format_for_duration_arithmetic(self, sql):
-        """Do nothing here, we will handle it in the custom function."""
+        """Do nothing since formatting is handled in the custom function."""
         return sql
 
     def date_trunc_sql(self, lookup_type, field_name):
-        # sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
-        # function django_date_trunc that's registered in connect(). Note that
-        # single quotes are used because this is a string (and could otherwise
-        # cause a collision with a field name).
         return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
 
     def time_trunc_sql(self, lookup_type, field_name):
-        # sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
-        # function django_date_trunc that's registered in connect(). Note that
-        # single quotes are used because this is a string (and could otherwise
-        # cause a collision with a field name).
         return "django_time_trunc('%s', %s)" % (lookup_type.lower(), field_name)
 
     def _convert_tzname_to_sql(self, tzname):
@@ -84,22 +77,16 @@ class DatabaseOperations(BaseDatabaseOperations):
         )
 
     def datetime_extract_sql(self, lookup_type, field_name, tzname):
-        # Same comment as in date_extract_sql.
         return "django_datetime_extract('%s', %s, %s)" % (
             lookup_type.lower(), field_name, self._convert_tzname_to_sql(tzname),
         )
 
     def datetime_trunc_sql(self, lookup_type, field_name, tzname):
-        # Same comment as in date_trunc_sql.
         return "django_datetime_trunc('%s', %s, %s)" % (
             lookup_type.lower(), field_name, self._convert_tzname_to_sql(tzname),
         )
 
     def time_extract_sql(self, lookup_type, field_name):
-        # sqlite doesn't support extract, so we fake it with the user-defined
-        # function django_time_extract that's registered in connect(). Note that
-        # single quotes are used because this is a string (and could otherwise
-        # cause a collision with a field name).
         return "django_time_extract('%s', %s)" % (lookup_type.lower(), field_name)
 
     def pk_default_value(self):
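The SQLite operations above lean on user-defined SQL functions registered in connect(); a hedged, minimal sketch of that mechanism with the standard sqlite3 module (the stand-in function handles only a few lookup types):

    import sqlite3

    def date_extract(lookup_type, date_str):
        # Minimal stand-in for django_date_extract(): pull one component
        # out of an ISO date string.
        year, month, day = map(int, date_str.split('-'))
        return {'year': year, 'month': month, 'day': day}[lookup_type]

    conn = sqlite3.connect(':memory:')
    conn.create_function('django_date_extract', 2, date_extract)
    print(conn.execute("SELECT django_date_extract('year', '2017-03-01')").fetchone())  # (2017,)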
@@ -19,7 +19,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
 # Some SQLite schema alterations need foreign key constraints to be
 # disabled. This is the default in SQLite but can be changed with a
 # build flag and might change in future, so can't be relied upon.
-# We enforce it here for the duration of the transaction.
+# Enforce it here for the duration of the transaction.
 c.execute('PRAGMA foreign_keys')
 self._initial_pragma_fk = c.fetchone()[0]
 c.execute('PRAGMA foreign_keys = 0')
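The PRAGMA sequence above is easy to follow in isolation with the standard sqlite3 module; a minimal sketch, with the schema work itself elided:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    c = conn.cursor()
    c.execute('PRAGMA foreign_keys')
    initial_pragma_fk = c.fetchone()[0]   # remember the original setting (0 or 1)
    c.execute('PRAGMA foreign_keys = 0')  # disable enforcement while tables are remade
    # ... remake/alter tables here ...
    c.execute('PRAGMA foreign_keys = %d' % initial_pragma_fk)  # restore the original value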
@@ -225,9 +225,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
 
 def add_field(self, model, field):
 """
-Creates a field on a model.
-Usually involves adding a column, but may involve adding a
-table instead (for M2M fields)
+Create a field on a model. Usually involves adding a column, but may
+involve adding a table instead (for M2M fields).
 """
 # Special-case implicit M2M tables
 if field.many_to_many and field.remote_field.through._meta.auto_created:
@@ -236,7 +235,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
 
 def remove_field(self, model, field):
 """
-Removes a field from a model. Usually involves deleting a column,
+Remove a field from a model. Usually involves deleting a column,
 but for M2Ms may involve deleting a table.
 """
 # M2M fields are a special case
@@ -254,14 +253,12 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
 
 def _alter_field(self, model, old_field, new_field, old_type, new_type,
 old_db_params, new_db_params, strict=False):
-"""Actually perform a "physical" (non-ManyToMany) field update."""
+"""Perform a "physical" (non-ManyToMany) field update."""
 # Alter by remaking table
 self._remake_table(model, alter_field=(old_field, new_field))
 
 def _alter_many_to_many(self, model, old_field, new_field, strict):
-"""
-Alters M2Ms to repoint their to= endpoints.
-"""
+"""Alter M2Ms to repoint their to= endpoints."""
 if old_field.remote_field.through._meta.db_table == new_field.remote_field.through._meta.db_table:
 # The field name didn't change, but some options did; we have to propagate this altering.
 self._remake_table(
@@ -114,7 +114,7 @@ class CursorDebugWrapper(CursorWrapper):
 ###############################################
 
 def typecast_date(s):
-return datetime.date(*map(int, s.split('-'))) if s else None  # returns None if s is null
+return datetime.date(*map(int, s.split('-'))) if s else None  # return None if s is null
 
 
 def typecast_time(s):  # does NOT store time zone information
@@ -136,8 +136,7 @@ def typecast_timestamp(s):  # does NOT store time zone information
 if ' ' not in s:
 return typecast_date(s)
 d, t = s.split()
-# Extract timezone information, if it exists. Currently we just throw
-# it away, but in the future we may make use of it.
+# Extract timezone information, if it exists. Currently it's ignored.
 if '-' in t:
 t, tz = t.split('-', 1)
 tz = '-' + tz
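The typecast helpers above parse string values coming back from the database. The sketch below mirrors the timestamp logic in simplified form (split the date and time parts, drop any trailing timezone offset); it is an illustration, not a copy of the Django helper:

    import datetime

    def typecast_timestamp_sketch(s):
        if not s:
            return None
        if ' ' not in s:
            # date-only values degrade to a plain date
            return datetime.date(*map(int, s.split('-')))
        d, t = s.split()
        # Extract timezone information, if it exists, and ignore it.
        if '-' in t:
            t = t.split('-', 1)[0]
        elif '+' in t:
            t = t.split('+', 1)[0]
        dates = d.split('-')
        times = t.split(':')
        seconds = times[2] if len(times) > 2 else '0'
        return datetime.datetime(
            int(dates[0]), int(dates[1]), int(dates[2]),
            int(times[0]), int(times[1]), int(float(seconds)),
        )

    print(typecast_timestamp_sketch('2017-02-14 09:30:00-05'))  # 2017-02-14 09:30:00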
@@ -195,7 +194,7 @@ def truncate_name(name, length=None, hash_len=4):
 
 def format_number(value, max_digits, decimal_places):
 """
-Formats a number into a string with the requisite number of digits and
+Format a number into a string with the requisite number of digits and
 decimal places.
 """
 if value is None:
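format_number() only appears in passing here; the general idea is to quantize the value to the requested precision with the decimal module. A rough approximation for illustration (Django's real helper also handles the max_digits-only case and rounding traps):

    import decimal

    def format_number_sketch(value, max_digits, decimal_places):
        if value is None:
            return None
        context = decimal.getcontext().copy()
        if max_digits is not None:
            context.prec = max_digits
        if decimal_places is not None:
            value = value.quantize(
                decimal.Decimal(1).scaleb(-decimal_places), context=context)
        return '{:f}'.format(value)

    print(format_number_sketch(decimal.Decimal('1.2345'), 5, 2))  # 1.23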
@@ -18,9 +18,9 @@ from .topological_sort import stable_topological_sort
 
 class MigrationAutodetector:
 """
-Takes a pair of ProjectStates, and compares them to see what the
-first would need doing to make it match the second (the second
-usually being the project's current state).
+Take a pair of ProjectStates and compare them to see what the first would
+need doing to make it match the second (the second usually being the
+project's current state).
 
 Note that this naturally operates on entire projects at a time,
 as it's likely that changes interact (for example, you can't
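For orientation, a hedged usage sketch of the autodetector touched in this file, assuming a configured Django project; the constructor takes the two ProjectStates described in the docstring (plus an optional questioner), and changes() needs the loaded graph so new migrations extend its leaves:

    from django.apps import apps
    from django.db import connection
    from django.db.migrations.autodetector import MigrationAutodetector
    from django.db.migrations.loader import MigrationLoader
    from django.db.migrations.state import ProjectState

    loader = MigrationLoader(connection)
    autodetector = MigrationAutodetector(
        loader.project_state(),        # state described by the applied migrations
        ProjectState.from_apps(apps),  # state described by the current models
    )
    changes = autodetector.changes(graph=loader.graph)
    for app_label, app_migrations in changes.items():
        print(app_label, [migration.name for migration in app_migrations])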
@@ -38,7 +38,7 @@ class MigrationAutodetector:
 def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
 """
 Main entry point to produce a list of applicable changes.
-Takes a graph to base names on and an optional set of apps
+Take a graph to base names on and an optional set of apps
 to try and restrict to (restriction is not guaranteed)
 """
 changes = self._detect_changes(convert_apps, graph)
@@ -90,9 +90,8 @@ class MigrationAutodetector:
 def only_relation_agnostic_fields(self, fields):
 """
 Return a definition of the fields that ignores field names and
-what related fields actually relate to.
-Used for detecting renames (as, of course, the related fields
-change during renames)
+what related fields actually relate to. Used for detecting renames (as,
+of course, the related fields change during renames).
 """
 fields_def = []
 for name, field in sorted(fields):
@@ -104,7 +103,7 @@ class MigrationAutodetector:
 
 def _detect_changes(self, convert_apps=None, graph=None):
 """
-Returns a dict of migration plans which will achieve the
+Return a dict of migration plans which will achieve the
 change from from_state to to_state. The dict has app labels
 as keys and a list of migrations as values.
 
@@ -117,11 +116,10 @@ class MigrationAutodetector:
 graph is an optional argument that, if provided, can help improve
 dependency generation and avoid potential circular dependencies.
 """
-
 # The first phase is generating all the operations for each app
 # and gathering them into a big per-app list.
-# We'll then go through that list later and order it and split
-# into migrations to resolve dependencies caused by M2Ms and FKs.
+# Then go through that list, order it, and split into migrations to
+# resolve dependencies caused by M2Ms and FKs.
 self.generated_operations = {}
 self.altered_indexes = {}
 
@@ -198,9 +196,9 @@ class MigrationAutodetector:
 
 def _prepare_field_lists(self):
 """
-Prepare field lists, and prepare a list of the fields that used
-through models in the old state so we can make dependencies
-from the through model deletion to the field that uses it.
+Prepare field lists and a list of the fields that used through models
+in the old state so dependencies can be made from the through model
+deletion to the field that uses it.
 """
 self.kept_model_keys = set(self.old_model_keys).intersection(self.new_model_keys)
 self.kept_proxy_keys = set(self.old_proxy_keys).intersection(self.new_proxy_keys)
@@ -216,9 +214,7 @@ class MigrationAutodetector:
 self.new_field_keys.update((app_label, model_name, x) for x, y in new_model_state.fields)
 
 def _generate_through_model_map(self):
-"""
-Through model map generation
-"""
+"""Through model map generation."""
 for app_label, model_name in sorted(self.old_model_keys):
 old_model_name = self.renamed_models.get((app_label, model_name), model_name)
 old_model_state = self.from_state.models[app_label, old_model_name]
@@ -234,13 +230,13 @@ class MigrationAutodetector:
 
 def _build_migration_list(self, graph=None):
 """
-We need to chop the lists of operations up into migrations with
-dependencies on each other. We do this by stepping up an app's list of
-operations until we find one that has an outgoing dependency that isn't
-in another app's migration yet (hasn't been chopped off its list). We
-then chop off the operations before it into a migration and move onto
-the next app. If we loop back around without doing anything, there's a
-circular dependency (which _should_ be impossible as the operations are
+Chop the lists of operations up into migrations with dependencies on
+each other. Do this by going through an app's list of operations until
+one is found that has an outgoing dependency that isn't in another
+app's migration yet (hasn't been chopped off its list). Then chop off
+the operations before it into a migration and move onto the next app.
+If the loops completes without doing anything, there's a circular
+dependency (which _should_ be impossible as the operations are
 all split at this point so they can't depend and be depended on).
 """
 self.migrations = {}
@@ -325,9 +321,8 @@ class MigrationAutodetector:
 
 def _sort_migrations(self):
 """
-Reorder to make things possible. The order we have already isn't bad,
-but we need to pull a few things around so FKs work nicely inside the
-same app
+Reorder to make things possible. Reordering may be needed so FKs work
+nicely inside the same app.
 """
 for app_label, ops in sorted(self.generated_operations.items()):
 # construct a dependency graph for intra-app dependencies
@@ -360,8 +355,8 @@ class MigrationAutodetector:
 
 def check_dependency(self, operation, dependency):
 """
-Returns ``True`` if the given operation depends on the given dependency,
-``False`` otherwise.
+Return True if the given operation depends on the given dependency,
+False otherwise.
 """
 # Created model
 if dependency[2] is None and dependency[3] is True:
@@ -431,8 +426,8 @@ class MigrationAutodetector:
 
 def swappable_first_key(self, item):
 """
-Sorting key function that places potential swappable models first in
-lists of created models (only real way to solve #22783)
+Place potential swappable models first in lists of created models (only
+real way to solve #22783).
 """
 try:
 model = self.new_apps.get_model(item[0], item[1])
@@ -451,9 +446,9 @@ class MigrationAutodetector:
 
 def generate_renamed_models(self):
 """
-Finds any renamed models, and generates the operations for them,
-and removes the old entry from the model lists.
-Must be run before other model-level generation.
+Find any renamed models, generate the operations for them, and remove
+the old entry from the model lists. Must be run before other
+model-level generation.
 """
 self.renamed_models = {}
 self.renamed_models_rel = {}
@@ -490,11 +485,11 @@ class MigrationAutodetector:
 """
 Find all new models (both managed and unmanaged) and make create
 operations for them as well as separate operations to create any
-foreign key or M2M relationships (we'll optimize these back in later
-if we can).
+foreign key or M2M relationships (these are optimized later, if
+possible).
 
-We also defer any model options that refer to collections of fields
-that might be deferred (e.g. unique_together, index_together).
+Defer any model options that refer to collections of fields that might
+be deferred (e.g. unique_together, index_together).
 """
 old_keys = set(self.old_model_keys).union(self.old_unmanaged_keys)
 added_models = set(self.new_model_keys) - old_keys
@@ -643,10 +638,10 @@ class MigrationAutodetector:
 
 def generate_created_proxies(self):
 """
-Makes CreateModel statements for proxy models.
-We use the same statements as that way there's less code duplication,
-but of course for proxy models we can skip all that pointless field
-stuff and just chuck out an operation.
+Make CreateModel statements for proxy models. Use the same statements
+as that way there's less code duplication, but of course for proxy
+models it's safe to skip all the pointless field stuff and just chuck
+out an operation.
 """
 added = set(self.new_proxy_keys) - set(self.old_proxy_keys)
 for app_label, model_name in sorted(added):
@@ -679,10 +674,10 @@ class MigrationAutodetector:
 """
 Find all deleted models (managed and unmanaged) and make delete
 operations for them as well as separate operations to delete any
-foreign key or M2M relationships (we'll optimize these back in later
-if we can).
+foreign key or M2M relationships (these are optimized later, if
+possible).
 
-We also bring forward removal of any model options that refer to
+Also bring forward removal of any model options that refer to
 collections of fields - the inverse of generate_created_models().
 """
 new_keys = set(self.new_model_keys).union(self.new_unmanaged_keys)
@@ -769,9 +764,7 @@ class MigrationAutodetector:
 )
 
 def generate_deleted_proxies(self):
-"""
-Makes DeleteModel statements for proxy models.
-"""
+"""Make DeleteModel options for proxy models."""
 deleted = set(self.old_proxy_keys) - set(self.new_proxy_keys)
 for app_label, model_name in sorted(deleted):
 model_state = self.from_state.models[app_label, model_name]
@@ -784,9 +777,7 @@ class MigrationAutodetector:
 )
 
 def generate_renamed_fields(self):
-"""
-Works out renamed fields
-"""
+"""Work out renamed fields."""
 self.renamed_fields = {}
 for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
 old_model_name = self.renamed_models.get((app_label, model_name), model_name)
@@ -817,9 +808,7 @@ class MigrationAutodetector:
 break
 
 def generate_added_fields(self):
-"""
-Fields that have been added
-"""
+"""Make AddField operations."""
 for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
 self._generate_added_field(app_label, model_name, field_name)
 
@@ -855,9 +844,7 @@ class MigrationAutodetector:
 )
 
 def generate_removed_fields(self):
-"""
-Fields that have been removed.
-"""
+"""Make RemoveField operations."""
 for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys):
 self._generate_removed_field(app_label, model_name, field_name)
 
@@ -879,7 +866,8 @@ class MigrationAutodetector:
 
 def generate_altered_fields(self):
 """
-Fields that have been altered.
+Make AlterField operations, or possibly RemovedField/AddField if alter
+isn's possible.
 """
 for app_label, model_name, field_name in sorted(self.old_field_keys.intersection(self.new_field_keys)):
 # Did the field change?
@@ -1057,9 +1045,9 @@ class MigrationAutodetector:
 
 def generate_altered_options(self):
 """
-Works out if any non-schema-affecting options have changed and
-makes an operation to represent them in state changes (in case Python
-code in migrations needs them)
+Work out if any non-schema-affecting options have changed and make an
+operation to represent them in state changes (in case Python code in
+migrations needs them).
 """
 models_to_check = self.kept_model_keys.union(
 self.kept_proxy_keys
@@ -1137,9 +1125,9 @@ class MigrationAutodetector:
 
 def arrange_for_graph(self, changes, graph, migration_name=None):
 """
-Takes in a result from changes() and a MigrationGraph,
-and fixes the names and dependencies of the changes so they
-extend the graph from the leaf nodes for each app.
+Take a result from changes() and a MigrationGraph, and fix the names
+and dependencies of the changes so they extend the graph from the leaf
+nodes for each app.
 """
 leaves = graph.leaf_nodes()
 name_map = {}
@@ -1186,11 +1174,10 @@ class MigrationAutodetector:
 
 def _trim_to_apps(self, changes, app_labels):
 """
-Takes changes from arrange_for_graph and set of app labels and
-returns a modified set of changes which trims out as many migrations
-that are not in app_labels as possible.
-Note that some other migrations may still be present, as they may be
-required dependencies.
+Take changes from arrange_for_graph() and set of app labels, and return
+a modified set of changes which trims out as many migrations that are
+not in app_labels as possible. Note that some other migrations may
+still be present as they may be required dependencies.
 """
 # Gather other app dependencies in a first pass
 app_dependencies = {}
@@ -1214,10 +1201,9 @@ class MigrationAutodetector:
 @classmethod
 def suggest_name(cls, ops):
 """
-Given a set of operations, suggests a name for the migration
-they might represent. Names are not guaranteed to be unique,
-but we put some effort in to the fallback name to avoid VCS conflicts
-if we can.
+Given a set of operations, suggest a name for the migration they might
+represent. Names are not guaranteed to be unique, but put some effort
+into the fallback name to avoid VCS conflicts if possible.
 """
 if len(ops) == 1:
 if isinstance(ops[0], operations.CreateModel):
@@ -1236,8 +1222,8 @@ class MigrationAutodetector:
 @classmethod
 def parse_number(cls, name):
 """
-Given a migration name, tries to extract a number from the
-beginning of it. If no number found, returns None.
+Given a migration name, try to extract a number from the beginning of
+it. If no number is found, return None.
 """
 match = re.match(r'^\d+', name)
 if match:
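parse_number() is just a prefix match with the regex shown above; a quick illustration of the same idea on typical migration names:

    import re

    def parse_number_sketch(name):
        match = re.match(r'^\d+', name)
        return int(match.group()) if match else None

    print(parse_number_sketch('0042_auto_20170214'))  # 42
    print(parse_number_sketch('custom_data_load'))    # None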
@@ -2,51 +2,37 @@ from django.db.utils import DatabaseError
 
 
 class AmbiguityError(Exception):
-"""
-Raised when more than one migration matches a name prefix.
-"""
+"""More than one migration matches a name prefix."""
 pass
 
 
 class BadMigrationError(Exception):
-"""
-Raised when there's a bad migration (unreadable/bad format/etc.).
-"""
+"""There's a bad migration (unreadable/bad format/etc.)."""
 pass
 
 
 class CircularDependencyError(Exception):
-"""
-Raised when there's an impossible-to-resolve circular dependency.
-"""
+"""There's an impossible-to-resolve circular dependency."""
 pass
 
 
 class InconsistentMigrationHistory(Exception):
-"""
-Raised when an applied migration has some of its dependencies not applied.
-"""
+"""An applied migration has some of its dependencies not applied."""
 pass
 
 
 class InvalidBasesError(ValueError):
-"""
-Raised when a model's base classes can't be resolved.
-"""
+"""A model's base classes can't be resolved."""
 pass
 
 
 class IrreversibleError(RuntimeError):
-"""
-Raised when a irreversible migration is about to be reversed.
-"""
+"""An irreversible migration is about to be reversed."""
 pass
 
 
 class NodeNotFoundError(LookupError):
-"""
-Raised when an attempt on a node is made that is not available in the graph.
-"""
+"""An attempt on a node is made that is not available in the graph."""
 
 def __init__(self, message, node, origin=None):
 self.message = message
@@ -9,8 +9,8 @@ from .state import ProjectState
 
 class MigrationExecutor:
 """
-End-to-end migration execution - loads migrations, and runs them
-up or down to a specified set of targets.
+End-to-end migration execution - load migrations and run them up or down
+to a specified set of targets.
 """
 
 def __init__(self, connection, progress_callback=None):
@@ -21,7 +21,7 @@ class MigrationExecutor:
 
 def migration_plan(self, targets, clean_start=False):
 """
-Given a set of targets, returns a list of (Migration instance, backwards?).
+Given a set of targets, return a list of (Migration instance, backwards?).
 """
 plan = []
 if clean_start:
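A short usage sketch of the executor API above, assuming a configured Django project; targets are (app_label, migration_name) nodes and the plan is a list of (Migration instance, backwards?) pairs, as the docstring says:

    from django.db import connection
    from django.db.migrations.executor import MigrationExecutor

    executor = MigrationExecutor(connection)
    # Plan whatever is needed to reach the current leaf nodes of the graph.
    targets = executor.loader.graph.leaf_nodes()
    plan = executor.migration_plan(targets)
    for migration, backwards in plan:
        print(migration.app_label, migration.name,
              'backwards' if backwards else 'forwards')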
@@ -81,7 +81,7 @@ class MigrationExecutor:
 
 def migrate(self, targets, plan=None, state=None, fake=False, fake_initial=False):
 """
-Migrates the database up to the given targets.
+Migrate the database up to the given targets.
 
 Django first needs to create all project states before a migration is
 (un)applied and in a second step run all the database operations.
@@ -208,8 +208,8 @@ class MigrationExecutor:
 
 def collect_sql(self, plan):
 """
-Takes a migration plan and returns a list of collected SQL
-statements that represent the best-efforts version of that plan.
+Take a migration plan and return a list of collected SQL statements
+that represent the best-efforts version of that plan.
 """
 statements = []
 state = None
@@ -225,9 +225,7 @@ class MigrationExecutor:
 return statements
 
 def apply_migration(self, state, migration, fake=False, fake_initial=False):
-"""
-Runs a migration forwards.
-"""
+"""Run a migration forwards."""
 if self.progress_callback:
 self.progress_callback("apply_start", migration, fake)
 if not fake:
@@ -252,9 +250,7 @@ class MigrationExecutor:
 return state
 
 def unapply_migration(self, state, migration, fake=False):
-"""
-Runs a migration backwards.
-"""
+"""Run a migration backwards."""
 if self.progress_callback:
 self.progress_callback("unapply_start", migration, fake)
 if not fake:
@@ -275,12 +271,12 @@ class MigrationExecutor:
 """
 Mark replacement migrations applied if their replaced set all are.
 
-We do this unconditionally on every migrate, rather than just when
-migrations are applied or unapplied, so as to correctly handle the case
+Do this unconditionally on every migrate, rather than just when
+migrations are applied or unapplied, to correctly handle the case
 when a new squash migration is pushed to a deployment that already had
 all its replaced migrations applied. In this case no new migration will
-be applied, but we still want to correctly maintain the applied state
-of the squash migration.
+be applied, but the applied state of the squashed migration must be
+maintained.
 """
 applied = self.recorder.applied_migrations()
 for key, migration in self.loader.replacements.items():
@@ -290,7 +286,7 @@ class MigrationExecutor:
 
 def detect_soft_applied(self, project_state, migration):
 """
-Tests whether a migration has been implicitly applied - that the
+Test whether a migration has been implicitly applied - that the
 tables or columns it would create exist. This is intended only for use
 on initial migrations (as it only looks for CreateModel and AddField).
 """
@@ -97,7 +97,7 @@ class DummyNode(Node):
 
 class MigrationGraph:
 """
-Represents the digraph of all migrations in a project.
+Represent the digraph of all migrations in a project.
 
 Each migration is a node, and each dependency is an edge. There are
 no implicit dependencies between numbered migrations - the numbering is
@@ -142,8 +142,9 @@ class MigrationGraph:
 
 def add_dependency(self, migration, child, parent, skip_validation=False):
 """
-This may create dummy nodes if they don't yet exist.
-If `skip_validation` is set, validate_consistency should be called afterwards.
+This may create dummy nodes if they don't yet exist. If
+`skip_validation=True`, validate_consistency() should be called
+afterwards.
 """
 if child not in self.nodes:
 error_message = (
@@ -165,7 +166,7 @@ class MigrationGraph:
 
 def remove_replaced_nodes(self, replacement, replaced):
 """
-Removes each of the `replaced` nodes (when they exist). Any
+Remove each of the `replaced` nodes (when they exist). Any
 dependencies that were referencing them are changed to reference the
 `replacement` node instead.
 """
@@ -201,10 +202,10 @@ class MigrationGraph:
 
 def remove_replacement_node(self, replacement, replaced):
 """
-The inverse operation to `remove_replaced_nodes`. Almost. Removes the
-replacement node `replacement` and remaps its child nodes to
-`replaced` - the list of nodes it would have replaced. Its parent
-nodes are not remapped as they are expected to be correct already.
+The inverse operation to `remove_replaced_nodes`. Almost. Remove the
+replacement node `replacement` and remap its child nodes to `replaced`
+- the list of nodes it would have replaced. Don't remap its parent
+nodes as they are expected to be correct already.
 """
 self.nodes.pop(replacement, None)
 try:
@@ -237,9 +238,7 @@ class MigrationGraph:
 self.clear_cache()
 
 def validate_consistency(self):
-"""
-Ensure there are no dummy nodes remaining in the graph.
-"""
+"""Ensure there are no dummy nodes remaining in the graph."""
 [n.raise_error() for n in self.node_map.values() if isinstance(n, DummyNode)]
 
 def clear_cache(self):
@@ -251,10 +250,9 @@ class MigrationGraph:
 
 def forwards_plan(self, target):
 """
-Given a node, returns a list of which previous nodes (dependencies)
-must be applied, ending with the node itself.
-This is the list you would follow if applying the migrations to
-a database.
+Given a node, return a list of which previous nodes (dependencies) must
+be applied, ending with the node itself. This is the list you would
+follow if applying the migrations to a database.
 """
 if target not in self.nodes:
 raise NodeNotFoundError("Node %r not a valid node" % (target, ), target)
@@ -271,10 +269,9 @@ class MigrationGraph:
 
 def backwards_plan(self, target):
 """
-Given a node, returns a list of which dependent nodes (dependencies)
-must be unapplied, ending with the node itself.
-This is the list you would follow if removing the migrations from
-a database.
+Given a node, return a list of which dependent nodes (dependencies)
+must be unapplied, ending with the node itself. This is the list you
+would follow if removing the migrations from a database.
 """
 if target not in self.nodes:
 raise NodeNotFoundError("Node %r not a valid node" % (target, ), target)
@@ -290,9 +287,7 @@ class MigrationGraph:
 return self.iterative_dfs(node, forwards=False)
 
 def iterative_dfs(self, start, forwards=True):
-"""
-Iterative depth first search, for finding dependencies.
-"""
+"""Iterative depth-first search for finding dependencies."""
 visited = deque()
 visited.append(start)
 if forwards:
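iterative_dfs() walks dependencies without recursion; the sketch below shows the general shape of such a traversal over a plain dependency mapping. It is a generic illustration, not the MigrationGraph implementation:

    from collections import deque

    def iterative_dfs_sketch(start, dependencies):
        # Return nodes so that every dependency precedes its dependents.
        visited = deque()
        visited.append(start)
        stack = deque(sorted(dependencies.get(start, [])))
        while stack:
            node = stack.popleft()
            visited.appendleft(node)
            stack = deque(sorted(dependencies.get(node, []))) + stack
        seen, order = set(), []
        for node in visited:  # keep the first (deepest) occurrence of each node
            if node not in seen:
                seen.add(node)
                order.append(node)
        return order

    deps = {'0003_c': ['0002_b'], '0002_b': ['0001_a'], '0001_a': []}
    print(iterative_dfs_sketch('0003_c', deps))  # ['0001_a', '0002_b', '0003_c']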
@@ -314,7 +309,7 @@ class MigrationGraph:
 
 def root_nodes(self, app=None):
 """
-Returns all root nodes - that is, nodes with no dependencies inside
+Return all root nodes - that is, nodes with no dependencies inside
 their app. These are the starting point for an app.
 """
 roots = set()
@@ -325,7 +320,7 @@ class MigrationGraph:
 
 def leaf_nodes(self, app=None):
 """
-Returns all leaf nodes - that is, nodes with no dependents in their app.
+Return all leaf nodes - that is, nodes with no dependents in their app.
 These are the "most current" version of an app's schema.
 Having more than one per app is technically an error, but one that
 gets handled further up, in the interactive command - it's usually the
@@ -369,9 +364,9 @@ class MigrationGraph:
 
 def make_state(self, nodes=None, at_end=True, real_apps=None):
 """
-Given a migration node or nodes, returns a complete ProjectState for it.
-If at_end is False, returns the state before the migration has run.
-If nodes is not provided, returns the overall most current project state.
+Given a migration node or nodes, return a complete ProjectState for it.
+If at_end is False, return the state before the migration has run.
+If nodes is not provided, return the overall most current project state.
 """
 if nodes is None:
 nodes = list(self.leaf_nodes())
@@ -17,7 +17,7 @@ MIGRATIONS_MODULE_NAME = 'migrations'
 
 class MigrationLoader:
 """
-Loads migration files from disk, and their status from the database.
+Load migration files from disk and their status from the database.
 
 Migration files are expected to live in the "migrations" directory of
 an app. Their names are entirely unimportant from a code perspective,
@@ -62,9 +62,7 @@ class MigrationLoader:
 return '%s.%s' % (app_package_name, MIGRATIONS_MODULE_NAME), False
 
 def load_disk(self):
-"""
-Loads the migrations from all INSTALLED_APPS from disk.
-"""
+"""Load the migrations from all INSTALLED_APPS from disk."""
 self.disk_migrations = {}
 self.unmigrated_apps = set()
 self.migrated_apps = set()
@@ -119,11 +117,13 @@ class MigrationLoader:
 )
 
 def get_migration(self, app_label, name_prefix):
-"Gets the migration exactly named, or raises `graph.NodeNotFoundError`"
+"""Return the named migration or raise NodeNotFoundError."""
 return self.graph.nodes[app_label, name_prefix]
 
 def get_migration_by_prefix(self, app_label, name_prefix):
-"Returns the migration(s) which match the given app label and name _prefix_"
+"""
+Return the migration(s) which match the given app label and name_prefix.
+"""
 # Do the search
 results = []
 for migration_app_label, migration_name in self.disk_migrations:
@@ -192,7 +192,7 @@ class MigrationLoader:
 
 def build_graph(self):
 """
-Builds a migration dependency graph using both the disk and database.
+Build a migration dependency graph using both the disk and database.
 You'll need to rebuild the graph if you apply migrations. This isn't
 usually a problem as generally migration stuff runs in a one-shot process.
 """
@@ -294,8 +294,8 @@ class MigrationLoader:
 
 def detect_conflicts(self):
 """
-Looks through the loaded graph and detects any conflicts - apps
-with more than one leaf migration. Returns a dict of the app labels
+Look through the loaded graph and detect any conflicts - apps
+with more than one leaf migration. Return a dict of the app labels
 that conflict with the migration names that conflict.
 """
 seen_apps = {}
@@ -308,9 +308,9 @@ class MigrationLoader:
 
 def project_state(self, nodes=None, at_end=True):
 """
-Returns a ProjectState object representing the most recent state
-that the migrations we loaded represent.
+Return a ProjectState object representing the most recent state
+that the loaded migrations represent.
 
-See graph.make_state for the meaning of "nodes" and "at_end"
+See graph.make_state() for the meaning of "nodes" and "at_end".
 """
 return self.graph.make_state(nodes=nodes, at_end=at_end, real_apps=list(self.unmigrated_apps))
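A small usage sketch of the loader above, assuming a configured Django project; it reads migrations from disk and their applied status from the database, then exposes the graph and the resulting project state:

    from django.db import connection
    from django.db.migrations.loader import MigrationLoader

    loader = MigrationLoader(connection)    # builds the graph on construction
    print(sorted(loader.migrated_apps))     # apps with migrations on disk
    print(loader.graph.leaf_nodes())        # the "most current" node per app
    state = loader.project_state()          # ProjectState for the loaded graph
    print(len(state.models), 'models in the latest state')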
@@ -73,9 +73,9 @@ class Migration:
 
 def mutate_state(self, project_state, preserve=True):
 """
-Takes a ProjectState and returns a new one with the migration's
-operations applied to it. Preserves the original object state by
-default and will return a mutated state from a copy.
+Take a ProjectState and return a new one with the migration's
+operations applied to it. Preserve the original object state by
+default and return a mutated state from a copy.
 """
 new_state = project_state
 if preserve:
@@ -87,11 +87,11 @@ class Migration:
 
 def apply(self, project_state, schema_editor, collect_sql=False):
 """
-Takes a project_state representing all migrations prior to this one
-and a schema_editor for a live database and applies the migration
+Take a project_state representing all migrations prior to this one
+and a schema_editor for a live database and apply the migration
 in a forwards order.
 
-Returns the resulting project state for efficient re-use by following
+Return the resulting project state for efficient reuse by following
 Migrations.
 """
 for operation in self.operations:
@@ -124,8 +124,8 @@ class Migration:
 
 def unapply(self, project_state, schema_editor, collect_sql=False):
 """
-Takes a project_state representing all migrations prior to this one
-and a schema_editor for a live database and applies the migration
+Take a project_state representing all migrations prior to this one
+and a schema_editor for a live database and apply the migration
 in a reverse order.
 
 The backwards migration process consists of two phases:
@@ -185,7 +185,5 @@ class SwappableTuple(tuple):
 
 
 def swappable_dependency(value):
-"""
-Turns a setting value into a dependency.
-"""
+"""Turn a setting value into a dependency."""
 return SwappableTuple((value.split(".", 1)[0], "__first__"), value)
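swappable_dependency() is tiny but easy to misread: given a setting value such as 'auth.User' it yields a dependency on the first migration of that app while keeping the original setting value on the tuple (SwappableTuple stores it as an attribute). For illustration:

    from django.db.migrations.migration import swappable_dependency

    dep = swappable_dependency('auth.User')
    print(tuple(dep))    # ('auth', '__first__')
    print(dep.setting)   # 'auth.User' (attribute name as defined on SwappableTuple)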
@@ -41,7 +41,7 @@ class Operation:
 
 def deconstruct(self):
 """
-Returns a 3-tuple of class import path (or just name if it lives
+Return a 3-tuple of class import path (or just name if it lives
 under django.db.migrations), positional arguments, and keyword
 arguments.
 """
@@ -53,21 +53,21 @@ class Operation:
 
 def state_forwards(self, app_label, state):
 """
-Takes the state from the previous migration, and mutates it
+Take the state from the previous migration, and mutate it
 so that it matches what this migration would perform.
 """
 raise NotImplementedError('subclasses of Operation must provide a state_forwards() method')
 
 def database_forwards(self, app_label, schema_editor, from_state, to_state):
 """
-Performs the mutation on the database schema in the normal
+Perform the mutation on the database schema in the normal
 (forwards) direction.
 """
 raise NotImplementedError('subclasses of Operation must provide a database_forwards() method')
 
 def database_backwards(self, app_label, schema_editor, from_state, to_state):
 """
-Performs the mutation on the database schema in the reverse
+Perform the mutation on the database schema in the reverse
 direction - e.g. if this were CreateModel, it would in fact
 drop the model's table.
 """
@@ -75,13 +75,13 @@ class Operation:
 
 def describe(self):
 """
-Outputs a brief summary of what the action does.
+Output a brief summary of what the action does.
 """
 return "%s: %s" % (self.__class__.__name__, self._constructor_args)
 
 def references_model(self, name, app_label=None):
 """
-Returns True if there is a chance this operation references the given
+Return True if there is a chance this operation references the given
 model name (as a string), with an optional app label for accuracy.
 
 Used for optimization. If in doubt, return True;
@@ -93,7 +93,7 @@ class Operation:
 
 def references_field(self, model_name, name, app_label=None):
 """
-Returns True if there is a chance this operation references the given
+Return True if there is a chance this operation references the given
 field name, with an optional app label for accuracy.
 
 Used for optimization. If in doubt, return True.
@@ -102,7 +102,7 @@ class Operation:
 
 def allow_migrate_model(self, connection_alias, model):
 """
-Returns if we're allowed to migrate the model.
+Return wether or not a model may be migrated.
 
 This is a thin wrapper around router.allow_migrate_model() that
 preemptively rejects any proxy, swapped out, or unmanaged model.
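The Operation methods renamed above form the contract for custom operations. A hedged sketch of a minimal subclass implementing them follows; the operation name and SQL are invented for illustration (and PostgreSQL-specific):

    from django.db.migrations.operations.base import Operation

    class EnableExtension(Operation):
        # Hypothetical operation that enables a database extension.
        reversible = True

        def __init__(self, name):
            self.name = name

        def state_forwards(self, app_label, state):
            pass  # purely database-level; nothing to mutate in the project state

        def database_forwards(self, app_label, schema_editor, from_state, to_state):
            schema_editor.execute('CREATE EXTENSION IF NOT EXISTS %s' % self.name)

        def database_backwards(self, app_label, schema_editor, from_state, to_state):
            schema_editor.execute('DROP EXTENSION IF EXISTS %s' % self.name)

        def describe(self):
            return 'Enable extension %s' % self.name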
@@ -37,9 +37,7 @@ class FieldOperation(Operation):
 
 
 class AddField(FieldOperation):
-"""
-Adds a field to a model.
-"""
+"""Add a field to a model."""
 
 def __init__(self, model_name, name, field, preserve_default=True):
 self.field = field
@@ -118,9 +116,7 @@ class AddField(FieldOperation):
 
 
 class RemoveField(FieldOperation):
-"""
-Removes a field from a model.
-"""
+"""Remove a field from a model."""
 
 def deconstruct(self):
 kwargs = {
@@ -163,7 +159,8 @@ class RemoveField(FieldOperation):
 
 class AlterField(FieldOperation):
 """
-Alters a field's database column (e.g. null, max_length) to the provided new field
+Alter a field's database column (e.g. null, max_length) to the provided
+new field.
 """
 
 def __init__(self, model_name, name, field, preserve_default=True):
@@ -236,9 +233,7 @@ class AlterField(FieldOperation):
 
 
 class RenameField(FieldOperation):
-"""
-Renames a field on the model. Might affect db_column too.
-"""
+"""Rename a field on the model. Might affect db_column too."""
 
 def __init__(self, model_name, old_name, new_name):
 self.old_name = old_name
@ -39,9 +39,7 @@ class ModelOperation(Operation):
|
|||||||
|
|
||||||
|
|
||||||
class CreateModel(ModelOperation):
|
class CreateModel(ModelOperation):
|
||||||
"""
|
"""Create a model's table."""
|
||||||
Create a model's table.
|
|
||||||
"""
|
|
||||||
|
|
||||||
serialization_expand_args = ['fields', 'options', 'managers']
|
serialization_expand_args = ['fields', 'options', 'managers']
|
||||||
|
|
||||||
@ -227,9 +225,7 @@ class CreateModel(ModelOperation):
|
|||||||
|
|
||||||
|
|
||||||
class DeleteModel(ModelOperation):
|
class DeleteModel(ModelOperation):
|
||||||
"""
|
"""Drop a model's table."""
|
||||||
Drops a model's table.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def deconstruct(self):
|
def deconstruct(self):
|
||||||
kwargs = {
|
kwargs = {
|
||||||
@ -259,9 +255,7 @@ class DeleteModel(ModelOperation):
|
|||||||
|
|
||||||
|
|
||||||
class RenameModel(ModelOperation):
|
class RenameModel(ModelOperation):
|
||||||
"""
|
"""Rename a model."""
|
||||||
Renames a model.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, old_name, new_name):
|
def __init__(self, old_name, new_name):
|
||||||
self.old_name = old_name
|
self.old_name = old_name
|
||||||
@ -423,9 +417,7 @@ class RenameModel(ModelOperation):
|
|||||||
|
|
||||||
|
|
||||||
class AlterModelTable(ModelOperation):
|
class AlterModelTable(ModelOperation):
|
||||||
"""
|
"""Rename a model's table."""
|
||||||
Renames a model's table
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, name, table):
|
def __init__(self, name, table):
|
||||||
self.table = table
|
self.table = table
|
||||||
@ -497,7 +489,7 @@ class FieldRelatedOptionOperation(ModelOptionOperation):
|
|||||||
|
|
||||||
class AlterUniqueTogether(FieldRelatedOptionOperation):
|
class AlterUniqueTogether(FieldRelatedOptionOperation):
|
||||||
"""
|
"""
|
||||||
Changes the value of unique_together to the target one.
|
Change the value of unique_together to the target one.
|
||||||
Input value of unique_together must be a set of tuples.
|
Input value of unique_together must be a set of tuples.
|
||||||
"""
|
"""
|
||||||
option_name = "unique_together"
|
option_name = "unique_together"
|
||||||
@ -551,7 +543,7 @@ class AlterUniqueTogether(FieldRelatedOptionOperation):
|
|||||||
|
|
||||||
class AlterIndexTogether(FieldRelatedOptionOperation):
|
class AlterIndexTogether(FieldRelatedOptionOperation):
|
||||||
"""
|
"""
|
||||||
Changes the value of index_together to the target one.
|
Change the value of index_together to the target one.
|
||||||
Input value of index_together must be a set of tuples.
|
Input value of index_together must be a set of tuples.
|
||||||
"""
|
"""
|
||||||
option_name = "index_together"
|
option_name = "index_together"
|
||||||
@@ -604,9 +596,7 @@ class AlterIndexTogether(FieldRelatedOptionOperation):


 class AlterOrderWithRespectTo(FieldRelatedOptionOperation):
-    """
-    Represents a change with the order_with_respect_to option.
-    """
+    """Represent a change with the order_with_respect_to option."""

     def __init__(self, name, order_with_respect_to):
         self.order_with_respect_to = order_with_respect_to
@@ -664,7 +654,7 @@ class AlterOrderWithRespectTo(FieldRelatedOptionOperation):

 class AlterModelOptions(ModelOptionOperation):
     """
-    Sets new model options that don't directly affect the database schema
+    Set new model options that don't directly affect the database schema
     (like verbose_name, permissions, ordering). Python code in migrations
     may still need them.
     """
@@ -718,9 +708,7 @@ class AlterModelOptions(ModelOptionOperation):


 class AlterModelManagers(ModelOptionOperation):
-    """
-    Alters the model's managers
-    """
+    """Alter the model's managers."""

     serialization_expand_args = ['managers']

@@ -759,9 +747,7 @@ class IndexOperation(Operation):


 class AddIndex(IndexOperation):
-    """
-    Add an index on a model.
-    """
+    """Add an index on a model."""

     def __init__(self, model_name, index):
         self.model_name = model_name
@@ -806,9 +792,7 @@ class AddIndex(IndexOperation):


 class RemoveIndex(IndexOperation):
-    """
-    Remove an index from a model.
-    """
+    """Remove an index from a model."""

     def __init__(self, model_name, name):
         self.model_name = model_name
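A minimal usage sketch (not part of the diff) for the AddIndex/RemoveIndex operations shown above; the model and index names are invented:

    from django.db import migrations, models

    operations = [
        # Adds the index to both the project state and the database table.
        migrations.AddIndex(
            model_name='book',
            index=models.Index(fields=['title'], name='book_title_idx'),
        ),
        # Removes it again, by name.
        migrations.RemoveIndex(model_name='book', name='book_title_idx'),
    ]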
@@ -5,7 +5,7 @@ from .base import Operation

 class SeparateDatabaseAndState(Operation):
     """
-    Takes two lists of operations - ones that will be used for the database,
+    Take two lists of operations - ones that will be used for the database,
     and ones that will be used for the state change. This allows operations
     that don't support state change to have it applied, or have operations
     that affect the state or not the database, or so on.
@@ -62,9 +62,9 @@ class SeparateDatabaseAndState(Operation):

 class RunSQL(Operation):
     """
-    Runs some raw SQL. A reverse SQL statement may be provided.
+    Run some raw SQL. A reverse SQL statement may be provided.

-    Also accepts a list of operations that represent the state change effected
+    Also accept a list of operations that represent the state change effected
     by this SQL change, in case it's custom column/table creation/deletion.
     """
     noop = ''
@@ -132,7 +132,7 @@ class RunSQL(Operation):

 class RunPython(Operation):
     """
-    Runs Python code in a context suitable for doing versioned ORM operations.
+    Run Python code in a context suitable for doing versioned ORM operations.
     """

     reduces_to_sql = False
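The three operations above are easiest to read with an example. Here is a sketch of a data migration (not part of the commit; the app, table, and model names are made up) that combines RunPython, RunSQL, and SeparateDatabaseAndState:

    from django.db import migrations

    def set_default_titles(apps, schema_editor):
        # Use the historical (versioned ORM) model, not the current class.
        Book = apps.get_model('library', 'Book')
        Book.objects.filter(title='').update(title='Untitled')

    class Migration(migrations.Migration):
        dependencies = [('library', '0003_book_title')]
        operations = [
            migrations.RunPython(set_default_titles, migrations.RunPython.noop),
            migrations.RunSQL(
                sql='CREATE INDEX library_book_title ON library_book (title);',
                reverse_sql='DROP INDEX library_book_title;',
            ),
            # One list of operations is applied to the database, the other
            # only to the in-memory project state.
            migrations.SeparateDatabaseAndState(
                database_operations=[],
                state_operations=[],
            ),
        ]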
@@ -1,6 +1,6 @@
 class MigrationOptimizer:
     """
-    Powers the optimization process, where you provide a list of Operations
+    Power the optimization process, where you provide a list of Operations
     and you are returned a list of equal or shorter length - operations
     are merged into one if possible.

@@ -39,9 +39,7 @@ class MigrationOptimizer:
             operations = result

     def optimize_inner(self, operations, app_label=None):
-        """
-        Inner optimization loop.
-        """
+        """Inner optimization loop."""
         new_operations = []
         for i, operation in enumerate(operations):
             # Compare it to each operation after it
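A sketch (not part of the diff) of the optimization the docstrings above describe: a CreateModel followed by an AddField on the same model collapses into a single CreateModel. The app label is hypothetical.

    from django.db import migrations, models
    from django.db.migrations.optimizer import MigrationOptimizer

    operations = [
        migrations.CreateModel('Author', [('id', models.AutoField(primary_key=True))]),
        migrations.AddField('Author', 'name', models.CharField(max_length=100)),
    ]
    optimized = MigrationOptimizer().optimize(operations, app_label='library')
    # len(optimized) == 1: the AddField has been folded into the CreateModel.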
@@ -11,7 +11,7 @@ from .loader import MigrationLoader

 class MigrationQuestioner:
     """
-    Gives the autodetector responses to questions it might have.
+    Give the autodetector responses to questions it might have.
     This base class has a built-in noninteractive mode, but the
     interactive subclass is what the command-line arguments will use.
     """
@@ -22,7 +22,7 @@ class MigrationQuestioner:
         self.dry_run = dry_run

     def ask_initial(self, app_label):
-        "Should we create an initial migration for the app?"
+        """Should we create an initial migration for the app?"""
         # If it was specified on the command line, definitely true
         if app_label in self.specified_apps:
             return True
@@ -52,29 +52,29 @@ class MigrationQuestioner:
         return not any(x.endswith(".py") for x in filenames if x != "__init__.py")

     def ask_not_null_addition(self, field_name, model_name):
-        "Adding a NOT NULL field to a model"
+        """Adding a NOT NULL field to a model."""
         # None means quit
         return None

     def ask_not_null_alteration(self, field_name, model_name):
-        "Changing a NULL field to NOT NULL"
+        """Changing a NULL field to NOT NULL."""
         # None means quit
         return None

     def ask_rename(self, model_name, old_name, new_name, field_instance):
-        "Was this field really renamed?"
+        """Was this field really renamed?"""
         return self.defaults.get("ask_rename", False)

     def ask_rename_model(self, old_model_state, new_model_state):
-        "Was this model really renamed?"
+        """Was this model really renamed?"""
         return self.defaults.get("ask_rename_model", False)

     def ask_merge(self, app_label):
-        "Do you really want to merge these migrations?"
+        """Do you really want to merge these migrations?"""
         return self.defaults.get("ask_merge", False)

     def ask_auto_now_add_addition(self, field_name, model_name):
-        "Adding an auto_now_add field to a model"
+        """Adding an auto_now_add field to a model."""
         # None means quit
         return None

@@ -138,7 +138,7 @@ class InteractiveMigrationQuestioner(MigrationQuestioner):
                 print("Invalid input: %s" % e)

     def ask_not_null_addition(self, field_name, model_name):
-        "Adding a NOT NULL field to a model"
+        """Adding a NOT NULL field to a model."""
         if not self.dry_run:
             choice = self._choice_input(
                 "You are trying to add a non-nullable field '%s' to %s without a default; "
@@ -157,7 +157,7 @@ class InteractiveMigrationQuestioner(MigrationQuestioner):
            return None

     def ask_not_null_alteration(self, field_name, model_name):
-        "Changing a NULL field to NOT NULL"
+        """Changing a NULL field to NOT NULL."""
         if not self.dry_run:
             choice = self._choice_input(
                 "You are trying to change the nullable field '%s' on %s to non-nullable "
@@ -182,13 +182,13 @@ class InteractiveMigrationQuestioner(MigrationQuestioner):
            return None

     def ask_rename(self, model_name, old_name, new_name, field_instance):
-        "Was this field really renamed?"
+        """Was this field really renamed?"""
         msg = "Did you rename %s.%s to %s.%s (a %s)? [y/N]"
         return self._boolean_input(msg % (model_name, old_name, model_name, new_name,
                                           field_instance.__class__.__name__), False)

     def ask_rename_model(self, old_model_state, new_model_state):
-        "Was this model really renamed?"
+        """Was this model really renamed?"""
         msg = "Did you rename the %s.%s model to %s? [y/N]"
         return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name,
                                           new_model_state.name), False)
@@ -202,7 +202,7 @@ class InteractiveMigrationQuestioner(MigrationQuestioner):
            )

     def ask_auto_now_add_addition(self, field_name, model_name):
-        "Adding an auto_now_add field to a model"
+        """Adding an auto_now_add field to a model."""
         if not self.dry_run:
             choice = self._choice_input(
                 "You are trying to add the field '{}' with 'auto_now_add=True' "
@@ -8,7 +8,7 @@ from .exceptions import MigrationSchemaMissing

 class MigrationRecorder:
     """
-    Deals with storing migration records in the database.
+    Deal with storing migration records in the database.

     Because this table is actually itself used for dealing with model
     creation, it's the one thing we can't do normally via migrations.
@@ -40,9 +40,7 @@ class MigrationRecorder:
         return self.Migration.objects.using(self.connection.alias)

     def ensure_schema(self):
-        """
-        Ensures the table exists and has the correct schema.
-        """
+        """Ensure the table exists and has the correct schema."""
         # If the table's there, that's fine - we've never changed its schema
         # in the codebase.
         if self.Migration._meta.db_table in self.connection.introspection.table_names(self.connection.cursor()):
@@ -55,28 +53,20 @@ class MigrationRecorder:
             raise MigrationSchemaMissing("Unable to create the django_migrations table (%s)" % exc)

     def applied_migrations(self):
-        """
-        Returns a set of (app, name) of applied migrations.
-        """
+        """Return a set of (app, name) of applied migrations."""
         self.ensure_schema()
         return set(tuple(x) for x in self.migration_qs.values_list("app", "name"))

     def record_applied(self, app, name):
-        """
-        Records that a migration was applied.
-        """
+        """Record that a migration was applied."""
         self.ensure_schema()
         self.migration_qs.create(app=app, name=name)

     def record_unapplied(self, app, name):
-        """
-        Records that a migration was unapplied.
-        """
+        """Record that a migration was unapplied."""
         self.ensure_schema()
         self.migration_qs.filter(app=app, name=name).delete()

     def flush(self):
-        """
-        Deletes all migration records. Useful if you're testing migrations.
-        """
+        """Delete all migration records. Useful for testing migrations."""
         self.migration_qs.all().delete()
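A usage sketch for the recorder documented above (not part of the diff; this is internal API, shown only to illustrate the docstrings, and the app/migration names are invented):

    from django.db import connection
    from django.db.migrations.recorder import MigrationRecorder

    recorder = MigrationRecorder(connection)
    recorder.ensure_schema()                    # create django_migrations if it is missing
    applied = recorder.applied_migrations()     # set of (app_label, migration_name) tuples
    recorder.record_applied('library', '0001_initial')
    recorder.record_unapplied('library', '0001_initial')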
@@ -27,9 +27,7 @@ def _get_app_label_and_model_name(model, app_label=''):


 def _get_related_models(m):
-    """
-    Return all models that have a direct relationship to the given model.
-    """
+    """Return all models that have a direct relationship to the given model."""
     related_models = [
         subclass for subclass in m.__subclasses__()
         if issubclass(subclass, models.Model)
@@ -82,9 +80,9 @@ def get_related_models_recursive(model):

 class ProjectState:
     """
-    Represents the entire project's overall state.
-    This is the item that is passed around - we do it here rather than at the
-    app level so that cross-app FKs/etc. resolve properly.
+    Represent the entire project's overall state. This is the item that is
+    passed around - do it here rather than at the app level so that cross-app
+    FKs/etc. resolve properly.
     """

     def __init__(self, models=None, real_apps=None):
@@ -194,7 +192,7 @@ class ProjectState:
         self.apps.render_multiple(states_to_be_rendered)

     def clone(self):
-        "Returns an exact copy of this ProjectState"
+        """Return an exact copy of this ProjectState."""
         new_state = ProjectState(
             models={k: v.clone() for k, v in self.models.items()},
             real_apps=self.real_apps,
@@ -219,7 +217,7 @@ class ProjectState:

     @classmethod
     def from_apps(cls, apps):
-        "Takes in an Apps and returns a ProjectState matching it"
+        """Take an Apps and return a ProjectState matching it."""
         app_models = {}
         for model in apps.get_models(include_swapped=True):
             model_state = ModelState.from_model(model)
@@ -235,9 +233,7 @@ class ProjectState:


 class AppConfigStub(AppConfig):
-    """
-    Stubs a Django AppConfig. Only provides a label, and a dict of models.
-    """
+    """Stub of an AppConfig. Only provides a label and a dict of models."""
     # Not used, but required by AppConfig.__init__
     path = ''

@@ -325,9 +321,7 @@ class StateApps(Apps):
            unrendered_models = new_unrendered_models

     def clone(self):
-        """
-        Return a clone of this registry, mainly used by the migration framework.
-        """
+        """Return a clone of this registry."""
         clone = StateApps([], {})
         clone.all_models = copy.deepcopy(self.all_models)
         clone.app_configs = copy.deepcopy(self.app_configs)
@@ -358,9 +352,9 @@ class StateApps(Apps):

 class ModelState:
     """
-    Represents a Django Model. We don't use the actual Model class
-    as it's not designed to have its options changed - instead, we
-    mutate this one and then render it into a Model as required.
+    Represent a Django Model. Don't use the actual Model class as it's not
+    designed to have its options changed - instead, mutate this one and then
+    render it into a Model as required.

     Note that while you are allowed to mutate .fields, you are not allowed
     to mutate the Field instances inside there themselves - you must instead
@@ -409,9 +403,7 @@ class ModelState:

     @classmethod
     def from_model(cls, model, exclude_rels=False):
-        """
-        Feed me a model, get a ModelState representing it out.
-        """
+        """Given a model, return a ModelState representing it."""
         # Deconstruct the fields
         fields = []
         for field in model._meta.local_fields:
@@ -532,7 +524,7 @@ class ModelState:
         )

     def construct_managers(self):
-        "Deep-clone the managers using deconstruction"
+        """Deep-clone the managers using deconstruction."""
         # Sort all managers by their creation counter
         sorted_managers = sorted(self.managers, key=lambda v: v[1].creation_counter)
         for mgr_name, manager in sorted_managers:
@@ -546,7 +538,7 @@ class ModelState:
            yield mgr_name, manager_class(*args, **kwargs)

     def clone(self):
-        "Returns an exact copy of this ModelState"
+        """Return an exact copy of this ModelState."""
         return self.__class__(
             app_label=self.app_label,
             name=self.name,
@@ -557,7 +549,7 @@ class ModelState:
         )

     def render(self, apps):
-        "Creates a Model object from our current state into the given apps"
+        """Create a Model object from our current state into the given apps."""
         # First, make a Meta object
         meta_contents = {'app_label': self.app_label, "apps": apps}
         meta_contents.update(self.options)
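To illustrate the state classes above, a short sketch (not part of the commit) using these internal APIs; 'library' and 'Book' are hypothetical:

    from django.apps import apps
    from django.db.migrations.state import ModelState, ProjectState

    state = ProjectState.from_apps(apps)          # snapshot of every installed model
    working_copy = state.clone()                  # mutate this, not the real model classes
    book_state = ModelState.from_model(apps.get_model('library', 'Book'))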
@@ -1,9 +1,10 @@
 def topological_sort_as_sets(dependency_graph):
-    """Variation of Kahn's algorithm (1962) that returns sets.
+    """
+    Variation of Kahn's algorithm (1962) that returns sets.

-    Takes a dependency graph as a dictionary of node => dependencies.
+    Take a dependency graph as a dictionary of node => dependencies.

-    Yields sets of items in topological order, where the first set contains
+    Yield sets of items in topological order, where the first set contains
     all nodes without dependencies, and each following set contains all
     nodes that may depend on the nodes only in the previously yielded sets.
     """
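The docstring above is clearer with a worked example. This is a minimal reimplementation of the behaviour it describes, sketched here for illustration only (it is not the module's own code):

    def topo_sets(dependency_graph):
        # Kahn-style: repeatedly emit every node whose dependencies are already satisfied.
        todo = {node: set(deps) for node, deps in dependency_graph.items()}
        while todo:
            ready = {node for node, deps in todo.items() if not deps}
            if not ready:
                raise ValueError('Cyclic dependency in graph')
            yield ready
            todo = {node: deps - ready for node, deps in todo.items() if node not in ready}

    print(list(topo_sets({1: set(), 2: {1}, 3: {1}, 4: {2, 3}})))
    # [{1}, {2, 3}, {4}]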
@@ -129,7 +129,7 @@ class OperationWriter:

 class MigrationWriter:
     """
-    Takes a Migration instance and is able to produce the contents
+    Take a Migration instance and is able to produce the contents
     of the migration file from it.
     """

@@ -138,9 +138,7 @@ class MigrationWriter:
         self.needs_manual_porting = False

     def as_string(self):
-        """
-        Returns a string of the file contents.
-        """
+        """Return a string of the file contents."""
         items = {
             "replaces_str": "",
             "initial_str": "",
@@ -69,9 +69,7 @@ def subclass_exception(name, parents, module, attached_to=None):


 class ModelBase(type):
-    """
-    Metaclass for all models.
-    """
+    """Metaclass for all models."""
     def __new__(cls, name, bases, attrs):
         super_new = super().__new__

@@ -322,9 +320,7 @@ class ModelBase(type):
            setattr(cls, name, value)

     def _prepare(cls):
-        """
-        Creates some methods once self._meta has been populated.
-        """
+        """Create some methods once self._meta has been populated."""
         opts = cls._meta
         opts._prepare(cls)

@@ -372,9 +368,7 @@ class ModelBase(type):


 class ModelState:
-    """
-    A class for storing instance state
-    """
+    """Store model instance state."""
     def __init__(self, db=None):
         self.db = db
         # If true, uniqueness validation checks will consider this a new, as-yet-unsaved object.
@@ -561,7 +555,7 @@ class Model(metaclass=ModelBase):

     def get_deferred_fields(self):
         """
-        Returns a set containing names of deferred fields for this instance.
+        Return a set containing names of deferred fields for this instance.
         """
         return {
             f.attname for f in self._meta.concrete_fields
@@ -570,7 +564,7 @@ class Model(metaclass=ModelBase):

     def refresh_from_db(self, using=None, fields=None):
         """
-        Reloads field values from the database.
+        Reload field values from the database.

         By default, the reloading happens from the database this instance was
         loaded from, or by the read router if this instance wasn't loaded from
@@ -622,10 +616,10 @@ class Model(metaclass=ModelBase):

     def serializable_value(self, field_name):
         """
-        Returns the value of the field name for this instance. If the field is
-        a foreign key, returns the id value, instead of the object. If there's
-        no Field object with this name on the model, the model attribute's
-        value is returned directly.
+        Return the value of the field name for this instance. If the field is
+        a foreign key, return the id value instead of the object. If there's
+        no Field object with this name on the model, return the model
+        attribute's value.

         Used to serialize a field's value (in the serializer, or form output,
         for example). Normally, you would just access the attribute directly
@@ -640,7 +634,7 @@ class Model(metaclass=ModelBase):
     def save(self, force_insert=False, force_update=False, using=None,
              update_fields=None):
         """
-        Saves the current instance. Override this in a subclass if you want to
+        Save the current instance. Override this in a subclass if you want to
         control the saving process.

         The 'force_insert' and 'force_update' parameters can be used to insist
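A short sketch (not part of the diff) of the two instance methods documented above; Book and author are hypothetical model/field names:

    book = Book.objects.get(pk=1)
    Book.objects.filter(pk=1).update(title='New title')   # change happens only in the database
    book.refresh_from_db()                                 # reload every field
    book.refresh_from_db(fields=['title'])                 # or just the ones you need
    book.serializable_value('author')                      # the author_id value, not the Author object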
@@ -721,7 +715,7 @@ class Model(metaclass=ModelBase):
     def save_base(self, raw=False, force_insert=False,
                   force_update=False, using=None, update_fields=None):
         """
-        Handles the parts of saving which should be done only once per save,
+        Handle the parts of saving which should be done only once per save,
         yet need to be done in raw saves, too. This includes some sanity
         checks and signal sending.

@@ -761,9 +755,7 @@ class Model(metaclass=ModelBase):
     save_base.alters_data = True

     def _save_parents(self, cls, using, update_fields):
-        """
-        Saves all the parents of cls using values from self.
-        """
+        """Save all the parents of cls using values from self."""
         meta = cls._meta
         for parent, field in meta.parents.items():
             # Make sure the link fields are synced between parent and self.
@@ -787,7 +779,7 @@ class Model(metaclass=ModelBase):
     def _save_table(self, raw=False, cls=None, force_insert=False,
                     force_update=False, using=None, update_fields=None):
         """
-        Does the heavy-lifting involved in saving. Updates or inserts the data
+        Do the heavy-lifting involved in saving. Update or insert the data
         for a single table.
         """
         meta = cls._meta
@@ -838,9 +830,8 @@ class Model(metaclass=ModelBase):

     def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
         """
-        This method will try to update the model. If the model was updated (in
-        the sense that an update query was done and a matching row was found
-        from the DB) the method will return True.
+        Try to update the model. Return True if the model was updated (if an
+        update query was done and a matching row was found in the DB).
         """
         filtered = base_qs.filter(pk=pk_val)
         if not values:
@@ -936,8 +927,8 @@ class Model(metaclass=ModelBase):

     def validate_unique(self, exclude=None):
         """
-        Checks unique constraints on the model and raises ``ValidationError``
-        if any failed.
+        Check unique constraints on the model and raise ValidationError if any
+        failed.
         """
         unique_checks, date_checks = self._get_unique_checks(exclude=exclude)

@@ -952,12 +943,11 @@ class Model(metaclass=ModelBase):

     def _get_unique_checks(self, exclude=None):
         """
-        Gather a list of checks to perform. Since validate_unique could be
+        Return a list of checks to perform. Since validate_unique() could be
         called from a ModelForm, some fields may have been excluded; we can't
         perform a unique check on a model that is missing fields involved
-        in that check.
-        Fields that did not validate should also be excluded, but they need
-        to be passed in via the exclude argument.
+        in that check. Fields that did not validate should also be excluded,
+        but they need to be passed in via the exclude argument.
         """
         if exclude is None:
             exclude = []
@@ -1125,8 +1115,8 @@ class Model(metaclass=ModelBase):

     def full_clean(self, exclude=None, validate_unique=True):
         """
-        Calls clean_fields, clean, and validate_unique, on the model,
-        and raises a ``ValidationError`` for any errors that occurred.
+        Call clean_fields(), clean(), and validate_unique() on the model.
+        Raise a ValidationError for any errors that occur.
         """
         errors = {}
         if exclude is None:
@@ -1161,7 +1151,7 @@ class Model(metaclass=ModelBase):

     def clean_fields(self, exclude=None):
         """
-        Cleans all fields and raises a ValidationError containing a dict
+        Clean all fields and raise a ValidationError containing a dict
         of all validation errors if any occur.
         """
         if exclude is None:
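A usage sketch for the validation methods above (not part of the commit; the model and field names are invented):

    from django.core.exceptions import ValidationError

    book = Book(title='')
    try:
        book.full_clean()              # runs clean_fields(), clean(), validate_unique()
    except ValidationError as e:
        print(e.message_dict)          # e.g. {'title': ['This field cannot be blank.']}

    # Skip selected fields and the uniqueness pass, as a ModelForm with excluded fields would:
    book.full_clean(exclude=['title'], validate_unique=False)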
@@ -1212,8 +1202,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_swappable(cls):
-        """ Check if the swapped model exists. """
-
+        """Check if the swapped model exists."""
         errors = []
         if cls._meta.swapped:
             try:
@@ -1253,8 +1242,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_managers(cls, **kwargs):
-        """ Perform all manager checks. """
-
+        """Perform all manager checks."""
         errors = []
         for manager in cls._meta.managers:
             errors.extend(manager.check(**kwargs))
@@ -1262,8 +1250,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_fields(cls, **kwargs):
-        """ Perform all field checks. """
-
+        """Perform all field checks."""
         errors = []
         for field in cls._meta.local_fields:
             errors.extend(field.check(**kwargs))
@@ -1304,7 +1291,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_id_field(cls):
-        """ Check if `id` field is a primary key. """
+        """Check if `id` field is a primary key."""
         fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk)
         # fields is empty or consists of the invalid "id" field
         if fields and not fields[0].primary_key and cls._meta.pk.name == 'id':
@@ -1321,8 +1308,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_field_name_clashes(cls):
-        """ Ref #17673. """
-
+        """Forbid field shadowing in multi-table inheritance."""
         errors = []
         used_fields = {}  # name or attname -> field

@@ -1428,7 +1414,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_index_together(cls):
-        """ Check the value of "index_together" option. """
+        """Check the value of "index_together" option."""
         if not isinstance(cls._meta.index_together, (tuple, list)):
             return [
                 checks.Error(
@@ -1455,7 +1441,7 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_unique_together(cls):
-        """ Check the value of "unique_together" option. """
+        """Check the value of "unique_together" option."""
         if not isinstance(cls._meta.unique_together, (tuple, list)):
             return [
                 checks.Error(
@@ -1530,8 +1516,10 @@ class Model(metaclass=ModelBase):

     @classmethod
     def _check_ordering(cls):
-        """ Check "ordering" option -- is it a list of strings and do all fields
-        exist? """
+        """
+        Check "ordering" option -- is it a list of strings and do all fields
+        exist?
+        """
         if cls._meta._ordering_clash:
             return [
                 checks.Error(
@@ -1710,9 +1698,7 @@ def make_foreign_order_accessors(model, related_model):


 def model_unpickle(model_id):
-    """
-    Used to unpickle Model subclasses with deferred fields.
-    """
+    """Used to unpickle Model subclasses with deferred fields."""
     if isinstance(model_id, tuple):
         model = apps.get_model(*model_id)
     else:
@@ -79,11 +79,11 @@ class Collector:

     def add(self, objs, source=None, nullable=False, reverse_dependency=False):
         """
-        Adds 'objs' to the collection of objects to be deleted. If the call is
+        Add 'objs' to the collection of objects to be deleted. If the call is
         the result of a cascade, 'source' should be the model that caused it,
         and 'nullable' should be set to True if the relation can be null.

-        Returns a list of all objects that were not already collected.
+        Return a list of all objects that were not already collected.
         """
         if not objs:
             return []
@@ -106,7 +106,7 @@ class Collector:

     def add_field_update(self, field, value, objs):
         """
-        Schedules a field update. 'objs' must be a homogeneous iterable
+        Schedule a field update. 'objs' must be a homogeneous iterable
         collection of model instances (e.g. a QuerySet).
         """
         if not objs:
@@ -118,12 +118,12 @@ class Collector:

     def can_fast_delete(self, objs, from_field=None):
         """
-        Determines if the objects in the given queryset-like can be
+        Determine if the objects in the given queryset-like can be
         fast-deleted. This can be done if there are no cascades, no
         parents and no signal listeners for the object class.

         The 'from_field' tells where we are coming from - we need this to
-        determine if the objects are in fact to be deleted. Allows also
+        determine if the objects are in fact to be deleted. Allow also
         skipping parent -> child -> parent chain preventing fast delete of
         the child.
         """
@@ -154,7 +154,7 @@ class Collector:

     def get_del_batches(self, objs, field):
         """
-        Returns the objs in suitably sized batches for the used connection.
+        Return the objs in suitably sized batches for the used connection.
         """
         conn_batch_size = max(
             connections[self.using].ops.bulk_batch_size([field.name], objs), 1)
@@ -167,7 +167,7 @@ class Collector:
     def collect(self, objs, source=None, nullable=False, collect_related=True,
                 source_attr=None, reverse_dependency=False, keep_parents=False):
         """
-        Adds 'objs' to the collection of objects to be deleted as well as all
+        Add 'objs' to the collection of objects to be deleted as well as all
         parent instances. 'objs' must be a homogeneous iterable collection of
         model instances (e.g. a QuerySet). If 'collect_related' is True,
         related objects will be handled by their respective on_delete handler.
@@ -228,7 +228,7 @@ class Collector:

     def related_objects(self, related, objs):
         """
-        Gets a QuerySet of objects related to ``objs`` via the relation ``related``.
+        Get a QuerySet of objects related to `objs` via the relation `related`.
         """
         return related.related_model._base_manager.using(self.using).filter(
             **{"%s__in" % related.field.name: objs}
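A sketch of how the collector above is driven (not part of the diff; this is internal API, and 'book' is a hypothetical saved model instance):

    from django.db.models.deletion import Collector

    collector = Collector(using='default')
    collector.collect([book])    # follows cascades and gathers parents/related objects
    collector.delete()           # runs the queries, honouring each relation's on_delete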
@@ -11,7 +11,7 @@ from django.utils.functional import cached_property

 class Combinable:
     """
-    Provides the ability to combine one or two objects with
+    Provide the ability to combine one or two objects with
     some connector. For example F('foo') + F('bar').
     """

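Combinable is what makes the familiar F() arithmetic below possible; a small example (not from the commit, with invented model and field names):

    from django.db.models import F

    Book.objects.update(price=F('price') * 2)      # column math runs in the database
    Book.objects.filter(sold__gt=F('stock'))       # compare two columns row by row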
@@ -120,9 +120,7 @@ class Combinable:

 @deconstructible
 class BaseExpression:
-    """
-    Base class for all query expressions.
-    """
+    """Base class for all query expressions."""

     # aggregate specific fields
     is_summary = False
@@ -170,7 +168,7 @@ class BaseExpression:

         * connection: the database connection used for the current query.

-        Returns: (sql, params)
+        Return: (sql, params)
          Where `sql` is a string containing ordered sql parameters to be
          replaced with the elements of the list `params`.
         """
@@ -192,7 +190,7 @@ class BaseExpression:

     def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
         """
-        Provides the chance to do any preprocessing or validation before being
+        Provide the chance to do any preprocessing or validation before being
         added to the query.

         Arguments:
@@ -203,7 +201,7 @@ class BaseExpression:
         * summarize: a terminal aggregate clause
         * for_save: whether this expression about to be used in a save or update

-        Returns: an Expression to be added to the query.
+        Return: an Expression to be added to the query.
         """
         c = self.copy()
         c.is_summary = summarize
@@ -214,9 +212,7 @@ class BaseExpression:
         return c

     def _prepare(self, field):
-        """
-        Hook used by Lookup.get_prep_lookup() to do custom preparation.
-        """
+        """Hook used by Lookup.get_prep_lookup() to do custom preparation."""
         return self

     @property
@@ -225,9 +221,7 @@ class BaseExpression:

     @cached_property
     def output_field(self):
-        """
-        Returns the output type of this expressions.
-        """
+        """Return the output type of this expressions."""
         if self._output_field_or_none is None:
             raise FieldError("Cannot resolve expression type, unknown output_field")
         return self._output_field_or_none
@@ -235,7 +229,7 @@ class BaseExpression:
     @cached_property
     def _output_field_or_none(self):
         """
-        Returns the output field of this expression, or None if no output type
+        Return the output field of this expression, or None if no output type
         can be resolved. Note that the 'output_field' property will raise
         FieldError if no type can be resolved, but this attribute allows for
         None values.
@@ -246,10 +240,9 @@ class BaseExpression:

     def _resolve_output_field(self):
         """
-        Attempts to infer the output type of the expression. If the output
-        fields of all source fields match then we can simply infer the same
-        type here. This isn't always correct, but it makes sense most of the
-        time.
+        Attempt to infer the output type of the expression. If the output
+        fields of all source fields match then, simply infer the same type
+        here. This isn't always correct, but it makes sense most of the time.

         Consider the difference between `2 + 2` and `2 / 3`. Inferring
         the type here is a convenience for the common case. The user should
@@ -316,10 +309,7 @@ class BaseExpression:
         return cols

     def get_source_fields(self):
-        """
-        Returns the underlying field types used by this
-        aggregate.
-        """
+        """Return the underlying field types used by this aggregate."""
         return [e._output_field_or_none for e in self.get_source_expressions()]

     def asc(self, **kwargs):
@@ -364,9 +354,7 @@ class BaseExpression:


 class Expression(BaseExpression, Combinable):
-    """
-    An expression that can be combined with other expressions.
-    """
+    """An expression that can be combined with other expressions."""
     pass


@@ -470,9 +458,7 @@ class TemporalSubtraction(CombinedExpression):

 @deconstructible
 class F(Combinable):
-    """
-    An object capable of resolving references to existing query objects.
-    """
+    """An object capable of resolving references to existing query objects."""
     def __init__(self, name):
         """
         Arguments:
@@ -527,9 +513,7 @@ class OuterRef(F):


 class Func(Expression):
-    """
-    An SQL function call.
-    """
+    """An SQL function call."""
     function = None
     template = '%(function)s(%(expressions)s)'
     arg_joiner = ', '
@@ -608,9 +592,7 @@ class Func(Expression):


 class Value(Expression):
-    """
-    Represents a wrapped value as a node within an expression
-    """
+    """Represent a wrapped value as a node within an expression."""
     def __init__(self, value, output_field=None):
         """
         Arguments:
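A quick example of the Func and Value expressions documented above (not part of the diff; 'Author' and 'name' are hypothetical):

    from django.db.models import CharField, F, Func, Value

    Author.objects.annotate(upper_name=Func(F('name'), function='UPPER'))
    Author.objects.annotate(kind=Value('author', output_field=CharField()))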
@@ -190,9 +190,7 @@ class Field(RegisterLookupMixin):
         return '%s.%s.%s' % (app, model._meta.object_name, self.name)

     def __repr__(self):
-        """
-        Displays the module, class and name of the field.
-        """
+        """Display the module, class, and name of the field."""
         path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
         name = getattr(self, 'name', None)
         if name is not None:
@@ -210,9 +208,10 @@ class Field(RegisterLookupMixin):
         return errors

     def _check_field_name(self):
-        """ Check if field name is valid, i.e. 1) does not end with an
-        underscore, 2) does not contain "__" and 3) is not "pk". """
+        """
+        Check if field name is valid, i.e. 1) does not end with an
+        underscore, 2) does not contain "__" and 3) is not "pk".
+        """
         if self.name.endswith('_'):
             return [
                 checks.Error(
@@ -348,37 +347,42 @@ class Field(RegisterLookupMixin):
     def select_format(self, compiler, sql, params):
         """
         Custom format for select clauses. For example, GIS columns need to be
-        selected as AsText(table.col) on MySQL as the table.col data can't be used
-        by Django.
+        selected as AsText(table.col) on MySQL as the table.col data can't be
+        used by Django.
         """
         return sql, params

     def deconstruct(self):
         """
-        Returns enough information to recreate the field as a 4-tuple:
+        Return enough information to recreate the field as a 4-tuple:

-         * The name of the field on the model, if contribute_to_class has been run
-         * The import path of the field, including the class: django.db.models.IntegerField
-           This should be the most portable version, so less specific may be better.
-         * A list of positional arguments
-         * A dict of keyword arguments
+         * The name of the field on the model, if contribute_to_class() has
+           been run.
+         * The import path of the field, including the class:e.g.
+           django.db.models.IntegerField This should be the most portable
+           version, so less specific may be better.
+         * A list of positional arguments.
+         * A dict of keyword arguments.

-        Note that the positional or keyword arguments must contain values of the
-        following types (including inner values of collection types):
+        Note that the positional or keyword arguments must contain values of
+        the following types (including inner values of collection types):

-         * None, bool, str, int, float, complex, set, frozenset, list, tuple, dict
+         * None, bool, str, int, float, complex, set, frozenset, list, tuple,
+           dict
          * UUID
          * datetime.datetime (naive), datetime.date
-         * top-level classes, top-level functions - will be referenced by their full import path
+         * top-level classes, top-level functions - will be referenced by their
+           full import path
          * Storage instances - these have their own deconstruct() method

         This is because the values here must be serialized into a text format
         (possibly new Python code, possibly JSON) and these are the only types
         with encoding handlers defined.

-        There's no need to return the exact way the field was instantiated this time,
-        just ensure that the resulting field is the same - prefer keyword arguments
-        over positional ones, and omit parameters with their default values.
+        There's no need to return the exact way the field was instantiated this
+        time, just ensure that the resulting field is the same - prefer keyword
+        arguments over positional ones, and omit parameters with their default
+        values.
         """
         # Short-form way of fetching all the default parameters
         keywords = {}
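The 4-tuple described above looks like this in practice (a sketch, not part of the commit):

    from django.db import models

    name, path, args, kwargs = models.CharField(max_length=100, null=True).deconstruct()
    # name   -> None (contribute_to_class() has not run yet)
    # path   -> 'django.db.models.CharField'
    # args   -> []
    # kwargs -> {'max_length': 100, 'null': True}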
@ -486,7 +490,7 @@ class Field(RegisterLookupMixin):
|
|||||||
def __reduce__(self):
|
def __reduce__(self):
|
||||||
"""
|
"""
|
||||||
Pickling should return the model._meta.fields instance of the field,
|
Pickling should return the model._meta.fields instance of the field,
|
||||||
not a new copy of that field. So, we use the app registry to load the
|
not a new copy of that field. So, use the app registry to load the
|
||||||
model and then the field back.
|
model and then the field back.
|
||||||
"""
|
"""
|
||||||
if not hasattr(self, 'model'):
|
if not hasattr(self, 'model'):
|
||||||
@ -512,9 +516,9 @@ class Field(RegisterLookupMixin):
|
|||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
"""
|
"""
|
||||||
Converts the input value into the expected Python data type, raising
|
Convert the input value into the expected Python data type, raising
|
||||||
django.core.exceptions.ValidationError if the data can't be converted.
|
django.core.exceptions.ValidationError if the data can't be converted.
|
||||||
Returns the converted value. Subclasses should override this.
|
Return the converted value. Subclasses should override this.
|
||||||
"""
|
"""
|
||||||
return value
|
return value
|
||||||
|
|
||||||
@ -544,8 +548,8 @@ class Field(RegisterLookupMixin):
|
|||||||
|
|
||||||
def validate(self, value, model_instance):
|
def validate(self, value, model_instance):
|
||||||
"""
|
"""
|
||||||
Validates value and throws ValidationError. Subclasses should override
|
Validate value and raise ValidationError if necessary. Subclasses
|
||||||
this to provide validation logic.
|
should override this to provide validation logic.
|
||||||
"""
|
"""
|
||||||
if not self.editable:
|
if not self.editable:
|
||||||
# Skip validation for non-editable fields.
|
# Skip validation for non-editable fields.
|
||||||
@ -576,8 +580,8 @@ class Field(RegisterLookupMixin):
|
|||||||
def clean(self, value, model_instance):
|
def clean(self, value, model_instance):
|
||||||
"""
|
"""
|
||||||
Convert the value's type and run validation. Validation errors
|
Convert the value's type and run validation. Validation errors
|
||||||
from to_python and validate are propagated. The correct value is
|
from to_python() and validate() are propagated. Return the correct
|
||||||
returned if no error is raised.
|
value if no error is raised.
|
||||||
"""
|
"""
|
||||||
value = self.to_python(value)
|
value = self.to_python(value)
|
||||||
self.validate(value, model_instance)
|
self.validate(value, model_instance)
|
||||||
@@ -632,9 +636,9 @@ class Field(RegisterLookupMixin):
 
     def db_parameters(self, connection):
         """
-        Extension of db_type(), providing a range of different return
-        values (type, checks).
-        This will look at db_type(), allowing custom model fields to override it.
+        Extension of db_type(), providing a range of different return values
+        (type, checks). This will look at db_type(), allowing custom model
+        fields to override it.
         """
         type_string = self.db_type(connection)
         check_string = self.db_check(connection)
@@ -667,9 +671,8 @@ class Field(RegisterLookupMixin):
         """
         Register the field with the model class it belongs to.
 
-        If private_only is True, a separate instance of this field will be
-        created for every subclass of cls, even if cls is not an abstract
-        model.
+        If private_only is True, create a separate instance of this field
+        for every subclass of cls, even if cls is not an abstract model.
         """
         self.set_attributes_from_name(name)
         self.model = cls
@@ -709,22 +712,18 @@ class Field(RegisterLookupMixin):
         return self.__class__.__name__
 
     def pre_save(self, model_instance, add):
-        """
-        Returns field's value just before saving.
-        """
+        """Return field's value just before saving."""
         return getattr(model_instance, self.attname)
 
     def get_prep_value(self, value):
-        """
-        Perform preliminary non-db specific value checks and conversions.
-        """
+        """Perform preliminary non-db specific value checks and conversions."""
         if isinstance(value, Promise):
             value = value._proxy____cast()
         return value
 
     def get_db_prep_value(self, value, connection, prepared=False):
-        """Returns field's value prepared for interacting with the database
-        backend.
+        """
+        Return field's value prepared for interacting with the database backend.
 
         Used by the default implementations of get_db_prep_save().
         """
@@ -733,22 +732,15 @@ class Field(RegisterLookupMixin):
         return value
 
     def get_db_prep_save(self, value, connection):
-        """
-        Returns field's value prepared for saving into a database.
-        """
-        return self.get_db_prep_value(value, connection=connection,
-                                      prepared=False)
+        """Return field's value prepared for saving into a database."""
+        return self.get_db_prep_value(value, connection=connection, prepared=False)
 
     def has_default(self):
-        """
-        Returns a boolean of whether this field has a default value.
-        """
+        """Return a boolean of whether this field has a default value."""
         return self.default is not NOT_PROVIDED
 
     def get_default(self):
-        """
-        Returns the default value for this field.
-        """
+        """Return the default value for this field."""
         return self._get_default()
 
     @cached_property
@@ -760,11 +752,13 @@ class Field(RegisterLookupMixin):
 
         if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls:
             return return_None
-        return str  # returns empty string
+        return str  # return empty string
 
     def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None):
-        """Returns choices with a default blank choices included, for use
-        as SelectField choices for this field."""
+        """
+        Return choices with a default blank choices included, for use
+        as <select> choices for this field.
+        """
         blank_defined = False
         choices = list(self.choices) if self.choices else []
         named_groups = choices and isinstance(choices[0][1], (list, tuple))
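For the <select> wording above, a short sketch of what get_choices() yields once the blank choice is prepended (assumes a configured project; the Pet model is hypothetical):

from django.db import models

class Pet(models.Model):  # hypothetical model
    SPECIES_CHOICES = [('cat', 'Cat'), ('dog', 'Dog')]
    species = models.CharField(max_length=10, choices=SPECIES_CHOICES)

field = Pet._meta.get_field('species')
# The default blank dash option comes first, ready to render as <select> options.
field.get_choices()   # [('', '---------'), ('cat', 'Cat'), ('dog', 'Dog')]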
@@ -793,7 +787,7 @@ class Field(RegisterLookupMixin):
 
     def value_to_string(self, obj):
         """
-        Returns a string value of this field from the passed obj.
+        Return a string value of this field from the passed obj.
         This is used by the serialization framework.
         """
         return force_text(self.value_from_object(obj))
@@ -813,9 +807,7 @@ class Field(RegisterLookupMixin):
             setattr(instance, self.name, data)
 
     def formfield(self, form_class=None, choices_form_class=None, **kwargs):
-        """
-        Returns a django.forms.Field instance for this database Field.
-        """
+        """Return a django.forms.Field instance for this field."""
         defaults = {'required': not self.blank,
                     'label': capfirst(self.verbose_name),
                     'help_text': self.help_text}
@@ -851,9 +843,7 @@ class Field(RegisterLookupMixin):
         return form_class(**defaults)
 
     def value_from_object(self, obj):
-        """
-        Returns the value of this field in the given model instance.
-        """
+        """Return the value of this field in the given model instance."""
         return getattr(obj, self.attname)
 
 
@@ -1137,11 +1127,8 @@ class DateField(DateTimeCheckMixin, Field):
 
     def _check_fix_default_value(self):
         """
-        Adds a warning to the checks framework stating, that using an actual
-        date or datetime value is probably wrong; it's only being evaluated on
-        server start-up.
-
-        For details see ticket #21905
+        Warn that using an actual date or datetime value is probably wrong;
+        it's only evaluated on server startup.
         """
         if not self.has_default():
             return []
@@ -1279,11 +1266,8 @@ class DateTimeField(DateField):
 
     def _check_fix_default_value(self):
         """
-        Adds a warning to the checks framework stating, that using an actual
-        date or datetime value is probably wrong; it's only being evaluated on
-        server start-up.
-
-        For details see ticket #21905
+        Warn that using an actual date or datetime value is probably wrong;
+        it's only evaluated on server startup.
         """
         if not self.has_default():
             return []
@@ -1539,7 +1523,7 @@ class DecimalField(Field):
 
     def format_number(self, value):
         """
-        Formats a number into a string with the requisite number of digits and
+        Format a number into a string with the requisite number of digits and
         decimal places.
         """
         # Method moved to django.db.backends.utils.
@@ -1569,9 +1553,10 @@ class DecimalField(Field):
 
 
 class DurationField(Field):
-    """Stores timedelta objects.
+    """
+    Store timedelta objects.
 
-    Uses interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
+    Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
     of microseconds on other databases.
     """
     empty_strings_allowed = False
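A small usage sketch of the storage behavior the DurationField docstring describes (Stopwatch is a hypothetical model):

from datetime import timedelta
from django.db import models

class Stopwatch(models.Model):  # hypothetical model
    elapsed = models.DurationField()

# Values round-trip as timedelta objects whatever the backing column is
# (interval, INTERVAL DAY TO SECOND, or a bigint of microseconds).
Stopwatch.objects.create(elapsed=timedelta(minutes=90))
Stopwatch.objects.filter(elapsed__gt=timedelta(hours=1))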
@@ -2123,11 +2108,8 @@ class TimeField(DateTimeCheckMixin, Field):
 
     def _check_fix_default_value(self):
         """
-        Adds a warning to the checks framework stating, that using an actual
-        time or datetime value is probably wrong; it's only being evaluated on
-        server start-up.
-
-        For details see ticket #21905
+        Warn that using an actual date or datetime value is probably wrong;
+        it's only evaluated on server startup.
         """
         if not self.has_default():
             return []
@@ -133,14 +133,14 @@ class FieldFile(File):
 
 class FileDescriptor:
     """
-    The descriptor for the file attribute on the model instance. Returns a
-    FieldFile when accessed so you can do stuff like::
+    The descriptor for the file attribute on the model instance. Return a
+    FieldFile when accessed so you can write code like::
 
         >>> from myapp.models import MyModel
         >>> instance = MyModel.objects.get(pk=1)
         >>> instance.file.size
 
-    Assigns a file object on assignment so you can do::
+    Assign a file object on assignment so you can do::
 
         >>> with open('/path/to/hello.world', 'r') as f:
         ...     instance.file = File(f)
@@ -275,7 +275,6 @@ class FileField(Field):
         return "FileField"
 
     def get_prep_value(self, value):
-        "Returns field's value prepared for saving into a database."
         value = super().get_prep_value(value)
         # Need to convert File objects provided via a form to string for database insertion
         if value is None:
@@ -283,7 +282,6 @@ class FileField(Field):
         return str(value)
 
     def pre_save(self, model_instance, add):
-        "Returns field's value just before saving."
         file = super().pre_save(model_instance, add)
         if file and not file._committed:
             # Commit the file to storage prior to saving the model
@@ -406,7 +404,7 @@ class ImageField(FileField):
 
     def update_dimension_fields(self, instance, force=False, *args, **kwargs):
         """
-        Updates field's width and height fields, if defined.
+        Update field's width and height fields, if defined.
 
         This method is hooked up to model's post_init signal to update
         dimensions after instantiating a model instance. However, dimensions
@@ -80,9 +80,7 @@ def lazy_related_operation(function, model, *related_models, **kwargs):
 
 
 class RelatedField(Field):
-    """
-    Base class that all relational fields inherit from.
-    """
+    """Base class that all relational fields inherit from."""
 
     # Field flags
     one_to_many = False
@@ -192,9 +190,7 @@ class RelatedField(Field):
         return []
 
     def _check_clashes(self):
-        """
-        Check accessor and reverse query name clashes.
-        """
+        """Check accessor and reverse query name clashes."""
         from django.db.models.base import ModelBase
 
         errors = []
@@ -424,7 +420,7 @@ class RelatedField(Field):
     @property
     def target_field(self):
         """
-        When filtering against this relation, returns the field on the remote
+        When filtering against this relation, return the field on the remote
         model against which the filtering should happen.
         """
         target_fields = self.get_path_info()[-1].target_fields
@@ -436,7 +432,7 @@ class RelatedField(Field):
 
 class ForeignObject(RelatedField):
     """
-    Abstraction of the ForeignKey relation, supports multi-column relations.
+    Abstraction of the ForeignKey relation to support multi-column relations.
     """
 
     # Field flags
@@ -693,17 +689,13 @@ class ForeignObject(RelatedField):
         return None
 
     def get_path_info(self):
-        """
-        Get path from this field to the related model.
-        """
+        """Get path from this field to the related model."""
         opts = self.remote_field.model._meta
         from_opts = self.model._meta
         return [PathInfo(from_opts, opts, self.foreign_related_fields, self, False, True)]
 
     def get_reverse_path_info(self):
-        """
-        Get path from the related model to this field's model.
-        """
+        """Get path from the related model to this field's model."""
         opts = self.model._meta
         from_opts = self.remote_field.model._meta
         pathinfos = [PathInfo(from_opts, opts, (opts.pk,), self.remote_field, not self.unique, False)]
@@ -861,9 +853,7 @@ class ForeignKey(ForeignObject):
         return self.foreign_related_fields[0]
 
     def get_reverse_path_info(self):
-        """
-        Get path from the related model to this field's model.
-        """
+        """Get path from the related model to this field's model."""
         opts = self.model._meta
         from_opts = self.remote_field.model._meta
         pathinfos = [PathInfo(from_opts, opts, (opts.pk,), self.remote_field, not self.unique, False)]
@@ -900,7 +890,7 @@ class ForeignKey(ForeignObject):
         return attname, column
 
     def get_default(self):
-        "Here we check if the default value is an object and return the to_field if so."
+        """Return the to_field if the default value is an object."""
         field_default = super().get_default()
         if isinstance(field_default, self.remote_field.model):
             return getattr(field_default, self.target_field.attname)
@@ -1441,9 +1431,7 @@ class ManyToManyField(RelatedField):
         return name, path, args, kwargs
 
     def _get_path_info(self, direct=False):
-        """
-        Called by both direct and indirect m2m traversal.
-        """
+        """Called by both direct and indirect m2m traversal."""
         pathinfos = []
         int_model = self.remote_field.through
         linkfield1 = int_model._meta.get_field(self.m2m_field_name())
@@ -1598,9 +1586,6 @@ class ManyToManyField(RelatedField):
         pass
 
     def value_from_object(self, obj):
-        """
-        Return the value of this field in the given model instance.
-        """
         if obj.pk is None:
             return self.related_model.objects.none()
         return getattr(obj, self.attname).all()
@@ -66,7 +66,7 @@ class ForeignObjectRel:
     @property
     def target_field(self):
         """
-        When filtering against this relation, returns the field on the remote
+        When filtering against this relation, return the field on the remote
         model against which the filtering should happen.
         """
         target_fields = self.get_path_info()[-1].target_fields
@@ -116,8 +116,8 @@ class ForeignObjectRel:
 
     def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):
         """
-        Return choices with a default blank choices included, for use as
-        SelectField choices for this field.
+        Return choices with a default blank choices included, for use
+        as <select> choices for this field.
 
         Analog of django.db.models.fields.Field.get_choices(), provided
         initially for utilization by RelatedFieldListFilter.
@@ -127,7 +127,7 @@ class ForeignObjectRel:
         ]
 
     def is_hidden(self):
-        "Should the related object be hidden?"
+        """Should the related object be hidden?"""
         return bool(self.related_name) and self.related_name[-1] == '+'
 
     def get_joining_columns(self):
@@ -5,9 +5,7 @@ from django.db.models import Func, Transform, Value, fields
 
 
 class Cast(Func):
-    """
-    Coerce an expression to a new field type.
-    """
+    """Coerce an expression to a new field type."""
     function = 'CAST'
     template = '%(function)s(%(expressions)s AS %(db_type)s)'
 
@@ -38,9 +36,7 @@ class Cast(Func):
 
 
 class Coalesce(Func):
-    """
-    Chooses, from left to right, the first non-null expression and returns it.
-    """
+    """Return, from left to right, the first non-null expression."""
     function = 'COALESCE'
 
     def __init__(self, *expressions, **extra):
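A usage sketch for the Coalesce behavior described above (Author, with a nullable nickname and a name field, is a hypothetical model):

from django.db.models import Value
from django.db.models.functions import Coalesce

# Annotate each row with the first non-null of nickname/name, else a literal fallback.
Author.objects.annotate(
    display_name=Coalesce('nickname', 'name', Value('anonymous')),
)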
@@ -65,9 +61,8 @@ class Coalesce(Func):
 
 class ConcatPair(Func):
     """
-    A helper class that concatenates two arguments together. This is used
-    by `Concat` because not all backend databases support more than two
-    arguments.
+    Concatenate two arguments together. This is used by `Concat` because not
+    all backend databases support more than two arguments.
     """
     function = 'CONCAT'
 
@@ -98,9 +93,9 @@ class ConcatPair(Func):
 
 class Concat(Func):
     """
-    Concatenates text fields together. Backends that result in an entire
+    Concatenate text fields together. Backends that result in an entire
     null expression when any arguments are null will wrap each argument in
-    coalesce functions to ensure we always get a non-null result.
+    coalesce functions to ensure a non-null result.
     """
     function = None
     template = "%(expressions)s"
@@ -122,7 +117,7 @@ class Concat(Func):
 
 class Greatest(Func):
     """
-    Chooses the maximum expression and returns it.
+    Return the maximum expression.
 
     If any expression is null the return value is database-specific:
     On Postgres, the maximum not-null expression is returned.
@@ -142,11 +137,11 @@ class Greatest(Func):
 
 class Least(Func):
     """
-    Chooses the minimum expression and returns it.
+    Return the minimum expression.
 
     If any expression is null the return value is database-specific:
-    On Postgres, the minimum not-null expression is returned.
-    On MySQL, Oracle, and SQLite, if any expression is null, null is returned.
+    On Postgres, return the minimum not-null expression.
+    On MySQL, Oracle, and SQLite, if any expression is null, return null.
    """
     function = 'LEAST'
 
@@ -161,7 +156,7 @@ class Least(Func):
 
 
 class Length(Transform):
-    """Returns the number of characters in the expression"""
+    """Return the number of characters in the expression."""
     function = 'LENGTH'
     lookup_name = 'length'
 
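Similarly, a sketch for Greatest/Least/Length (Comment, with created/modified/body fields, is hypothetical; null handling stays database-specific, as the docstrings note):

from django.db.models.functions import Greatest, Least, Length

Comment.objects.annotate(
    last_touched=Greatest('created', 'modified'),   # maximum of the expressions
    first_touched=Least('created', 'modified'),     # minimum of the expressions
    body_length=Length('body'),                     # number of characters in 'body'
)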
@@ -7,18 +7,18 @@ from django.db.models.query import QuerySet
 
 
 class BaseManager:
-    # Tracks each time a Manager instance is created. Used to retain order.
+    # To retain order, track each time a Manager instance is created.
     creation_counter = 0
 
     # Set to True for the 'objects' managers that are automatically created.
     auto_created = False
 
     #: If set to True the manager will be serialized into migrations and will
-    #: thus be available in e.g. RunPython operations
+    #: thus be available in e.g. RunPython operations.
     use_in_migrations = False
 
     def __new__(cls, *args, **kwargs):
-        # We capture the arguments to make returning them trivial
+        # Capture the arguments to make returning them trivial.
         obj = super().__new__(cls)
         obj._constructor_args = (args, kwargs)
         return obj
@@ -32,15 +32,15 @@ class BaseManager:
         self._hints = {}
 
     def __str__(self):
-        """ Return "app_label.model_label.manager_name". """
+        """Return "app_label.model_label.manager_name"."""
         return '%s.%s' % (self.model._meta.label, self.name)
 
     def deconstruct(self):
         """
-        Returns a 5-tuple of the form (as_manager (True), manager_class,
+        Return a 5-tuple of the form (as_manager (True), manager_class,
         queryset_class, args, kwargs).
 
-        Raises a ValueError if the manager is dynamically generated.
+        Raise a ValueError if the manager is dynamically generated.
         """
         qs_class = self._queryset_class
         if getattr(self, '_built_with_as_manager', False):
@@ -118,7 +118,7 @@ class BaseManager:
 
     def _set_creation_counter(self):
         """
-        Sets the creation counter value for this instance and increments the
+        Set the creation counter value for this instance and increment the
         class-level copy.
         """
         self.creation_counter = BaseManager.creation_counter
@@ -140,8 +140,8 @@ class BaseManager:
 
     def get_queryset(self):
         """
-        Returns a new QuerySet object. Subclasses can override this method to
-        easily customize the behavior of the Manager.
+        Return a new QuerySet object. Subclasses can override this method to
+        customize the behavior of the Manager.
         """
         return self._queryset_class(model=self.model, using=self._db, hints=self._hints)
 
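A sketch of the override pattern the get_queryset() docstring points at (PublishedManager and the Article model are hypothetical):

from django.db import models

class PublishedManager(models.Manager):
    def get_queryset(self):
        # Narrow the base QuerySet; every other manager method builds on this.
        return super().get_queryset().filter(published=True)

class Article(models.Model):  # hypothetical model
    published = models.BooleanField(default=False)
    objects = models.Manager()   # default manager
    live = PublishedManager()    # Article.live.all() returns only published rows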
@@ -284,7 +284,7 @@ class Options:
 
     def setup_proxy(self, target):
         """
-        Does the internal setup so that the current model is a proxy for
+        Do the internal setup so that the current model is a proxy for
         "target".
         """
         self.pk = target._meta.pk
@@ -315,11 +315,7 @@ class Options:
 
     @property
     def verbose_name_raw(self):
-        """
-        There are a few places where the untranslated verbose name is needed
-        (so that we get the same value regardless of currently active
-        locale).
-        """
+        """Return the untranslated verbose name."""
         with override(None):
             return force_text(self.verbose_name)
 
@@ -427,7 +423,7 @@ class Options:
     @cached_property
     def fields(self):
         """
-        Returns a list of all forward fields on the model and its parents,
+        Return a list of all forward fields on the model and its parents,
         excluding ManyToManyFields.
 
         Private API intended only to be used by Django itself; get_fields()
@@ -461,7 +457,7 @@ class Options:
     @cached_property
     def concrete_fields(self):
         """
-        Returns a list of all concrete fields on the model and its parents.
+        Return a list of all concrete fields on the model and its parents.
 
         Private API intended only to be used by Django itself; get_fields()
         combined with filtering of field properties is the public API for
@@ -474,7 +470,7 @@ class Options:
     @cached_property
     def local_concrete_fields(self):
         """
-        Returns a list of all concrete fields on the model.
+        Return a list of all concrete fields on the model.
 
         Private API intended only to be used by Django itself; get_fields()
         combined with filtering of field properties is the public API for
@@ -487,7 +483,7 @@ class Options:
     @cached_property
     def many_to_many(self):
         """
-        Returns a list of all many to many fields on the model and its parents.
+        Return a list of all many to many fields on the model and its parents.
 
         Private API intended only to be used by Django itself; get_fields()
         combined with filtering of field properties is the public API for
@@ -501,7 +497,7 @@ class Options:
     @cached_property
     def related_objects(self):
         """
-        Returns all related objects pointing to the current model. The related
+        Return all related objects pointing to the current model. The related
         objects can come from a one-to-one, one-to-many, or many-to-many field
         relation type.
 
@@ -589,7 +585,7 @@ class Options:
 
     def get_parent_list(self):
         """
-        Returns all the ancestors of this model as a list ordered by MRO.
+        Return all the ancestors of this model as a list ordered by MRO.
         Useful for determining if something is an ancestor, regardless of lineage.
         """
         result = OrderedSet(self.parents)
@@ -600,12 +596,12 @@ class Options:
 
     def get_ancestor_link(self, ancestor):
         """
-        Returns the field on the current model which points to the given
+        Return the field on the current model which points to the given
         "ancestor". This is possible an indirect link (a pointer to a parent
         model, which points, eventually, to the ancestor). Used when
         constructing table joins for model inheritance.
 
-        Returns None if the model isn't an ancestor of this one.
+        Return None if the model isn't an ancestor of this one.
         """
         if ancestor in self.parents:
             return self.parents[ancestor]
@@ -717,7 +713,7 @@ class Options:
 
     def get_fields(self, include_parents=True, include_hidden=False):
         """
-        Returns a list of fields associated to the model. By default, includes
+        Return a list of fields associated to the model. By default, include
         forward and reverse fields, fields derived from inheritance, but not
         hidden fields. The returned fields can be changed using the parameters:
 
@@ -39,9 +39,7 @@ class BaseIterable:
 
 
 class ModelIterable(BaseIterable):
-    """
-    Iterable that yields a model instance for each row.
-    """
+    """Iterable that yields a model instance for each row."""
 
     def __iter__(self):
         queryset = self.queryset
@@ -86,8 +84,7 @@ class ModelIterable(BaseIterable):
 
 class ValuesIterable(BaseIterable):
     """
-    Iterable returned by QuerySet.values() that yields a dict
-    for each row.
+    Iterable returned by QuerySet.values() that yields a dict for each row.
     """
 
     def __iter__(self):
@@ -108,8 +105,8 @@ class ValuesIterable(BaseIterable):
 
 class ValuesListIterable(BaseIterable):
     """
-    Iterable returned by QuerySet.values_list(flat=False)
-    that yields a tuple for each row.
+    Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
+    for each row.
     """
 
     def __iter__(self):
@@ -141,8 +138,8 @@ class ValuesListIterable(BaseIterable):
 
 class FlatValuesListIterable(BaseIterable):
     """
-    Iterable returned by QuerySet.values_list(flat=True) that
-    yields single values.
+    Iterable returned by QuerySet.values_list(flat=True) that yields single
+    values.
     """
 
     def __iter__(self):
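A quick sketch of the result shapes these iterables produce (Person is a hypothetical model):

Person.objects.all()                           # model instances (ModelIterable)
Person.objects.values('id', 'name')            # dicts, e.g. {'id': 1, 'name': 'Ada'}
Person.objects.values_list('id', 'name')       # tuples, e.g. (1, 'Ada')
Person.objects.values_list('name', flat=True)  # single values, e.g. 'Ada'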
@@ -153,9 +150,7 @@ class FlatValuesListIterable(BaseIterable):
 
 
 class QuerySet:
-    """
-    Represents a lazy database lookup for a set of objects.
-    """
+    """Represent a lazy database lookup for a set of objects."""
 
     def __init__(self, model=None, query=None, using=None, hints=None):
         self.model = model
@@ -185,9 +180,7 @@ class QuerySet:
     ########################
 
     def __deepcopy__(self, memo):
-        """
-        Deep copy of a QuerySet doesn't populate the cache
-        """
+        """Don't populate the QuerySet's cache."""
         obj = self.__class__()
         for k, v in self.__dict__.items():
             if k == '_result_cache':
@@ -254,9 +247,7 @@ class QuerySet:
         return bool(self._result_cache)
 
     def __getitem__(self, k):
-        """
-        Retrieves an item or slice from the set of results.
-        """
+        """Retrieve an item or slice from the set of results."""
         if not isinstance(k, (int, slice)):
             raise TypeError
         assert ((not isinstance(k, slice) and (k >= 0)) or
@@ -319,8 +310,8 @@ class QuerySet:
 
     def aggregate(self, *args, **kwargs):
         """
-        Returns a dictionary containing the calculations (aggregation)
-        over the current queryset
+        Return a dictionary containing the calculations (aggregation)
+        over the current queryset.
 
         If args is present the expression is passed as a kwarg using
         the Aggregate object's default alias.
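For the aggregate() docstring above, a short usage sketch (Book with a price field is hypothetical):

from django.db.models import Avg, Max

# Keyword arguments pick the dict key; positional aggregates use their default alias.
Book.objects.aggregate(avg_price=Avg('price'))   # {'avg_price': ...}
Book.objects.aggregate(Max('price'))             # {'price__max': ...}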
@@ -347,11 +338,11 @@ class QuerySet:
 
     def count(self):
         """
-        Performs a SELECT COUNT() and returns the number of records as an
+        Perform a SELECT COUNT() and return the number of records as an
         integer.
 
-        If the QuerySet is already fully cached this simply returns the length
-        of the cached results set to avoid multiple SELECT COUNT(*) calls.
+        If the QuerySet is already fully cached, return the length of the
+        cached results set to avoid multiple SELECT COUNT(*) calls.
         """
         if self._result_cache is not None:
             return len(self._result_cache)
@@ -360,7 +351,7 @@ class QuerySet:
 
     def get(self, *args, **kwargs):
         """
-        Performs the query and returns a single object matching the given
+        Perform the query and return a single object matching the given
         keyword arguments.
         """
         clone = self.filter(*args, **kwargs)
@@ -381,7 +372,7 @@ class QuerySet:
 
     def create(self, **kwargs):
         """
-        Creates a new object with the given kwargs, saving it to the database
+        Create a new object with the given kwargs, saving it to the database
         and returning the created object.
         """
         obj = self.model(**kwargs)
@@ -396,9 +387,9 @@ class QuerySet:
 
     def bulk_create(self, objs, batch_size=None):
         """
-        Inserts each of the instances into the database. This does *not* call
-        save() on each of the instances, does not send any pre/post save
-        signals, and does not set the primary key attribute if it is an
+        Insert each of the instances into the database. Do *not* call
+        save() on each of the instances, do not send any pre/post_save
+        signals, and do not set the primary key attribute if it is an
         autoincrement field (except if features.can_return_ids_from_bulk_insert=True).
         Multi-table models are not supported.
         """
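A sketch of the bulk_create() behavior described above (Entry is a hypothetical model):

# Inserted in batches of 100; no save() calls, no pre/post_save signals, and
# primary keys are not set afterwards unless the backend can return them.
Entry.objects.bulk_create(
    [Entry(headline='First'), Entry(headline='Second')],
    batch_size=100,
)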
@@ -447,8 +438,8 @@ class QuerySet:
 
     def get_or_create(self, defaults=None, **kwargs):
         """
-        Looks up an object with the given kwargs, creating one if necessary.
-        Returns a tuple of (object, created), where created is a boolean
+        Look up an object with the given kwargs, creating one if necessary.
+        Return a tuple of (object, created), where created is a boolean
         specifying whether an object was created.
         """
         lookup, params = self._extract_model_params(defaults, **kwargs)
@@ -462,9 +453,9 @@ class QuerySet:
 
     def update_or_create(self, defaults=None, **kwargs):
         """
-        Looks up an object with the given kwargs, updating one with defaults
-        if it exists, otherwise creates a new one.
-        Returns a tuple (object, created), where created is a boolean
+        Look up an object with the given kwargs, updating one with defaults
+        if it exists, otherwise create a new one.
+        Return a tuple (object, created), where created is a boolean
         specifying whether an object was created.
         """
         defaults = defaults or {}
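Usage sketch for the (object, created) contract both docstrings describe (Player is a hypothetical model):

# Fetch by the lookup kwargs, creating with defaults when nothing matches.
player, created = Player.objects.get_or_create(name='bob', defaults={'score': 0})

# Same lookup, but apply defaults as an update when the row already exists.
player, created = Player.objects.update_or_create(name='bob', defaults={'score': 10})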
@@ -484,8 +475,8 @@ class QuerySet:
 
     def _create_object_from_params(self, lookup, params):
         """
-        Tries to create an object using passed params.
-        Used by get_or_create and update_or_create
+        Try to create an object using passed params. Used by get_or_create()
+        and update_or_create().
         """
         try:
             with transaction.atomic(using=self.db):
@@ -502,9 +493,9 @@ class QuerySet:
 
     def _extract_model_params(self, defaults, **kwargs):
         """
-        Prepares `lookup` (kwargs that are valid model attributes), `params`
+        Prepare `lookup` (kwargs that are valid model attributes), `params`
         (for creating a model instance) based on given kwargs; for use by
-        get_or_create and update_or_create.
+        get_or_create() and update_or_create().
         """
         defaults = defaults or {}
         lookup = kwargs.copy()
@@ -530,7 +521,7 @@ class QuerySet:
 
     def _earliest_or_latest(self, field_name=None, direction="-"):
         """
-        Returns the latest object, according to the model's
+        Return the latest object, according to the model's
         'get_latest_by' option or optional given field_name.
         """
         order_by = field_name or getattr(self.model._meta, 'get_latest_by')
@@ -551,18 +542,14 @@ class QuerySet:
         return self._earliest_or_latest(field_name=field_name, direction="-")
 
     def first(self):
-        """
-        Returns the first object of a query, returns None if no match is found.
-        """
+        """Return the first object of a query or None if no match is found."""
         objects = list((self if self.ordered else self.order_by('pk'))[:1])
         if objects:
             return objects[0]
         return None
 
     def last(self):
-        """
-        Returns the last object of a query, returns None if no match is found.
-        """
+        """Return the last object of a query or None if no match is found."""
         objects = list((self.reverse() if self.ordered else self.order_by('-pk'))[:1])
         if objects:
             return objects[0]
@@ -570,8 +557,8 @@ class QuerySet:
 
     def in_bulk(self, id_list=None):
         """
-        Returns a dictionary mapping each of the given IDs to the object with
-        that ID. If `id_list` isn't provided, the entire QuerySet is evaluated.
+        Return a dictionary mapping each of the given IDs to the object with
+        that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
         """
         assert self.query.can_filter(), \
             "Cannot use 'limit' or 'offset' with in_bulk"
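And for in_bulk() (hypothetical model again):

Player.objects.in_bulk([1, 2, 3])   # {1: <Player>, 2: <Player>, 3: <Player>}, keyed by pk
Player.objects.in_bulk()            # no id_list: evaluate the whole QuerySet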
@@ -584,9 +571,7 @@ class QuerySet:
         return {obj._get_pk_val(): obj for obj in qs}
 
     def delete(self):
-        """
-        Deletes the records in the current QuerySet.
-        """
+        """Delete the records in the current QuerySet."""
         assert self.query.can_filter(), \
             "Cannot use 'limit' or 'offset' with delete."
 
@@ -618,15 +603,15 @@ class QuerySet:
 
     def _raw_delete(self, using):
         """
-        Deletes objects found from the given queryset in single direct SQL
-        query. No signals are sent, and there is no protection for cascades.
+        Delete objects found from the given queryset in single direct SQL
+        query. No signals are sent and there is no protection for cascades.
         """
         return sql.DeleteQuery(self.model).delete_qs(self, using)
     _raw_delete.alters_data = True
 
     def update(self, **kwargs):
         """
-        Updates all elements in the current QuerySet, setting all the given
+        Update all elements in the current QuerySet, setting all the given
         fields to the appropriate values.
         """
         assert self.query.can_filter(), \
@@ -644,7 +629,7 @@ class QuerySet:
 
     def _update(self, values):
         """
-        A version of update that accepts field objects instead of field names.
+        A version of update() that accepts field objects instead of field names.
         Used primarily for model saving and not intended for use by general
         code (it requires too much poking around at model internals to be
         useful at that level).
@@ -711,7 +696,7 @@ class QuerySet:
 
     def dates(self, field_name, kind, order='ASC'):
         """
-        Returns a list of date objects representing all available dates for
+        Return a list of date objects representing all available dates for
         the given field_name, scoped to 'kind'.
         """
         assert kind in ("year", "month", "day"), \
@@ -727,7 +712,7 @@ class QuerySet:
 
     def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
         """
-        Returns a list of datetime objects representing all available
+        Return a list of datetime objects representing all available
         datetimes for the given field_name, scoped to 'kind'.
         """
         assert kind in ("year", "month", "day", "hour", "minute", "second"), \
@@ -747,9 +732,7 @@ class QuerySet:
         ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield')
 
     def none(self):
-        """
-        Returns an empty QuerySet.
-        """
+        """Return an empty QuerySet."""
         clone = self._clone()
         clone.query.set_empty()
         return clone
@@ -760,21 +743,21 @@ class QuerySet:
 
     def all(self):
         """
-        Returns a new QuerySet that is a copy of the current one. This allows a
+        Return a new QuerySet that is a copy of the current one. This allows a
         QuerySet to proxy for a model manager in some cases.
         """
         return self._clone()
 
     def filter(self, *args, **kwargs):
         """
-        Returns a new QuerySet instance with the args ANDed to the existing
+        Return a new QuerySet instance with the args ANDed to the existing
         set.
         """
         return self._filter_or_exclude(False, *args, **kwargs)
 
     def exclude(self, *args, **kwargs):
         """
-        Returns a new QuerySet instance with NOT (args) ANDed to the existing
+        Return a new QuerySet instance with NOT (args) ANDed to the existing
         set.
         """
         return self._filter_or_exclude(True, *args, **kwargs)
@@ -793,7 +776,7 @@ class QuerySet:
 
     def complex_filter(self, filter_obj):
         """
-        Returns a new QuerySet instance with filter_obj added to the filters.
+        Return a new QuerySet instance with filter_obj added to the filters.
 
         filter_obj can be a Q object (or anything with an add_to_query()
         method) or a dictionary of keyword lookup arguments.
@@ -830,7 +813,7 @@ class QuerySet:
 
     def select_for_update(self, nowait=False, skip_locked=False):
         """
-        Returns a new QuerySet instance that will select objects with a
+        Return a new QuerySet instance that will select objects with a
         FOR UPDATE lock.
         """
         if nowait and skip_locked:
@@ -844,12 +827,12 @@ class QuerySet:
 
     def select_related(self, *fields):
         """
-        Returns a new QuerySet instance that will select related objects.
+        Return a new QuerySet instance that will select related objects.
 
         If fields are specified, they must be ForeignKey fields and only those
         related objects are included in the selection.
 
-        If select_related(None) is called, the list is cleared.
+        If select_related(None) is called, clear the list.
         """
 
         if self._fields is not None:
@@ -866,13 +849,12 @@ class QuerySet:
 
     def prefetch_related(self, *lookups):
         """
-        Returns a new QuerySet instance that will prefetch the specified
+        Return a new QuerySet instance that will prefetch the specified
         Many-To-One and Many-To-Many related objects when the QuerySet is
         evaluated.
 
-        When prefetch_related() is called more than once, the list of lookups to
-        prefetch is appended to. If prefetch_related(None) is called, the list
-        is cleared.
+        When prefetch_related() is called more than once, append to the list of
+        prefetch lookups. If prefetch_related(None) is called, clear the list.
         """
         clone = self._clone()
         if lookups == (None,):
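A usage sketch contrasting the two (Book, with a ForeignKey author and a many-to-many tags field, is hypothetical):

# Follow the foreign key in the same query (a SQL join).
Book.objects.select_related('author')

# One extra query per lookup, joined in Python when the QuerySet is evaluated;
# repeated calls accumulate, and prefetch_related(None) clears the list.
Book.objects.prefetch_related('tags').prefetch_related('reviews')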
@@ -924,9 +906,7 @@ class QuerySet:
         return clone
 
     def order_by(self, *field_names):
-        """
-        Returns a new QuerySet instance with the ordering changed.
-        """
+        """Return a new QuerySet instance with the ordering changed."""
         assert self.query.can_filter(), \
             "Cannot reorder a query once a slice has been taken."
         obj = self._clone()
@@ -936,7 +916,7 @@ class QuerySet:
 
     def distinct(self, *field_names):
         """
-        Returns a new QuerySet instance that will select only distinct results.
+        Return a new QuerySet instance that will select only distinct results.
         """
         assert self.query.can_filter(), \
             "Cannot create distinct fields once a slice has been taken."
@@ -946,9 +926,7 @@ class QuerySet:
 
     def extra(self, select=None, where=None, params=None, tables=None,
               order_by=None, select_params=None):
-        """
-        Adds extra SQL fragments to the query.
-        """
+        """Add extra SQL fragments to the query."""
         assert self.query.can_filter(), \
             "Cannot change a query once a slice has been taken"
         clone = self._clone()
@@ -956,20 +934,17 @@ class QuerySet:
         return clone
 
     def reverse(self):
-        """
-        Reverses the ordering of the QuerySet.
-        """
+        """Reverse the ordering of the QuerySet."""
         clone = self._clone()
         clone.query.standard_ordering = not clone.query.standard_ordering
         return clone
 
     def defer(self, *fields):
         """
-        Defers the loading of data for certain fields until they are accessed.
-        The set of fields to defer is added to any existing set of deferred
-        fields. The only exception to this is if None is passed in as the only
-        parameter, in which case all deferrals are removed (None acts as a
-        reset option).
+        Defer the loading of data for certain fields until they are accessed.
+        Add the set of deferred fields to any existing set of deferred fields.
+        The only exception to this is if None is passed in as the only
+        parameter, in which case removal all deferrals.
         """
         if self._fields is not None:
             raise TypeError("Cannot call defer() after .values() or .values_list()")
@@ -982,7 +957,7 @@ class QuerySet:
 
     def only(self, *fields):
         """
-        Essentially, the opposite of defer. Only the fields passed into this
+        Essentially, the opposite of defer(). Only the fields passed into this
         method and that are not already specified as deferred are loaded
         immediately when the queryset is evaluated.
         """
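Sketch of the defer()/only() behavior described above (Article, with a large body field, is hypothetical):

Article.objects.defer('body')                    # load everything except 'body' until accessed
Article.objects.defer('body').defer('metadata')  # deferred field sets accumulate
Article.objects.defer(None)                      # None resets all deferrals
Article.objects.only('headline')                 # load only 'headline' (plus the pk) eagerly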
@ -997,9 +972,7 @@ class QuerySet:
|
|||||||
return clone
|
return clone
|
||||||
|
|
||||||
def using(self, alias):
|
def using(self, alias):
|
||||||
"""
|
"""Select which database this QuerySet should execute against."""
|
||||||
Selects which database this QuerySet should execute its query against.
|
|
||||||
"""
|
|
||||||
clone = self._clone()
|
clone = self._clone()
|
||||||
clone._db = alias
|
clone._db = alias
|
||||||
return clone
|
return clone
|
||||||
@@ -1011,7 +984,7 @@ class QuerySet:
@property
def ordered(self):
"""
Returns True if the QuerySet is ordered -- i.e. has an order_by()
Return True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model.
"""
if self.query.extra_order_by or self.query.order_by:
@@ -1023,7 +996,7 @@ class QuerySet:

@property
def db(self):
"Return the database that will be used if this query is executed now"
"""Return the database used if this query is executed now."""
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
@@ -1034,7 +1007,7 @@ class QuerySet:

def _insert(self, objs, fields, return_id=False, raw=False, using=None):
"""
Inserts a new record for the given model. This provides an interface to
Insert a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
"""
self._for_write = True
@@ -1048,9 +1021,9 @@ class QuerySet:

def _batched_insert(self, objs, fields, batch_size):
"""
A little helper method for bulk_insert to insert the bulk one batch
A helper method for bulk_create() to insert the bulk one batch at a
at a time. Inserts recursively a batch from the front of the bulk and
time. Insert recursively a batch from the front of the bulk and then
then _batched_insert() the remaining objects again.
_batched_insert() the remaining objects again.
"""
if not objs:
return
@@ -1090,7 +1063,7 @@ class QuerySet:

def _next_is_sticky(self):
"""
Indicates that the next filter call and the one following that should
Indicate that the next filter call and the one following that should
be treated as a single filter. This is only important when it comes to
determining when to reuse tables for many-to-many filters. Required so
that we can filter naturally on the results of related managers.
@@ -1103,9 +1076,7 @@ class QuerySet:
return self

def _merge_sanity_check(self, other):
"""
"""Check that two QuerySet classes may be merged."""
Checks that we are merging two comparable QuerySet classes.
"""
if self._fields is not None and (
set(self.query.values_select) != set(other.query.values_select) or
set(self.query.extra_select) != set(other.query.extra_select) or
@@ -1136,17 +1107,16 @@ class QuerySet:

def _add_hints(self, **hints):
"""
Update hinting information for later use by Routers
Update hinting information for use by routers. Add new key/values or
overwrite existing key/values.
"""
# If there is any hinting information, add it to what we already know.
# If we have a new hint for an existing key, overwrite with the new value.
self._hints.update(hints)

def _has_filters(self):
"""
Checks if this QuerySet has any filtering going on. Note that this
Check if this QuerySet has any filtering going on. This isn't
isn't equivalent for checking if all objects are present in results,
equivalent with checking if all objects are present in results, for
for example qs[1:]._has_filters() -> False.
example, qs[1:]._has_filters() -> False.
"""
return self.query.has_filters()

@@ -1158,7 +1128,7 @@ class InstanceCheckMeta(type):

class EmptyQuerySet(metaclass=InstanceCheckMeta):
"""
Marker class usable for checking if a queryset is empty by .none():
Marker class to checking if a queryset is empty by .none():
isinstance(qs.none(), EmptyQuerySet) -> True
"""

@@ -1168,7 +1138,7 @@ class EmptyQuerySet(metaclass=InstanceCheckMeta):

class RawQuerySet:
"""
Provides an iterator which converts the results of raw SQL queries into
Provide an iterator which converts the results of raw SQL queries into
annotated model instances.
"""
def __init__(self, raw_query, model=None, query=None, params=None,
@@ -1182,9 +1152,7 @@ class RawQuerySet:
self.translations = translations or {}

def resolve_model_init_order(self):
"""
"""Resolve the init field names and value positions."""
Resolve the init field names and value positions
"""
model_init_fields = [f for f in self.model._meta.fields if f.column in self.columns]
annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
if column not in self.model_fields]
@@ -1240,13 +1208,11 @@ class RawQuerySet:

@property
def db(self):
"Return the database that will be used if this query is executed now"
"""Return the database used if this query is executed now."""
return self._db or router.db_for_read(self.model, **self._hints)

def using(self, alias):
"""
"""Select the database this RawQuerySet should execute against."""
Selects which database this Raw QuerySet should execute its query against.
"""
return RawQuerySet(
self.raw_query, model=self.model,
query=self.query.clone(using=alias),
@@ -1273,9 +1239,7 @@ class RawQuerySet:

@cached_property
def model_fields(self):
"""
"""A dict mapping column names to model field names."""
A dict mapping column names to model field names.
"""
converter = connections[self.db].introspection.table_name_converter
model_fields = {}
for field in self.model._meta.fields:
@@ -1336,9 +1300,7 @@ class Prefetch:


def normalize_prefetch_lookups(lookups, prefix=None):
"""
"""Normalize lookups into Prefetch objects."""
Helper function that normalize lookups into Prefetch objects.
"""
ret = []
for lookup in lookups:
if not isinstance(lookup, Prefetch):
@@ -1474,9 +1436,9 @@ def prefetch_related_objects(model_instances, *related_lookups):

def get_prefetcher(instance, through_attr, to_attr):
"""
For the attribute 'through_attr' on the given instance, finds
For the attribute 'through_attr' on the given instance, find
an object that has a get_prefetch_queryset().
Returns a 4 tuple containing:
Return a 4 tuple containing:
(the object with get_prefetch_queryset (or None),
the descriptor object representing this relationship (or None),
a boolean that is False if the attribute was not found at all,
@@ -1521,14 +1483,13 @@ def get_prefetcher(instance, through_attr, to_attr):

def prefetch_one_level(instances, prefetcher, lookup, level):
"""
Helper function for prefetch_related_objects
Helper function for prefetch_related_objects().

Runs prefetches on all instances using the prefetcher object,
Run prefetches on all instances using the prefetcher object,
assigning results to relevant caches in instance.

The prefetched objects are returned, along with any additional
Return the prefetched objects along with any additional prefetches that
prefetches that must be done due to prefetch_related lookups
must be done due to prefetch_related lookups found from default managers.
found from default managers.
"""
# prefetcher must have a method get_prefetch_queryset() which takes a list
# of instances, and returns a tuple:
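# Illustrative use of the prefetching helpers above (editor's sketch, not part
# of this commit). Assumes hypothetical `Author` and `Book` models with a
# reverse relation `books`; prefetch_related_objects() runs one extra query per
# lookup and fills the caches on the already-fetched instances.
#
#     from django.db.models import prefetch_related_objects
#     authors = list(Author.objects.all())
#     prefetch_related_objects(authors, 'books')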
|
@@ -19,9 +19,7 @@ PathInfo = namedtuple('PathInfo', 'from_opts to_opts target_fields join_field m2


class InvalidQuery(Exception):
"""
"""The query passed to raw() isn't a safe query to use with raw()."""
The query passed to raw isn't a safe query to use with raw.
"""
pass


@@ -47,7 +45,7 @@ class QueryWrapper:

class Q(tree.Node):
"""
Encapsulates filters as objects that can then be combined logically (using
Encapsulate filters as objects that can then be combined logically (using
`&` and `|`).
"""
# Connection types
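# Illustrative combination of Q objects (editor's sketch, not part of this
# commit). Constructing and combining Q nodes needs no database access; the
# model and field names below are hypothetical.
#
#     from django.db.models import Q
#     published_or_recent = Q(status='published') | Q(created__year=2017)
#     not_draft = ~Q(status='draft')
#     Entry.objects.filter(published_or_recent & not_draft)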
@@ -112,8 +110,8 @@ class DeferredAttribute:

def __get__(self, instance, cls=None):
"""
Retrieves and caches the value from the datastore on the first lookup.
Retrieve and caches the value from the datastore on the first lookup.
Returns the cached value.
Return the cached value.
"""
if instance is None:
return self
@@ -211,7 +209,7 @@ class RegisterLookupMixin:

def select_related_descend(field, restricted, requested, load_fields, reverse=False):
"""
Returns True if this field should be used to descend deeper for
Return True if this field should be used to descend deeper for
select_related() purposes. Used by both the query construction code
(sql.query.fill_related_selections()) and the model instance creation code
(query.get_klass_info()).
@@ -247,10 +245,9 @@ def select_related_descend(field, restricted, requested, load_fields, reverse=Fa

def refs_expression(lookup_parts, annotations):
"""
A helper method to check if the lookup_parts contains references
Check if the lookup_parts contains references to the given annotations set.
to the given annotations set. Because the LOOKUP_SEP is contained in the
Because the LOOKUP_SEP is contained in the default annotation names, check
default annotation names we must check each prefix of the lookup_parts
each prefix of the lookup_parts for a match.
for a match.
"""
for n in range(len(lookup_parts) + 1):
level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n])
|
@@ -38,7 +38,7 @@ class SQLCompiler:

def pre_sql_setup(self):
"""
Does any necessary class setup immediately prior to producing SQL. This
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
@ -51,7 +51,7 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def get_group_by(self, select, order_by):
|
def get_group_by(self, select, order_by):
|
||||||
"""
|
"""
|
||||||
Returns a list of 2-tuples of form (sql, params).
|
Return a list of 2-tuples of form (sql, params).
|
||||||
|
|
||||||
The logic of what exactly the GROUP BY clause contains is hard
|
The logic of what exactly the GROUP BY clause contains is hard
|
||||||
to describe in other words than "if it passes the test suite,
|
to describe in other words than "if it passes the test suite,
|
||||||
@ -163,7 +163,7 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def get_select(self):
|
def get_select(self):
|
||||||
"""
|
"""
|
||||||
Returns three values:
|
Return three values:
|
||||||
- a list of 3-tuples of (expression, (sql, params), alias)
|
- a list of 3-tuples of (expression, (sql, params), alias)
|
||||||
- a klass_info structure,
|
- a klass_info structure,
|
||||||
- a dictionary of annotations
|
- a dictionary of annotations
|
||||||
@ -232,7 +232,7 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def get_order_by(self):
|
def get_order_by(self):
|
||||||
"""
|
"""
|
||||||
Returns a list of 2-tuples of form (expr, (sql, params, is_ref)) for the
|
Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the
|
||||||
ORDER BY clause.
|
ORDER BY clause.
|
||||||
|
|
||||||
The order_by clause can alter the select clause (for example it
|
The order_by clause can alter the select clause (for example it
|
||||||
@@ -400,7 +400,7 @@ class SQLCompiler:

def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Creates the SQL for this query. Returns the SQL string and list of
Create the SQL for this query. Return the SQL string and list of
parameters.

If 'with_limits' is False, any limit/offset information is not included
@ -513,14 +513,14 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
|
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
|
||||||
"""
|
"""
|
||||||
Computes the default columns for selecting every field in the base
|
Compute the default columns for selecting every field in the base
|
||||||
model. Will sometimes be called to pull in related models (e.g. via
|
model. Will sometimes be called to pull in related models (e.g. via
|
||||||
select_related), in which case "opts" and "start_alias" will be given
|
select_related), in which case "opts" and "start_alias" will be given
|
||||||
to provide a starting point for the traversal.
|
to provide a starting point for the traversal.
|
||||||
|
|
||||||
Returns a list of strings, quoted appropriately for use in SQL
|
Return a list of strings, quoted appropriately for use in SQL
|
||||||
directly, as well as a set of aliases used in the select statement (if
|
directly, as well as a set of aliases used in the select statement (if
|
||||||
'as_pairs' is True, returns a list of (alias, col_name) pairs instead
|
'as_pairs' is True, return a list of (alias, col_name) pairs instead
|
||||||
of strings as the first component and None as the second component).
|
of strings as the first component and None as the second component).
|
||||||
"""
|
"""
|
||||||
result = []
|
result = []
|
||||||
@ -558,10 +558,10 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def get_distinct(self):
|
def get_distinct(self):
|
||||||
"""
|
"""
|
||||||
Returns a quoted list of fields to use in DISTINCT ON part of the query.
|
Return a quoted list of fields to use in DISTINCT ON part of the query.
|
||||||
|
|
||||||
Note that this method can alter the tables in the query, and thus it
|
This method can alter the tables in the query, and thus it must be
|
||||||
must be called before get_from_clause().
|
called before get_from_clause().
|
||||||
"""
|
"""
|
||||||
qn = self.quote_name_unless_alias
|
qn = self.quote_name_unless_alias
|
||||||
qn2 = self.connection.ops.quote_name
|
qn2 = self.connection.ops.quote_name
|
||||||
@ -582,7 +582,7 @@ class SQLCompiler:
|
|||||||
def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
|
def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
|
||||||
already_seen=None):
|
already_seen=None):
|
||||||
"""
|
"""
|
||||||
Returns the table alias (the name might be ambiguous, the alias will
|
Return the table alias (the name might be ambiguous, the alias will
|
||||||
not be) and column name for ordering by the given 'name' parameter.
|
not be) and column name for ordering by the given 'name' parameter.
|
||||||
The 'name' is of the form 'field1__field2__...__fieldN'.
|
The 'name' is of the form 'field1__field2__...__fieldN'.
|
||||||
"""
|
"""
|
||||||
@ -613,11 +613,11 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def _setup_joins(self, pieces, opts, alias):
|
def _setup_joins(self, pieces, opts, alias):
|
||||||
"""
|
"""
|
||||||
A helper method for get_order_by and get_distinct.
|
Helper method for get_order_by() and get_distinct().
|
||||||
|
|
||||||
Note that get_ordering and get_distinct must produce same target
|
get_ordering() and get_distinct() must produce same target columns on
|
||||||
columns on same input, as the prefixes of get_ordering and get_distinct
|
same input, as the prefixes of get_ordering() and get_distinct() must
|
||||||
must match. Executing SQL where this is not true is an error.
|
match. Executing SQL where this is not true is an error.
|
||||||
"""
|
"""
|
||||||
if not alias:
|
if not alias:
|
||||||
alias = self.query.get_initial_alias()
|
alias = self.query.get_initial_alias()
|
||||||
@ -628,14 +628,14 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def get_from_clause(self):
|
def get_from_clause(self):
|
||||||
"""
|
"""
|
||||||
Returns a list of strings that are joined together to go after the
|
Return a list of strings that are joined together to go after the
|
||||||
"FROM" part of the query, as well as a list any extra parameters that
|
"FROM" part of the query, as well as a list any extra parameters that
|
||||||
need to be included. Sub-classes, can override this to create a
|
need to be included. Subclasses, can override this to create a
|
||||||
from-clause via a "select".
|
from-clause via a "select".
|
||||||
|
|
||||||
This should only be called after any SQL construction methods that
|
This should only be called after any SQL construction methods that
|
||||||
might change the tables we need. This means the select columns,
|
might change the tables that are needed. This means the select columns,
|
||||||
ordering and distinct must be done first.
|
ordering, and distinct must be done first.
|
||||||
"""
|
"""
|
||||||
result = []
|
result = []
|
||||||
params = []
|
params = []
|
||||||
@ -792,8 +792,8 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def deferred_to_columns(self):
|
def deferred_to_columns(self):
|
||||||
"""
|
"""
|
||||||
Converts the self.deferred_loading data structure to mapping of table
|
Convert the self.deferred_loading data structure to mapping of table
|
||||||
names to sets of column names which are to be loaded. Returns the
|
names to sets of column names which are to be loaded. Return the
|
||||||
dictionary.
|
dictionary.
|
||||||
"""
|
"""
|
||||||
columns = {}
|
columns = {}
|
||||||
@ -820,9 +820,7 @@ class SQLCompiler:
|
|||||||
return tuple(row)
|
return tuple(row)
|
||||||
|
|
||||||
def results_iter(self, results=None):
|
def results_iter(self, results=None):
|
||||||
"""
|
"""Return an iterator over the results from executing this query."""
|
||||||
Returns an iterator over the results from executing this query.
|
|
||||||
"""
|
|
||||||
if results is None:
|
if results is None:
|
||||||
results = self.execute_sql(MULTI)
|
results = self.execute_sql(MULTI)
|
||||||
fields = [s[0] for s in self.select[0:self.col_count]]
|
fields = [s[0] for s in self.select[0:self.col_count]]
|
||||||
@ -845,7 +843,7 @@ class SQLCompiler:
|
|||||||
|
|
||||||
def execute_sql(self, result_type=MULTI, chunked_fetch=False):
|
def execute_sql(self, result_type=MULTI, chunked_fetch=False):
|
||||||
"""
|
"""
|
||||||
Run the query against the database and returns the result(s). The
|
Run the query against the database and return the result(s). The
|
||||||
return value is a single data item if result_type is SINGLE, or an
|
return value is a single data item if result_type is SINGLE, or an
|
||||||
iterator over the results if the result_type is MULTI.
|
iterator over the results if the result_type is MULTI.
|
||||||
|
|
||||||
@ -933,10 +931,10 @@ class SQLInsertCompiler(SQLCompiler):
|
|||||||
def field_as_sql(self, field, val):
|
def field_as_sql(self, field, val):
|
||||||
"""
|
"""
|
||||||
Take a field and a value intended to be saved on that field, and
|
Take a field and a value intended to be saved on that field, and
|
||||||
return placeholder SQL and accompanying params. Checks for raw values,
|
return placeholder SQL and accompanying params. Check for raw values,
|
||||||
expressions and fields with get_placeholder() defined in that order.
|
expressions, and fields with get_placeholder() defined in that order.
|
||||||
|
|
||||||
When field is None, the value is considered raw and is used as the
|
When field is None, consider the value raw and use it as the
|
||||||
placeholder, with no corresponding parameters returned.
|
placeholder, with no corresponding parameters returned.
|
||||||
"""
|
"""
|
||||||
if field is None:
|
if field is None:
|
||||||
@ -994,9 +992,9 @@ class SQLInsertCompiler(SQLCompiler):
|
|||||||
|
|
||||||
def assemble_as_sql(self, fields, value_rows):
|
def assemble_as_sql(self, fields, value_rows):
|
||||||
"""
|
"""
|
||||||
Take a sequence of N fields and a sequence of M rows of values,
|
Take a sequence of N fields and a sequence of M rows of values, and
|
||||||
generate placeholder SQL and parameters for each field and value, and
|
generate placeholder SQL and parameters for each field and value.
|
||||||
return a pair containing:
|
Return a pair containing:
|
||||||
* a sequence of M rows of N SQL placeholder strings, and
|
* a sequence of M rows of N SQL placeholder strings, and
|
||||||
* a sequence of M rows of corresponding parameter values.
|
* a sequence of M rows of corresponding parameter values.
|
||||||
|
|
||||||
@ -1105,7 +1103,7 @@ class SQLInsertCompiler(SQLCompiler):
|
|||||||
class SQLDeleteCompiler(SQLCompiler):
|
class SQLDeleteCompiler(SQLCompiler):
|
||||||
def as_sql(self):
|
def as_sql(self):
|
||||||
"""
|
"""
|
||||||
Creates the SQL for this query. Returns the SQL string and list of
|
Create the SQL for this query. Return the SQL string and list of
|
||||||
parameters.
|
parameters.
|
||||||
"""
|
"""
|
||||||
assert len([t for t in self.query.tables if self.query.alias_refcount[t] > 0]) == 1, \
|
assert len([t for t in self.query.tables if self.query.alias_refcount[t] > 0]) == 1, \
|
||||||
@ -1121,7 +1119,7 @@ class SQLDeleteCompiler(SQLCompiler):
|
|||||||
class SQLUpdateCompiler(SQLCompiler):
|
class SQLUpdateCompiler(SQLCompiler):
|
||||||
def as_sql(self):
|
def as_sql(self):
|
||||||
"""
|
"""
|
||||||
Creates the SQL for this query. Returns the SQL string and list of
|
Create the SQL for this query. Return the SQL string and list of
|
||||||
parameters.
|
parameters.
|
||||||
"""
|
"""
|
||||||
self.pre_sql_setup()
|
self.pre_sql_setup()
|
||||||
@ -1176,7 +1174,7 @@ class SQLUpdateCompiler(SQLCompiler):
|
|||||||
|
|
||||||
def execute_sql(self, result_type):
|
def execute_sql(self, result_type):
|
||||||
"""
|
"""
|
||||||
Execute the specified update. Returns the number of rows affected by
|
Execute the specified update. Return the number of rows affected by
|
||||||
the primary update query. The "primary update query" is the first
|
the primary update query. The "primary update query" is the first
|
||||||
non-empty query that is executed. Row counts for any subsequent,
|
non-empty query that is executed. Row counts for any subsequent,
|
||||||
related queries are not available.
|
related queries are not available.
|
||||||
@ -1197,13 +1195,12 @@ class SQLUpdateCompiler(SQLCompiler):
|
|||||||
|
|
||||||
def pre_sql_setup(self):
|
def pre_sql_setup(self):
|
||||||
"""
|
"""
|
||||||
If the update depends on results from other tables, we need to do some
|
If the update depends on results from other tables, munge the "where"
|
||||||
munging of the "where" conditions to match the format required for
|
conditions to match the format required for (portable) SQL updates.
|
||||||
(portable) SQL updates. That is done here.
|
|
||||||
|
|
||||||
Further, if we are going to be running multiple updates, we pull out
|
If multiple updates are required, pull out the id values to update at
|
||||||
the id values to update at this point so that they don't change as a
|
this point so that they don't change as a result of the progressive
|
||||||
result of the progressive updates.
|
updates.
|
||||||
"""
|
"""
|
||||||
refcounts_before = self.query.alias_refcount.copy()
|
refcounts_before = self.query.alias_refcount.copy()
|
||||||
# Ensure base table is in the query
|
# Ensure base table is in the query
|
||||||
@ -1242,7 +1239,7 @@ class SQLUpdateCompiler(SQLCompiler):
|
|||||||
class SQLAggregateCompiler(SQLCompiler):
|
class SQLAggregateCompiler(SQLCompiler):
|
||||||
def as_sql(self):
|
def as_sql(self):
|
||||||
"""
|
"""
|
||||||
Creates the SQL for this query. Returns the SQL string and list of
|
Create the SQL for this query. Return the SQL string and list of
|
||||||
parameters.
|
parameters.
|
||||||
"""
|
"""
|
||||||
sql, params = [], []
|
sql, params = [], []
|
||||||
@ -1261,7 +1258,7 @@ class SQLAggregateCompiler(SQLCompiler):
|
|||||||
|
|
||||||
def cursor_iter(cursor, sentinel, col_count):
|
def cursor_iter(cursor, sentinel, col_count):
|
||||||
"""
|
"""
|
||||||
Yields blocks of rows from a cursor and ensures the cursor is closed when
|
Yield blocks of rows from a cursor and ensure the cursor is closed when
|
||||||
done.
|
done.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
|
@ -59,7 +59,7 @@ class Join:
|
|||||||
|
|
||||||
def as_sql(self, compiler, connection):
|
def as_sql(self, compiler, connection):
|
||||||
"""
|
"""
|
||||||
Generates the full
|
Generate the full
|
||||||
LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params
|
LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params
|
||||||
clause for this join.
|
clause for this join.
|
||||||
"""
|
"""
|
||||||
|
@@ -44,9 +44,7 @@ def get_field_names_from_opts(opts):


class RawQuery:
"""
"""A single raw SQL query."""
A single raw SQL query
"""

def __init__(self, sql, using, params=None, context=None):
self.params = params or ()
@@ -112,9 +110,7 @@ class RawQuery:


class Query:
"""
"""A single SQL query."""
A single SQL query.
"""

alias_prefix = 'T'
subq_aliases = frozenset([alias_prefix])
@ -221,7 +217,7 @@ class Query:
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
"""
|
"""
|
||||||
Returns the query as a string of SQL with the parameter values
|
Return the query as a string of SQL with the parameter values
|
||||||
substituted in (use sql_with_params() to see the unsubstituted string).
|
substituted in (use sql_with_params() to see the unsubstituted string).
|
||||||
|
|
||||||
Parameter values won't necessarily be quoted correctly, since that is
|
Parameter values won't necessarily be quoted correctly, since that is
|
||||||
@ -232,7 +228,7 @@ class Query:
|
|||||||
|
|
||||||
def sql_with_params(self):
|
def sql_with_params(self):
|
||||||
"""
|
"""
|
||||||
Returns the query as an SQL string and the parameters that will be
|
Return the query as an SQL string and the parameters that will be
|
||||||
substituted into the query.
|
substituted into the query.
|
||||||
"""
|
"""
|
||||||
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
|
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
|
||||||
@ -254,7 +250,7 @@ class Query:
|
|||||||
|
|
||||||
def get_meta(self):
|
def get_meta(self):
|
||||||
"""
|
"""
|
||||||
Returns the Options instance (the model._meta) from which to start
|
Return the Options instance (the model._meta) from which to start
|
||||||
processing. Normally, this is self.model._meta, but it can be changed
|
processing. Normally, this is self.model._meta, but it can be changed
|
||||||
by subclasses.
|
by subclasses.
|
||||||
"""
|
"""
|
||||||
@ -262,7 +258,7 @@ class Query:
|
|||||||
|
|
||||||
def clone(self, klass=None, memo=None, **kwargs):
|
def clone(self, klass=None, memo=None, **kwargs):
|
||||||
"""
|
"""
|
||||||
Creates a copy of the current instance. The 'kwargs' parameter can be
|
Create a copy of the current instance. The 'kwargs' parameter can be
|
||||||
used by clients to update attributes after copying has taken place.
|
used by clients to update attributes after copying has taken place.
|
||||||
"""
|
"""
|
||||||
obj = Empty()
|
obj = Empty()
|
||||||
@ -395,7 +391,7 @@ class Query:
|
|||||||
|
|
||||||
def get_aggregation(self, using, added_aggregate_names):
|
def get_aggregation(self, using, added_aggregate_names):
|
||||||
"""
|
"""
|
||||||
Returns the dictionary with the values of the existing aggregations.
|
Return the dictionary with the values of the existing aggregations.
|
||||||
"""
|
"""
|
||||||
if not self.annotation_select:
|
if not self.annotation_select:
|
||||||
return {}
|
return {}
|
||||||
@ -488,7 +484,7 @@ class Query:
|
|||||||
|
|
||||||
def get_count(self, using):
|
def get_count(self, using):
|
||||||
"""
|
"""
|
||||||
Performs a COUNT() query using the current filter constraints.
|
Perform a COUNT() query using the current filter constraints.
|
||||||
"""
|
"""
|
||||||
obj = self.clone()
|
obj = self.clone()
|
||||||
obj.add_annotation(Count('*'), alias='__count', is_summary=True)
|
obj.add_annotation(Count('*'), alias='__count', is_summary=True)
|
||||||
@ -613,7 +609,7 @@ class Query:
|
|||||||
|
|
||||||
def deferred_to_data(self, target, callback):
|
def deferred_to_data(self, target, callback):
|
||||||
"""
|
"""
|
||||||
Converts the self.deferred_loading data structure to an alternate data
|
Convert the self.deferred_loading data structure to an alternate data
|
||||||
structure, describing the field that *will* be loaded. This is used to
|
structure, describing the field that *will* be loaded. This is used to
|
||||||
compute the columns to select from the database and also by the
|
compute the columns to select from the database and also by the
|
||||||
QuerySet class to work out which fields are being initialized on each
|
QuerySet class to work out which fields are being initialized on each
|
||||||
@ -699,7 +695,7 @@ class Query:
|
|||||||
|
|
||||||
def table_alias(self, table_name, create=False):
|
def table_alias(self, table_name, create=False):
|
||||||
"""
|
"""
|
||||||
Returns a table alias for the given table_name and whether this is a
|
Return a table alias for the given table_name and whether this is a
|
||||||
new alias or not.
|
new alias or not.
|
||||||
|
|
||||||
If 'create' is true, a new alias is always created. Otherwise, the
|
If 'create' is true, a new alias is always created. Otherwise, the
|
||||||
@ -724,17 +720,17 @@ class Query:
|
|||||||
return alias, True
|
return alias, True
|
||||||
|
|
||||||
def ref_alias(self, alias):
|
def ref_alias(self, alias):
|
||||||
""" Increases the reference count for this alias. """
|
"""Increases the reference count for this alias."""
|
||||||
self.alias_refcount[alias] += 1
|
self.alias_refcount[alias] += 1
|
||||||
|
|
||||||
def unref_alias(self, alias, amount=1):
|
def unref_alias(self, alias, amount=1):
|
||||||
""" Decreases the reference count for this alias. """
|
"""Decreases the reference count for this alias."""
|
||||||
self.alias_refcount[alias] -= amount
|
self.alias_refcount[alias] -= amount
|
||||||
|
|
||||||
def promote_joins(self, aliases):
|
def promote_joins(self, aliases):
|
||||||
"""
|
"""
|
||||||
Promotes recursively the join type of given aliases and its children to
|
Promote recursively the join type of given aliases and its children to
|
||||||
an outer join. If 'unconditional' is False, the join is only promoted if
|
an outer join. If 'unconditional' is False, only promote the join if
|
||||||
it is nullable or the parent join is an outer join.
|
it is nullable or the parent join is an outer join.
|
||||||
|
|
||||||
The children promotion is done to avoid join chains that contain a LOUTER
|
The children promotion is done to avoid join chains that contain a LOUTER
|
||||||
@ -786,8 +782,8 @@ class Query:
|
|||||||
|
|
||||||
def reset_refcounts(self, to_counts):
|
def reset_refcounts(self, to_counts):
|
||||||
"""
|
"""
|
||||||
This method will reset reference counts for aliases so that they match
|
Reset reference counts for aliases so that they match the value passed
|
||||||
the value passed in :param to_counts:.
|
in `to_counts`.
|
||||||
"""
|
"""
|
||||||
for alias, cur_refcount in self.alias_refcount.copy().items():
|
for alias, cur_refcount in self.alias_refcount.copy().items():
|
||||||
unref_amount = cur_refcount - to_counts.get(alias, 0)
|
unref_amount = cur_refcount - to_counts.get(alias, 0)
|
||||||
@ -795,7 +791,7 @@ class Query:
|
|||||||
|
|
||||||
def change_aliases(self, change_map):
|
def change_aliases(self, change_map):
|
||||||
"""
|
"""
|
||||||
Changes the aliases in change_map (which maps old-alias -> new-alias),
|
Change the aliases in change_map (which maps old-alias -> new-alias),
|
||||||
relabelling any references to them in select columns and the where
|
relabelling any references to them in select columns and the where
|
||||||
clause.
|
clause.
|
||||||
"""
|
"""
|
||||||
@ -831,14 +827,14 @@ class Query:
|
|||||||
|
|
||||||
def bump_prefix(self, outer_query):
|
def bump_prefix(self, outer_query):
|
||||||
"""
|
"""
|
||||||
Changes the alias prefix to the next letter in the alphabet in a way
|
Change the alias prefix to the next letter in the alphabet in a way
|
||||||
that the outer query's aliases and this query's aliases will not
|
that the outer query's aliases and this query's aliases will not
|
||||||
conflict. Even tables that previously had no alias will get an alias
|
conflict. Even tables that previously had no alias will get an alias
|
||||||
after this call.
|
after this call.
|
||||||
"""
|
"""
|
||||||
def prefix_gen():
|
def prefix_gen():
|
||||||
"""
|
"""
|
||||||
Generates a sequence of characters in alphabetical order:
|
Generate a sequence of characters in alphabetical order:
|
||||||
-> 'A', 'B', 'C', ...
|
-> 'A', 'B', 'C', ...
|
||||||
|
|
||||||
When the alphabet is finished, the sequence will continue with the
|
When the alphabet is finished, the sequence will continue with the
|
||||||
@ -878,7 +874,7 @@ class Query:
|
|||||||
|
|
||||||
def get_initial_alias(self):
|
def get_initial_alias(self):
|
||||||
"""
|
"""
|
||||||
Returns the first alias for this query, after increasing its reference
|
Return the first alias for this query, after increasing its reference
|
||||||
count.
|
count.
|
||||||
"""
|
"""
|
||||||
if self.tables:
|
if self.tables:
|
||||||
@ -890,15 +886,15 @@ class Query:
|
|||||||
|
|
||||||
def count_active_tables(self):
|
def count_active_tables(self):
|
||||||
"""
|
"""
|
||||||
Returns the number of tables in this query with a non-zero reference
|
Return the number of tables in this query with a non-zero reference
|
||||||
count. Note that after execution, the reference counts are zeroed, so
|
count. After execution, the reference counts are zeroed, so tables
|
||||||
tables added in compiler will not be seen by this method.
|
added in compiler will not be seen by this method.
|
||||||
"""
|
"""
|
||||||
return len([1 for count in self.alias_refcount.values() if count])
|
return len([1 for count in self.alias_refcount.values() if count])
|
||||||
|
|
||||||
def join(self, join, reuse=None):
|
def join(self, join, reuse=None):
|
||||||
"""
|
"""
|
||||||
Returns an alias for the join in 'connection', either reusing an
|
Return an alias for the join in 'connection', either reusing an
|
||||||
existing alias for that join or creating a new one. 'connection' is a
|
existing alias for that join or creating a new one. 'connection' is a
|
||||||
tuple (lhs, table, join_cols) where 'lhs' is either an existing
|
tuple (lhs, table, join_cols) where 'lhs' is either an existing
|
||||||
table alias or a table name. 'join_cols' is a tuple of tuples containing
|
table alias or a table name. 'join_cols' is a tuple of tuples containing
|
||||||
@ -940,7 +936,7 @@ class Query:
|
|||||||
|
|
||||||
def join_parent_model(self, opts, model, alias, seen):
|
def join_parent_model(self, opts, model, alias, seen):
|
||||||
"""
|
"""
|
||||||
Makes sure the given 'model' is joined in the query. If 'model' isn't
|
Make sure the given 'model' is joined in the query. If 'model' isn't
|
||||||
a parent of 'opts' or if it is None this method is a no-op.
|
a parent of 'opts' or if it is None this method is a no-op.
|
||||||
|
|
||||||
The 'alias' is the root alias for starting the join, 'seen' is a dict
|
The 'alias' is the root alias for starting the join, 'seen' is a dict
|
||||||
@ -973,9 +969,7 @@ class Query:
|
|||||||
return alias or seen[None]
|
return alias or seen[None]
|
||||||
|
|
||||||
def add_annotation(self, annotation, alias, is_summary=False):
|
def add_annotation(self, annotation, alias, is_summary=False):
|
||||||
"""
|
"""Add a single annotation expression to the Query."""
|
||||||
Adds a single annotation expression to the Query
|
|
||||||
"""
|
|
||||||
annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None,
|
annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None,
|
||||||
summarize=is_summary)
|
summarize=is_summary)
|
||||||
self.append_annotation_mask([alias])
|
self.append_annotation_mask([alias])
|
||||||
@ -1031,7 +1025,7 @@ class Query:
|
|||||||
|
|
||||||
def solve_lookup_type(self, lookup):
|
def solve_lookup_type(self, lookup):
|
||||||
"""
|
"""
|
||||||
Solve the lookup type from the lookup (eg: 'foobar__id__icontains')
|
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
|
||||||
"""
|
"""
|
||||||
lookup_splitted = lookup.split(LOOKUP_SEP)
|
lookup_splitted = lookup.split(LOOKUP_SEP)
|
||||||
if self._annotations:
|
if self._annotations:
|
||||||
@ -1051,8 +1045,8 @@ class Query:
|
|||||||
|
|
||||||
def check_query_object_type(self, value, opts, field):
|
def check_query_object_type(self, value, opts, field):
|
||||||
"""
|
"""
|
||||||
Checks whether the object passed while querying is of the correct type.
|
Check whether the object passed while querying is of the correct type.
|
||||||
If not, it raises a ValueError specifying the wrong object.
|
If not, raise a ValueError specifying the wrong object.
|
||||||
"""
|
"""
|
||||||
if hasattr(value, '_meta'):
|
if hasattr(value, '_meta'):
|
||||||
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
|
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
|
||||||
@ -1061,9 +1055,7 @@ class Query:
|
|||||||
(value, opts.object_name))
|
(value, opts.object_name))
|
||||||
|
|
||||||
def check_related_objects(self, field, value, opts):
|
def check_related_objects(self, field, value, opts):
|
||||||
"""
|
"""Check the type of object passed to query relations."""
|
||||||
Checks the type of object passed to query relations.
|
|
||||||
"""
|
|
||||||
if field.is_relation:
|
if field.is_relation:
|
||||||
# Check that the field and the queryset use the same model in a
|
# Check that the field and the queryset use the same model in a
|
||||||
# query like .filter(author=Author.objects.all()). For example, the
|
# query like .filter(author=Author.objects.all()). For example, the
|
||||||
@ -1087,7 +1079,7 @@ class Query:
|
|||||||
|
|
||||||
def build_lookup(self, lookups, lhs, rhs):
|
def build_lookup(self, lookups, lhs, rhs):
|
||||||
"""
|
"""
|
||||||
Tries to extract transforms and lookup from given lhs.
|
Try to extract transforms and lookup from given lhs.
|
||||||
|
|
||||||
The lhs value is something that works like SQLExpression.
|
The lhs value is something that works like SQLExpression.
|
||||||
The rhs value is what the lookup is going to compare against.
|
The rhs value is what the lookup is going to compare against.
|
||||||
@ -1114,7 +1106,7 @@ class Query:
|
|||||||
|
|
||||||
def try_transform(self, lhs, name, rest_of_lookups):
|
def try_transform(self, lhs, name, rest_of_lookups):
|
||||||
"""
|
"""
|
||||||
Helper method for build_lookup. Tries to fetch and initialize
|
Helper method for build_lookup(). Try to fetch and initialize
|
||||||
a transform for name parameter from lhs.
|
a transform for name parameter from lhs.
|
||||||
"""
|
"""
|
||||||
transform_class = lhs.get_transform(name)
|
transform_class = lhs.get_transform(name)
|
||||||
@@ -1129,7 +1121,7 @@ class Query:
def build_filter(self, filter_expr, branch_negated=False, current_negated=False,
can_reuse=None, connector=AND, allow_joins=True, split_subq=True):
"""
Builds a WhereNode for a single filter clause, but doesn't add it
Build a WhereNode for a single filter clause but don't add it
to this Query. Query.add_q() will then add this filter to the where
Node.

@@ -1140,7 +1132,7 @@ class Query:
negated or not and this will be used to determine if IS NULL filtering
is needed.

The difference between current_netageted and branch_negated is that
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.

@ -1263,9 +1255,7 @@ class Query:
|
|||||||
|
|
||||||
def _add_q(self, q_object, used_aliases, branch_negated=False,
|
def _add_q(self, q_object, used_aliases, branch_negated=False,
|
||||||
current_negated=False, allow_joins=True, split_subq=True):
|
current_negated=False, allow_joins=True, split_subq=True):
|
||||||
"""
|
"""Add a Q-object to the current filter."""
|
||||||
Adds a Q-object to the current filter.
|
|
||||||
"""
|
|
||||||
connector = q_object.connector
|
connector = q_object.connector
|
||||||
current_negated = current_negated ^ q_object.negated
|
current_negated = current_negated ^ q_object.negated
|
||||||
branch_negated = branch_negated or q_object.negated
|
branch_negated = branch_negated or q_object.negated
|
||||||
@ -1292,20 +1282,18 @@ class Query:
|
|||||||
|
|
||||||
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
|
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
|
||||||
"""
|
"""
|
||||||
Walks the list of names and turns them into PathInfo tuples. Note that
|
Walk the list of names and turns them into PathInfo tuples. A single
|
||||||
a single name in 'names' can generate multiple PathInfos (m2m for
|
name in 'names' can generate multiple PathInfos (m2m, for example).
|
||||||
example).
|
|
||||||
|
|
||||||
'names' is the path of names to travel, 'opts' is the model Options we
|
'names' is the path of names to travel, 'opts' is the model Options we
|
||||||
start the name resolving from, 'allow_many' is as for setup_joins().
|
start the name resolving from, 'allow_many' is as for setup_joins().
|
||||||
If fail_on_missing is set to True, then a name that can't be resolved
|
If fail_on_missing is set to True, then a name that can't be resolved
|
||||||
will generate a FieldError.
|
will generate a FieldError.
|
||||||
|
|
||||||
Returns a list of PathInfo tuples. In addition returns the final field
|
Return a list of PathInfo tuples. In addition return the final field
|
||||||
(the last used join field), and target (which is a field guaranteed to
|
(the last used join field) and target (which is a field guaranteed to
|
||||||
contain the same value as the final field). Finally, the method returns
|
contain the same value as the final field). Finally, return those names
|
||||||
those names that weren't found (which are likely transforms and the
|
that weren't found (which are likely transforms and the final lookup).
|
||||||
final lookup).
|
|
||||||
"""
|
"""
|
||||||
path, names_with_path = [], []
|
path, names_with_path = [], []
|
||||||
for pos, name in enumerate(names):
|
for pos, name in enumerate(names):
|
||||||
@ -1397,7 +1385,7 @@ class Query:
|
|||||||
If 'allow_many' is False, then any reverse foreign key seen will
|
If 'allow_many' is False, then any reverse foreign key seen will
|
||||||
generate a MultiJoin exception.
|
generate a MultiJoin exception.
|
||||||
|
|
||||||
Returns the final field involved in the joins, the target field (used
|
Return the final field involved in the joins, the target field (used
|
||||||
for any 'where' constraint), the final 'opts' value, the joins and the
|
for any 'where' constraint), the final 'opts' value, the joins and the
|
||||||
field path travelled to generate the joins.
|
field path travelled to generate the joins.
|
||||||
|
|
||||||
@ -1433,13 +1421,12 @@ class Query:
|
|||||||
is the full list of join aliases. The 'path' contain the PathInfos
|
is the full list of join aliases. The 'path' contain the PathInfos
|
||||||
used to create the joins.
|
used to create the joins.
|
||||||
|
|
||||||
Returns the final target field and table alias and the new active
|
Return the final target field and table alias and the new active
|
||||||
joins.
|
joins.
|
||||||
|
|
||||||
We will always trim any direct join if we have the target column
|
Always trim any direct join if the target column is already in the
|
||||||
available already in the previous table. Reverse joins can't be
|
previous table. Can't trim reverse joins as it's unknown if there's
|
||||||
trimmed as we don't know if there is anything on the other side of
|
anything on the other side of the join.
|
||||||
the join.
|
|
||||||
"""
|
"""
|
||||||
joins = joins[:]
|
joins = joins[:]
|
||||||
for pos, info in enumerate(reversed(path)):
|
for pos, info in enumerate(reversed(path)):
|
||||||
@@ -1555,13 +1542,12 @@ class Query:

def set_limits(self, low=None, high=None):
"""
Adjusts the limits on the rows retrieved. We use low/high to set these,
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, they are converted to the appropriate offset and limit values.
created, convert them to the appropriate offset and limit values.

Any limits passed in here are applied relative to the existing
Apply any limits passed in here to the existing constraints. Add low
constraints. So low is added to the current low value and both will be
to the current low value and clamp both to any existing high value.
clamped to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
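# Illustrative mapping from queryset slicing to set_limits() (editor's sketch,
# not part of this commit). Slicing a QuerySet calls query.set_limits() with
# the slice bounds, which the compiler later renders as OFFSET/LIMIT; a later
# slice on the same query is applied relative to, and clamped by, the first.
#
#     qs = Entry.objects.all()[5:10]    # set_limits(5, 10) -> LIMIT 5 OFFSET 5
#     list(qs)                          # the query only runs when evaluated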
@ -1578,23 +1564,19 @@ class Query:
|
|||||||
self.set_empty()
|
self.set_empty()
|
||||||
|
|
||||||
def clear_limits(self):
|
def clear_limits(self):
|
||||||
"""
|
"""Clear any existing limits."""
|
||||||
Clears any existing limits.
|
|
||||||
"""
|
|
||||||
self.low_mark, self.high_mark = 0, None
|
self.low_mark, self.high_mark = 0, None
|
||||||
|
|
||||||
def can_filter(self):
|
def can_filter(self):
|
||||||
"""
|
"""
|
||||||
Returns True if adding filters to this instance is still possible.
|
Return True if adding filters to this instance is still possible.
|
||||||
|
|
||||||
Typically, this means no limits or offsets have been put on the results.
|
Typically, this means no limits or offsets have been put on the results.
|
||||||
"""
|
"""
|
||||||
return not self.low_mark and self.high_mark is None
|
return not self.low_mark and self.high_mark is None
|
||||||
|
|
||||||
def clear_select_clause(self):
|
def clear_select_clause(self):
|
||||||
"""
|
"""Remove all fields from SELECT clause."""
|
||||||
Removes all fields from SELECT clause.
|
|
||||||
"""
|
|
||||||
self.select = []
|
self.select = []
|
||||||
self.default_cols = False
|
self.default_cols = False
|
||||||
self.select_related = False
|
self.select_related = False
|
||||||
@ -1603,7 +1585,7 @@ class Query:
|
|||||||
|
|
||||||
def clear_select_fields(self):
|
def clear_select_fields(self):
|
||||||
"""
|
"""
|
||||||
Clears the list of fields to select (but not extra_select columns).
|
Clear the list of fields to select (but not extra_select columns).
|
||||||
Some queryset types completely replace any existing list of select
|
Some queryset types completely replace any existing list of select
|
||||||
columns.
|
columns.
|
||||||
"""
|
"""
|
||||||
@ -1620,15 +1602,15 @@ class Query:
|
|||||||
|
|
||||||
def add_distinct_fields(self, *field_names):
|
def add_distinct_fields(self, *field_names):
|
||||||
"""
|
"""
|
||||||
Adds and resolves the given fields to the query's "distinct on" clause.
|
Add and resolve the given fields to the query's "distinct on" clause.
|
||||||
"""
|
"""
|
||||||
self.distinct_fields = field_names
|
self.distinct_fields = field_names
|
||||||
self.distinct = True
|
self.distinct = True
|
||||||
|
|
||||||
def add_fields(self, field_names, allow_m2m=True):
|
def add_fields(self, field_names, allow_m2m=True):
|
||||||
"""
|
"""
|
||||||
Adds the given (model) fields to the select set. The field names are
|
Add the given (model) fields to the select set. Add the field names in
|
||||||
added in the order specified.
|
the order specified.
|
||||||
"""
|
"""
|
||||||
alias = self.get_initial_alias()
|
alias = self.get_initial_alias()
|
||||||
opts = self.get_meta()
|
opts = self.get_meta()
|
||||||
@ -1656,12 +1638,12 @@ class Query:
|
|||||||
|
|
||||||
def add_ordering(self, *ordering):
|
def add_ordering(self, *ordering):
|
||||||
"""
|
"""
|
||||||
Adds items from the 'ordering' sequence to the query's "order by"
|
Add items from the 'ordering' sequence to the query's "order by"
|
||||||
clause. These items are either field names (not column names) --
|
clause. These items are either field names (not column names) --
|
||||||
possibly with a direction prefix ('-' or '?') -- or OrderBy
|
 possibly with a direction prefix ('-' or '?') -- or OrderBy
 expressions.

-If 'ordering' is empty, all ordering is cleared from the query.
+If 'ordering' is empty, clear all ordering from the query.
 """
 errors = []
 for item in ordering:
@@ -1681,7 +1663,7 @@ class Query:

 def clear_ordering(self, force_empty):
 """
-Removes any ordering settings. If 'force_empty' is True, there will be
+Remove any ordering settings. If 'force_empty' is True, there will be
 no ordering in the resulting query (not even the model's default).
 """
 self.order_by = []
@@ -1691,7 +1673,7 @@ class Query:

 def set_group_by(self):
 """
-Expands the GROUP BY clause required by the query.
+Expand the GROUP BY clause required by the query.

 This will usually be the set of all non-aggregate fields in the
 return data. If the database backend supports grouping by the
@@ -1710,7 +1692,7 @@ class Query:

 def add_select_related(self, fields):
 """
-Sets up the select_related data structure so that we only select
+Set up the select_related data structure so that we only select
 certain related models (as opposed to all models, when
 self.select_related=True).
 """
@@ -1726,7 +1708,7 @@ class Query:

 def add_extra(self, select, select_params, where, params, tables, order_by):
 """
-Adds data to the various extra_* attributes for user-created additions
+Add data to the various extra_* attributes for user-created additions
 to the query.
 """
 if select:
@@ -1758,16 +1740,14 @@ class Query:
 self.extra_order_by = order_by

 def clear_deferred_loading(self):
-"""
-Remove any fields from the deferred loading set.
-"""
+"""Remove any fields from the deferred loading set."""
 self.deferred_loading = (set(), True)

 def add_deferred_loading(self, field_names):
 """
 Add the given list of model field names to the set of fields to
 exclude from loading from the database when automatic column selection
-is done. The new field names are added to any existing field names that
+is done. Add the new field names to any existing field names that
 are deferred (or removed from any existing field names that are marked
 as the only ones for immediate loading).
 """
@@ -1788,8 +1768,8 @@ class Query:
 Add the given list of model field names to the set of fields to
 retrieve when the SQL is executed ("immediate loading" fields). The
 field names replace any existing immediate loading field names. If
-there are field names already specified for deferred loading, those
-names are removed from the new field_names before storing the new names
+there are field names already specified for deferred loading, remove
+those names from the new field_names before storing the new names
 for immediate loading. (That is, immediate loading overrides any
 existing immediate values, but respects existing deferrals.)
 """
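These deferred-loading hooks (add_deferred_loading() and add_immediate_loading()) are the internals behind the public QuerySet.defer() and QuerySet.only() methods. A minimal sketch of that public API, assuming a configured project and a hypothetical Article model with title and body fields:

    from myapp.models import Article  # hypothetical app and model

    light = Article.objects.defer('body')    # load everything except 'body'; it is fetched lazily on access
    titles = Article.objects.only('title')   # load only 'title' (plus the primary key)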
@@ -1809,12 +1789,12 @@ class Query:

 def get_loaded_field_names(self):
 """
-If any fields are marked to be deferred, returns a dictionary mapping
+If any fields are marked to be deferred, return a dictionary mapping
 models to a set of names in those fields that will be loaded. If a
 model is not in the returned dictionary, none of its fields are
 deferred.

-If no fields are marked for deferral, returns an empty dictionary.
+If no fields are marked for deferral, return an empty dictionary.
 """
 # We cache this because we call this function multiple times
 # (compiler.fill_related_selections, query.iterator)
@@ -1827,13 +1807,11 @@ class Query:
 return collection

 def get_loaded_field_names_cb(self, target, model, fields):
-"""
-Callback used by get_deferred_field_names().
-"""
+"""Callback used by get_deferred_field_names()."""
 target[model] = {f.attname for f in fields}

 def set_annotation_mask(self, names):
-"Set the mask of annotations that will actually be returned by the SELECT"
+"""Set the mask of annotations that will be returned by the SELECT."""
 if names is None:
 self.annotation_select_mask = None
 else:
@@ -1846,9 +1824,8 @@ class Query:

 def set_extra_mask(self, names):
 """
-Set the mask of extra select items that will be returned by SELECT,
-we don't actually remove them from the Query since they might be used
-later
+Set the mask of extra select items that will be returned by SELECT.
+Don't remove them from the Query since they might be used later.
 """
 if names is None:
 self.extra_select_mask = None
@@ -1893,10 +1870,9 @@ class Query:

 @property
 def annotation_select(self):
-"""The OrderedDict of aggregate columns that are not masked, and should
-be used in the SELECT clause.
-
-This result is cached for optimization purposes.
+"""
+Return the OrderedDict of aggregate columns that are not masked and
+should be used in the SELECT clause. Cache this result for performance.
 """
 if self._annotation_select_cache is not None:
 return self._annotation_select_cache
@@ -1928,16 +1904,16 @@ class Query:

 def trim_start(self, names_with_path):
 """
-Trims joins from the start of the join path. The candidates for trim
+Trim joins from the start of the join path. The candidates for trim
 are the PathInfos in names_with_path structure that are m2m joins.

-Also sets the select column so the start matches the join.
+Also set the select column so the start matches the join.

 This method is meant to be used for generating the subquery joins &
 cols in split_exclude().

-Returns a lookup usable for doing outerq.filter(lookup=self). Returns
-also if the joins in the prefix contain a LEFT OUTER join.
+Return a lookup usable for doing outerq.filter(lookup=self) and a
+boolean indicating if the joins in the prefix contain a LEFT OUTER join.
 _"""
 all_paths = []
 for _, paths in names_with_path:
@@ -1995,7 +1971,7 @@ class Query:

 def is_nullable(self, field):
 """
-A helper to check if the given field should be treated as nullable.
+Check if the given field should be treated as nullable.

 Some backends treat '' as null and Django treats such fields as
 nullable for those backends. In such situations field.null can be
@@ -2025,7 +2001,7 @@ class Query:

 def get_order_dir(field, default='ASC'):
 """
-Returns the field name and direction for an order specification. For
+Return the field name and direction for an order specification. For
 example, '-foo' is returned as ('foo', 'DESC').

 The 'default' param is used to indicate which way no prefix (or a '+'
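A minimal sketch of a helper with the behavior described in get_order_dir()'s docstring above (not necessarily Django's exact implementation):

    ORDER_DIR = {'ASC': ('ASC', 'DESC'), 'DESC': ('DESC', 'ASC')}

    def get_order_dir(field, default='ASC'):
        # '-foo' -> ('foo', 'DESC'); a bare name gets the default direction.
        dirn = ORDER_DIR[default]
        if field[0] == '-':
            return field[1:], dirn[1]
        return field, dirn[0]

    assert get_order_dir('-foo') == ('foo', 'DESC')
    assert get_order_dir('foo') == ('foo', 'ASC')
    assert get_order_dir('foo', default='DESC') == ('foo', 'DESC')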
@@ -2039,8 +2015,8 @@ def get_order_dir(field, default='ASC'):

 def add_to_dict(data, key, value):
 """
-A helper function to add "value" to the set of values for "key", whether or
-not "key" already exists.
+Add "value" to the set of values for "key", whether or not "key" already
+exists.
 """
 if key in data:
 data[key].add(value)
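The add_to_dict() docstring above describes a small dict-of-sets helper; a sketch consistent with it:

    def add_to_dict(data, key, value):
        # Add value to the set stored under key, creating the set if needed.
        if key in data:
            data[key].add(value)
        else:
            data[key] = {value}

    d = {}
    add_to_dict(d, 'author', 'name')
    add_to_dict(d, 'author', 'email')
    assert d == {'author': {'name', 'email'}}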
@@ -2050,8 +2026,8 @@ def add_to_dict(data, key, value):

 def is_reverse_o2o(field):
 """
-A little helper to check if the given field is reverse-o2o. The field is
-expected to be some sort of relation field or related object.
+Check if the given field is reverse-o2o. The field is expected to be some
+sort of relation field or related object.
 """
 return field.is_relation and field.one_to_one and not field.concrete

@@ -14,10 +14,7 @@ __all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'AggregateQuery']


 class DeleteQuery(Query):
-"""
-Delete queries are done through this class, since they are more constrained
-than general queries.
-"""
+"""A DELETE SQL query."""

 compiler = 'SQLDeleteCompiler'

@@ -81,9 +78,7 @@ class DeleteQuery(Query):


 class UpdateQuery(Query):
-"""
-Represents an "update" SQL query.
-"""
+"""An UPDATE SQL query."""

 compiler = 'SQLUpdateCompiler'

@@ -93,7 +88,7 @@ class UpdateQuery(Query):

 def _setup_query(self):
 """
-Runs on initialization and after cloning. Any attributes that would
+Run on initialization and after cloning. Any attributes that would
 normally be set in __init__ should go in here, instead, so that they
 are also set up after a clone() call.
 """
@@ -148,15 +143,15 @@ class UpdateQuery(Query):

 def add_related_update(self, model, field, value):
 """
-Adds (name, value) to an update query for an ancestor model.
+Add (name, value) to an update query for an ancestor model.

-Updates are coalesced so that we only run one update query per ancestor.
+Update are coalesced so that only one update query per ancestor is run.
 """
 self.related_updates.setdefault(model, []).append((field, None, value))

 def get_related_updates(self):
 """
-Returns a list of query objects: one for each update required to an
+Return a list of query objects: one for each update required to an
 ancestor model. Each query will have the same filtering conditions as
 the current query but will only update a single table.
 """
@@ -181,15 +176,6 @@ class InsertQuery(Query):
 self.objs = []

 def insert_values(self, fields, objs, raw=False):
-"""
-Set up the insert query from the 'insert_values' dictionary. The
-dictionary gives the model field names and their target values.
-
-If 'raw_values' is True, the values in the 'insert_values' dictionary
-are inserted directly into the query, rather than passed as SQL
-parameters. This provides a way to insert NULL and DEFAULT keywords
-into the query, for example.
-"""
 self.fields = fields
 self.objs = objs
 self.raw = raw
@@ -197,8 +183,8 @@ class InsertQuery(Query):

 class AggregateQuery(Query):
 """
-An AggregateQuery takes another query as a parameter to the FROM
-clause and only selects the elements in the provided list.
+Take another query as a parameter to the FROM clause and only select the
+elements in the provided list.
 """

 compiler = 'SQLAggregateCompiler'

@@ -13,7 +13,7 @@ OR = 'OR'

 class WhereNode(tree.Node):
 """
-Used to represent the SQL where-clause.
+An SQL WHERE clause.

 The class is tied to the Query class that created it (in order to create
 the correct SQL).
@@ -29,7 +29,7 @@ class WhereNode(tree.Node):

 def split_having(self, negated=False):
 """
-Returns two possibly None nodes: one for those parts of self that
+Return two possibly None nodes: one for those parts of self that
 should be included in the WHERE clause and one for those parts of
 self that must be included in the HAVING clause.
 """
@@ -62,9 +62,9 @@ class WhereNode(tree.Node):

 def as_sql(self, compiler, connection):
 """
-Returns the SQL version of the where clause and the value to be
-substituted in. Returns '', [] if this node matches everything,
-None, [] if this node is empty, and raises EmptyResultSet if this
+Return the SQL version of the where clause and the value to be
+substituted in. Return '', [] if this node matches everything,
+None, [] if this node is empty, and raise EmptyResultSet if this
 node can't match anything.
 """
 result = []
@@ -127,7 +127,7 @@ class WhereNode(tree.Node):

 def relabel_aliases(self, change_map):
 """
-Relabels the alias values of any children. 'change_map' is a dictionary
+Relabel the alias values of any children. 'change_map' is a dictionary
 mapping old (current) alias values to the new values.
 """
 for pos, child in enumerate(self.children):
@@ -139,7 +139,7 @@ class WhereNode(tree.Node):

 def clone(self):
 """
-Creates a clone of the tree. Must only be called on root nodes (nodes
+Create a clone of the tree. Must only be called on root nodes (nodes
 with empty subtree_parents). Childs must be either (Contraint, lookup,
 value) tuples, or objects supporting .clone().
 """
@@ -173,9 +173,7 @@ class WhereNode(tree.Node):


 class NothingNode:
-"""
-A node that matches nothing.
-"""
+"""A node that matches nothing."""
 contains_aggregate = False

 def as_sql(self, compiler=None, connection=None):

@@ -1,8 +1,8 @@
 def make_model_tuple(model):
 """
-Takes a model or a string of the form "app_label.ModelName" and returns a
+Take a model or a string of the form "app_label.ModelName" and return a
 corresponding ("app_label", "modelname") tuple. If a tuple is passed in,
-it's assumed to be a valid model tuple already and returned unchanged.
+assume it's a valid model tuple already and return it unchanged.
 """
 try:
 if isinstance(model, tuple):
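A sketch matching make_model_tuple()'s documented behavior (illustrative, not the exact implementation):

    def make_model_tuple(model):
        if isinstance(model, tuple):
            return model                          # assumed to be valid already
        if isinstance(model, str):
            app_label, model_name = model.split('.')
            return app_label, model_name.lower()
        return model._meta.app_label, model._meta.model_name

    assert make_model_tuple('auth.User') == ('auth', 'user')
    assert make_model_tuple(('auth', 'user')) == ('auth', 'user')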
@@ -6,9 +6,7 @@ from django.db import (


 class TransactionManagementError(ProgrammingError):
-"""
-This exception is thrown when transaction management is used improperly.
-"""
+"""Transaction management is used improperly."""
 pass


@@ -23,37 +21,29 @@ def get_connection(using=None):


 def get_autocommit(using=None):
-"""
-Get the autocommit status of the connection.
-"""
+"""Get the autocommit status of the connection."""
 return get_connection(using).get_autocommit()


 def set_autocommit(autocommit, using=None):
-"""
-Set the autocommit status of the connection.
-"""
+"""Set the autocommit status of the connection."""
 return get_connection(using).set_autocommit(autocommit)


 def commit(using=None):
-"""
-Commits a transaction.
-"""
+"""Commit a transaction."""
 get_connection(using).commit()


 def rollback(using=None):
-"""
-Rolls back a transaction.
-"""
+"""Roll back a transaction."""
 get_connection(using).rollback()


 def savepoint(using=None):
 """
-Creates a savepoint (if supported and required by the backend) inside the
-current transaction. Returns an identifier for the savepoint that will be
+Create a savepoint (if supported and required by the backend) inside the
+current transaction. Return an identifier for the savepoint that will be
 used for the subsequent rollback or commit.
 """
 return get_connection(using).savepoint()
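These module-level helpers (set_autocommit(), commit(), rollback(), savepoint(), and friends) support manual transaction control; a usage sketch, assuming a configured default database:

    from django.db import transaction

    transaction.set_autocommit(False)
    try:
        # ... run some queries ...
        sid = transaction.savepoint()        # identifier for a later rollback/commit
        # ... run more queries ...
        transaction.savepoint_commit(sid)    # release the savepoint, keeping its work
        transaction.commit()                 # commit the whole transaction
    except Exception:
        transaction.rollback()               # undo the whole transaction
    finally:
        transaction.set_autocommit(True)     # restore autocommit mode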
@@ -61,7 +51,7 @@ def savepoint(using=None):

 def savepoint_rollback(sid, using=None):
 """
-Rolls back the most recent savepoint (if one exists). Does nothing if
+Roll back the most recent savepoint (if one exists). Do nothing if
 savepoints are not supported.
 """
 get_connection(using).savepoint_rollback(sid)
@@ -69,7 +59,7 @@ def savepoint_rollback(sid, using=None):

 def savepoint_commit(sid, using=None):
 """
-Commits the most recent savepoint (if one exists). Does nothing if
+Commit the most recent savepoint (if one exists). Do nothing if
 savepoints are not supported.
 """
 get_connection(using).savepoint_commit(sid)
@@ -77,29 +67,27 @@ def savepoint_commit(sid, using=None):

 def clean_savepoints(using=None):
 """
-Resets the counter used to generate unique savepoint ids in this thread.
+Reset the counter used to generate unique savepoint ids in this thread.
 """
 get_connection(using).clean_savepoints()


 def get_rollback(using=None):
-"""
-Gets the "needs rollback" flag -- for *advanced use* only.
-"""
+"""Get the "needs rollback" flag -- for *advanced use* only."""
 return get_connection(using).get_rollback()


 def set_rollback(rollback, using=None):
 """
-Sets or unsets the "needs rollback" flag -- for *advanced use* only.
+Set or unset the "needs rollback" flag -- for *advanced use* only.

-When `rollback` is `True`, it triggers a rollback when exiting the
-innermost enclosing atomic block that has `savepoint=True` (that's the
-default). Use this to force a rollback without raising an exception.
+When `rollback` is `True`, trigger a rollback when exiting the innermost
+enclosing atomic block that has `savepoint=True` (that's the default). Use
+this to force a rollback without raising an exception.

-When `rollback` is `False`, it prevents such a rollback. Use this only
-after rolling back to a known-good state! Otherwise, you break the atomic
-block and data corruption may occur.
+When `rollback` is `False`, prevent such a rollback. Use this only after
+rolling back to a known-good state! Otherwise, you break the atomic block
+and data corruption may occur.
 """
 return get_connection(using).set_rollback(rollback)

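As the set_rollback() docstring notes, the flag is the supported way to force the innermost atomic block to roll back without raising an exception. A sketch, where import_rows() and looks_consistent() are hypothetical helpers:

    from django.db import transaction

    def import_batch(rows):
        with transaction.atomic():
            import_rows(rows)
            if not looks_consistent():
                # Mark the block for rollback instead of raising.
                transaction.set_rollback(True)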
@@ -118,7 +106,7 @@ def on_commit(func, using=None):

 class Atomic(ContextDecorator):
 """
-This class guarantees the atomic execution of a given block.
+Guarantee the atomic execution of a given block.

 An instance can be used either as a decorator or as a context manager.

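Atomic instances are exposed as django.db.transaction.atomic and can be used both ways; the function and model attribute names below are hypothetical:

    from django.db import transaction

    @transaction.atomic                      # decorator form
    def transfer(source, target, amount):
        source.balance -= amount
        target.balance += amount
        source.save()
        target.save()

    def bulk_save(objs):
        with transaction.atomic():           # context-manager form
            for obj in objs:
                obj.save()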
@@ -50,7 +50,7 @@ class NotSupportedError(DatabaseError):

 class DatabaseErrorWrapper:
 """
-Context manager and decorator that re-throws backend-specific database
+Context manager and decorator that reraises backend-specific database
 exceptions using Django's common wrappers.
 """

@@ -161,7 +161,7 @@ class ConnectionHandler:

 def ensure_defaults(self, alias):
 """
-Puts the defaults into the settings dictionary for a given connection
+Put the defaults into the settings dictionary for a given connection
 where no settings is provided.
 """
 try:
@@ -182,7 +182,7 @@ class ConnectionHandler:

 def prepare_test_settings(self, alias):
 """
-Makes sure the test settings are available in the 'TEST' sub-dictionary.
+Make sure the test settings are available in the 'TEST' sub-dictionary.
 """
 try:
 conn = self.databases[alias]
@@ -229,7 +229,7 @@ class ConnectionHandler:
 class ConnectionRouter:
 def __init__(self, routers=None):
 """
-If routers is not specified, will default to settings.DATABASE_ROUTERS.
+If routers is not specified, default to settings.DATABASE_ROUTERS.
 """
 self._routers = routers

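ConnectionRouter consults the router classes named in settings.DATABASE_ROUTERS when deciding which database to use. A minimal router sketch; the 'replica' alias and the module path are assumptions:

    # settings.py
    DATABASE_ROUTERS = ['myproject.routers.ReadReplicaRouter']

    # myproject/routers.py
    class ReadReplicaRouter:
        def db_for_read(self, model, **hints):
            return 'replica'      # send reads to the replica alias

        def db_for_write(self, model, **hints):
            return 'default'      # keep writes on the default database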
@@ -304,8 +304,6 @@ class ConnectionRouter:
 )

 def get_migratable_models(self, app_config, db, include_auto_created=False):
-"""
-Return app models allowed to be synchronized on provided db.
-"""
+"""Return app models allowed to be migrated on provided db."""
 models = app_config.get_models(include_auto_created=include_auto_created)
 return [model for model in models if self.allow_migrate_model(db, model)]