Mirror of https://github.com/django/django.git (synced 2025-10-26 23:26:08 +00:00)
Refs #23919 -- Replaced super(ClassName, self) with super().
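For context on the mechanical rewrite applied throughout the diff below: inside a method body, Python 3's zero-argument `super()` resolves to the same bound proxy object as the explicit two-argument `super(ClassName, self)` spelling, so each hunk only changes the call site, never the behaviour. A minimal standalone sketch of the equivalence (the class names here are invented for illustration and do not appear anywhere in the patch):

```python
class BaseField:
    def __init__(self, *args, **kwargs):
        self.args, self.kwargs = args, kwargs


class OldStyleField(BaseField):
    def __init__(self, *args, **kwargs):
        # Python 2-compatible spelling: class and instance passed explicitly.
        super(OldStyleField, self).__init__(*args, **kwargs)


class NewStyleField(BaseField):
    def __init__(self, *args, **kwargs):
        # Python 3-only spelling: the compiler supplies __class__ and self.
        super().__init__(*args, **kwargs)


# Both subclasses end up with identical state after construction.
assert OldStyleField(1, x=2).args == NewStyleField(1, x=2).args
assert OldStyleField(1, x=2).kwargs == NewStyleField(1, x=2).kwargs
```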
@@ -39,7 +39,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
}

def get_field_type(self, data_type, description):
- field_type = super(DatabaseIntrospection, self).get_field_type(data_type, description)
+ field_type = super().get_field_type(data_type, description)
if 'auto_increment' in description.extra:
if field_type == 'IntegerField':
return 'AutoField'

@@ -202,10 +202,10 @@ class DatabaseOperations(BaseDatabaseOperations):
elif connector == '>>':
lhs, rhs = sub_expressions
return 'FLOOR(%(lhs)s / POW(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
- return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
+ return super().combine_expression(connector, sub_expressions)

def get_db_converters(self, expression):
- converters = super(DatabaseOperations, self).get_db_converters(expression)
+ converters = super().get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type == 'TextField':
converters.append(self.convert_textfield_value)

@@ -45,7 +45,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
)

def add_field(self, model, field):
- super(DatabaseSchemaEditor, self).add_field(model, field)
+ super().add_field(model, field)

# Simulate the effect of a one-off default.
# field.default may be unhashable, so a set isn't used for "in" check.
@@ -57,7 +57,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
}, [effective_default])

def _field_should_be_indexed(self, model, field):
- create_index = super(DatabaseSchemaEditor, self)._field_should_be_indexed(model, field)
+ create_index = super()._field_should_be_indexed(model, field)
storage = self.connection.introspection.get_storage_engine(
self.connection.cursor(), model._meta.db_table
)
@@ -85,7 +85,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
constraint_names = self._constraint_names(model, [first_field.column], index=True)
if not constraint_names:
self.execute(self._create_index_sql(model, [first_field], suffix=""))
- return super(DatabaseSchemaEditor, self)._delete_composed_index(model, fields, *args)
+ return super()._delete_composed_index(model, fields, *args)

def _set_field_new_type_null_status(self, field, new_type):
"""
@@ -100,8 +100,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):

def _alter_column_type_sql(self, table, old_field, new_field, new_type):
new_type = self._set_field_new_type_null_status(old_field, new_type)
- return super(DatabaseSchemaEditor, self)._alter_column_type_sql(table, old_field, new_field, new_type)
+ return super()._alter_column_type_sql(table, old_field, new_field, new_type)

def _rename_field_sql(self, table, old_field, new_field, new_type):
new_type = self._set_field_new_type_null_status(old_field, new_type)
- return super(DatabaseSchemaEditor, self)._rename_field_sql(table, old_field, new_field, new_type)
+ return super()._rename_field_sql(table, old_field, new_field, new_type)

@@ -5,7 +5,7 @@ from django.utils.version import get_docs_version

class DatabaseValidation(BaseDatabaseValidation):
def check(self, **kwargs):
- issues = super(DatabaseValidation, self).check(**kwargs)
+ issues = super().check(**kwargs)
issues.extend(self._check_sql_mode(**kwargs))
return issues

@@ -32,7 +32,7 @@ class DatabaseValidation(BaseDatabaseValidation):
No character (varchar) fields can have a length exceeding 255
characters if they have a unique index on them.
"""
- errors = super(DatabaseValidation, self).check_field(field, **kwargs)
+ errors = super().check_field(field, **kwargs)

# Ignore any related fields.
if getattr(field, 'remote_field', None):

@@ -175,7 +175,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
ops_class = DatabaseOperations

def __init__(self, *args, **kwargs):
- super(DatabaseWrapper, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True)
self.features.can_return_id_from_insert = use_returning_into

@@ -16,15 +16,9 @@ class SQLCompiler(compiler.SQLCompiler):
# the SQL needed to use limit/offset with Oracle.
do_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
if not do_offset:
- sql, params = super(SQLCompiler, self).as_sql(
- with_limits=False,
- with_col_aliases=with_col_aliases,
- )
+ sql, params = super().as_sql(with_limits=False, with_col_aliases=with_col_aliases)
else:
- sql, params = super(SQLCompiler, self).as_sql(
- with_limits=False,
- with_col_aliases=True,
- )
+ sql, params = super().as_sql(with_limits=False, with_col_aliases=True)
# Wrap the base query in an outer SELECT * with boundaries on
# the "_RN" column. This is the canonical way to emulate LIMIT
# and OFFSET on Oracle.

@@ -49,4 +49,4 @@ class DatabaseFeatures(BaseDatabaseFeatures):
"""
if self.connection.oracle_full_version < '11.2.0.2' and field and field.has_default() and created_separately:
return 'IntegerField'
- return super(DatabaseFeatures, self).introspected_boolean_field_type(field, created_separately)
+ return super().introspected_boolean_field_type(field, created_separately)

@@ -12,7 +12,7 @@ class IntervalToSeconds(Func):

def __init__(self, expression, **extra):
output_field = extra.pop('output_field', DecimalField())
- super(IntervalToSeconds, self).__init__(expression, output_field=output_field, **extra)
+ super().__init__(expression, output_field=output_field, **extra)


class SecondsToInterval(Func):
@@ -21,4 +21,4 @@ class SecondsToInterval(Func):

def __init__(self, expression, **extra):
output_field = extra.pop('output_field', DurationField())
- super(SecondsToInterval, self).__init__(expression, output_field=output_field, **extra)
+ super().__init__(expression, output_field=output_field, **extra)

@@ -48,7 +48,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
elif scale == -127:
return 'FloatField'

- return super(DatabaseIntrospection, self).get_field_type(data_type, description)
+ return super().get_field_type(data_type, description)

def get_table_list(self, cursor):
"""

@@ -40,7 +40,7 @@ END;
/"""

def __init__(self, *args, **kwargs):
- super(DatabaseOperations, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.set_operators['difference'] = 'MINUS'

def autoinc_sql(self, table, column):
@@ -165,7 +165,7 @@ WHEN (new.%(col_name)s IS NULL)
return sql

def get_db_converters(self, expression):
- converters = super(DatabaseOperations, self).get_db_converters(expression)
+ converters = super().get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type == 'TextField':
converters.append(self.convert_textfield_value)
@@ -254,7 +254,7 @@ WHEN (new.%(col_name)s IS NULL)
statement = cursor.statement
# Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's
# `statement` doesn't contain the query parameters. refs #20010.
- return super(DatabaseOperations, self).last_executed_query(cursor, statement, params)
+ return super().last_executed_query(cursor, statement, params)

def last_insert_id(self, cursor, table_name, pk_name):
sq_name = self._get_sequence_name(table_name)
@@ -514,7 +514,7 @@ WHEN (new.%(col_name)s IS NULL)
return 'FLOOR(%(lhs)s / POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
elif connector == '^':
return 'POWER(%s)' % ','.join(sub_expressions)
- return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
+ return super().combine_expression(connector, sub_expressions)

def _get_sequence_name(self, table):
name_length = self.max_name_length() - 3
@@ -537,4 +537,4 @@ WHEN (new.%(col_name)s IS NULL)
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "NUMTODSINTERVAL(%s - %s, 'DAY')" % (lhs_sql, rhs_sql), lhs_params + rhs_params
- return super(DatabaseOperations, self).subtract_temporals(internal_type, lhs, rhs)
+ return super().subtract_temporals(internal_type, lhs, rhs)

@@ -33,7 +33,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):

def delete_model(self, model):
# Run superclass action
- super(DatabaseSchemaEditor, self).delete_model(model)
+ super().delete_model(model)
# Clean up any autoincrement trigger
self.execute("""
DECLARE
@@ -49,7 +49,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):

def alter_field(self, model, old_field, new_field, strict=False):
try:
- super(DatabaseSchemaEditor, self).alter_field(model, old_field, new_field, strict)
+ super().alter_field(model, old_field, new_field, strict)
except DatabaseError as e:
description = str(e)
# If we're changing type to an unsupported type we need a
@@ -100,7 +100,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
# Drop the old field
self.remove_field(model, old_field)
# Rename and possibly make the new field NOT NULL
- super(DatabaseSchemaEditor, self).alter_field(model, new_temp_field, new_field)
+ super().alter_field(model, new_temp_field, new_field)

def normalize_name(self, name):
"""

@@ -141,7 +141,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
ops_class = DatabaseOperations

def __init__(self, *args, **kwargs):
- super(DatabaseWrapper, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._named_cursor_idx = 0

def get_connection_params(self):
@@ -248,7 +248,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):

@property
def _nodb_connection(self):
- nodb_connection = super(DatabaseWrapper, self)._nodb_connection
+ nodb_connection = super()._nodb_connection
try:
nodb_connection.ensure_connection()
except (Database.DatabaseError, WrappedDatabaseError):

@@ -43,7 +43,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
AND c.relname = %s"""

def get_field_type(self, data_type, description):
- field_type = super(DatabaseIntrospection, self).get_field_type(data_type, description)
+ field_type = super().get_field_type(data_type, description)
if description.default and 'nextval' in description.default:
if field_type == 'IntegerField':
return 'AutoField'

@@ -261,7 +261,7 @@ class DatabaseOperations(BaseDatabaseOperations):
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "age(%s, %s)" % (lhs_sql, rhs_sql), lhs_params + rhs_params
- return super(DatabaseOperations, self).subtract_temporals(internal_type, lhs, rhs)
+ return super().subtract_temporals(internal_type, lhs, rhs)

def fulltext_search_sql(self, field_name):
raise NotImplementedError(

@@ -23,7 +23,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
return psycopg2.extensions.adapt(value)

def _field_indexes_sql(self, model, field):
- output = super(DatabaseSchemaEditor, self)._field_indexes_sql(model, field)
+ output = super()._field_indexes_sql(model, field)
like_index_statement = self._create_like_index_sql(model, field)
if like_index_statement is not None:
output.append(like_index_statement)
@@ -101,13 +101,11 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
],
)
else:
- return super(DatabaseSchemaEditor, self)._alter_column_type_sql(
- table, old_field, new_field, new_type
- )
+ return super()._alter_column_type_sql(table, old_field, new_field, new_type)

def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
- super(DatabaseSchemaEditor, self)._alter_field(
+ super()._alter_field(
model, old_field, new_field, old_type, new_type, old_db_params,
new_db_params, strict,
)

@@ -195,7 +195,7 @@ class DatabaseOperations(BaseDatabaseOperations):
return str(value)

def get_db_converters(self, expression):
- converters = super(DatabaseOperations, self).get_db_converters(expression)
+ converters = super().get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type == 'DateTimeField':
converters.append(self.convert_datetimefield_value)
@@ -256,7 +256,7 @@ class DatabaseOperations(BaseDatabaseOperations):
# user-defined function django_power that's registered in connect().
if connector == '^':
return 'django_power(%s)' % ','.join(sub_expressions)
- return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
+ return super().combine_expression(connector, sub_expressions)

def combine_duration_expression(self, connector, sub_expressions):
if connector not in ['+', '-']:

@@ -23,10 +23,10 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
c.execute('PRAGMA foreign_keys')
self._initial_pragma_fk = c.fetchone()[0]
c.execute('PRAGMA foreign_keys = 0')
- return super(DatabaseSchemaEditor, self).__enter__()
+ return super().__enter__()

def __exit__(self, exc_type, exc_value, traceback):
- super(DatabaseSchemaEditor, self).__exit__(exc_type, exc_value, traceback)
+ super().__exit__(exc_type, exc_value, traceback)
with self.connection.cursor() as c:
# Restore initial FK setting - PRAGMA values can't be parametrized
c.execute('PRAGMA foreign_keys = %s' % int(self._initial_pragma_fk))
@@ -216,7 +216,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):

def delete_model(self, model, handle_autom2m=True):
if handle_autom2m:
- super(DatabaseSchemaEditor, self).delete_model(model)
+ super().delete_model(model)
else:
# Delete the table (and only that)
self.execute(self.sql_delete_table % {

@@ -74,7 +74,7 @@ class CursorDebugWrapper(CursorWrapper):
def execute(self, sql, params=None):
start = time()
try:
- return super(CursorDebugWrapper, self).execute(sql, params)
+ return super().execute(sql, params)
finally:
stop = time()
duration = stop - start
@@ -91,7 +91,7 @@ class CursorDebugWrapper(CursorWrapper):
def executemany(self, sql, param_list):
start = time()
try:
- return super(CursorDebugWrapper, self).executemany(sql, param_list)
+ return super().executemany(sql, param_list)
finally:
stop = time()
duration = stop - start

@@ -77,7 +77,7 @@ class Node:

class DummyNode(Node):
def __init__(self, key, origin, error_message):
- super(DummyNode, self).__init__(key)
+ super().__init__(key)
self.origin = origin
self.error_message = error_message

@@ -31,7 +31,7 @@ class FieldOperation(Operation):

def reduce(self, operation, in_between, app_label=None):
return (
- super(FieldOperation, self).reduce(operation, in_between, app_label=app_label) or
+ super().reduce(operation, in_between, app_label=app_label) or
not operation.references_field(self.model_name, self.name, app_label)
)

@@ -44,7 +44,7 @@ class AddField(FieldOperation):
def __init__(self, model_name, name, field, preserve_default=True):
self.field = field
self.preserve_default = preserve_default
- super(AddField, self).__init__(model_name, name)
+ super().__init__(model_name, name)

def deconstruct(self):
kwargs = {
@@ -114,7 +114,7 @@ class AddField(FieldOperation):
field=self.field,
),
]
- return super(AddField, self).reduce(operation, in_between, app_label=app_label)
+ return super().reduce(operation, in_between, app_label=app_label)


class RemoveField(FieldOperation):
@@ -169,7 +169,7 @@ class AlterField(FieldOperation):
def __init__(self, model_name, name, field, preserve_default=True):
self.field = field
self.preserve_default = preserve_default
- super(AlterField, self).__init__(model_name, name)
+ super().__init__(model_name, name)

def deconstruct(self):
kwargs = {
@@ -232,7 +232,7 @@ class AlterField(FieldOperation):
field=self.field,
),
]
- return super(AlterField, self).reduce(operation, in_between, app_label=app_label)
+ return super().reduce(operation, in_between, app_label=app_label)


class RenameField(FieldOperation):
@@ -243,7 +243,7 @@ class RenameField(FieldOperation):
def __init__(self, model_name, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
- super(RenameField, self).__init__(model_name, old_name)
+ super().__init__(model_name, old_name)

@cached_property
def old_name_lower(self):

@@ -33,7 +33,7 @@ class ModelOperation(Operation):

def reduce(self, operation, in_between, app_label=None):
return (
- super(ModelOperation, self).reduce(operation, in_between, app_label=app_label) or
+ super().reduce(operation, in_between, app_label=app_label) or
not operation.references_model(self.name, app_label)
)

@@ -50,7 +50,7 @@ class CreateModel(ModelOperation):
self.options = options or {}
self.bases = bases or (models.Model,)
self.managers = managers or []
- super(CreateModel, self).__init__(name)
+ super().__init__(name)
# Sanity-check that there are no duplicated field names, bases, or
# manager names
_check_for_duplicates('fields', (name for name, _ in self.fields))
@@ -223,7 +223,7 @@ class CreateModel(ModelOperation):
managers=self.managers,
),
]
- return super(CreateModel, self).reduce(operation, in_between, app_label=app_label)
+ return super().reduce(operation, in_between, app_label=app_label)


class DeleteModel(ModelOperation):
@@ -266,7 +266,7 @@ class RenameModel(ModelOperation):
def __init__(self, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
- super(RenameModel, self).__init__(old_name)
+ super().__init__(old_name)

@cached_property
def old_name_lower(self):
@@ -429,7 +429,7 @@ class AlterModelTable(ModelOperation):

def __init__(self, name, table):
self.table = table
- super(AlterModelTable, self).__init__(name)
+ super().__init__(name)

def deconstruct(self):
kwargs = {
@@ -476,14 +476,14 @@ class AlterModelTable(ModelOperation):
def reduce(self, operation, in_between, app_label=None):
if isinstance(operation, (AlterModelTable, DeleteModel)) and self.name_lower == operation.name_lower:
return [operation]
- return super(AlterModelTable, self).reduce(operation, in_between, app_label=app_label)
+ return super().reduce(operation, in_between, app_label=app_label)


class ModelOptionOperation(ModelOperation):
def reduce(self, operation, in_between, app_label=None):
if isinstance(operation, (self.__class__, DeleteModel)) and self.name_lower == operation.name_lower:
return [operation]
- return super(ModelOptionOperation, self).reduce(operation, in_between, app_label=app_label)
+ return super().reduce(operation, in_between, app_label=app_label)


class FieldRelatedOptionOperation(ModelOptionOperation):
@@ -492,7 +492,7 @@ class FieldRelatedOptionOperation(ModelOptionOperation):
self.name_lower == operation.model_name_lower and
not self.references_field(operation.model_name, operation.name)):
return [operation, self]
- return super(FieldRelatedOptionOperation, self).reduce(operation, in_between, app_label=app_label)
+ return super().reduce(operation, in_between, app_label=app_label)


class AlterUniqueTogether(FieldRelatedOptionOperation):
@@ -505,7 +505,7 @@ class AlterUniqueTogether(FieldRelatedOptionOperation):
def __init__(self, name, unique_together):
unique_together = normalize_together(unique_together)
self.unique_together = set(tuple(cons) for cons in unique_together)
- super(AlterUniqueTogether, self).__init__(name)
+ super().__init__(name)

def deconstruct(self):
kwargs = {
@@ -559,7 +559,7 @@ class AlterIndexTogether(FieldRelatedOptionOperation):
def __init__(self, name, index_together):
index_together = normalize_together(index_together)
self.index_together = set(tuple(cons) for cons in index_together)
- super(AlterIndexTogether, self).__init__(name)
+ super().__init__(name)

def deconstruct(self):
kwargs = {
@@ -610,7 +610,7 @@ class AlterOrderWithRespectTo(FieldRelatedOptionOperation):

def __init__(self, name, order_with_respect_to):
self.order_with_respect_to = order_with_respect_to
- super(AlterOrderWithRespectTo, self).__init__(name)
+ super().__init__(name)

def deconstruct(self):
kwargs = {
@@ -685,7 +685,7 @@ class AlterModelOptions(ModelOptionOperation):

def __init__(self, name, options):
self.options = options
- super(AlterModelOptions, self).__init__(name)
+ super().__init__(name)

def deconstruct(self):
kwargs = {
@@ -726,7 +726,7 @@ class AlterModelManagers(ModelOptionOperation):

def __init__(self, name, managers):
self.managers = managers
- super(AlterModelManagers, self).__init__(name)
+ super().__init__(name)

def deconstruct(self):
return (

@@ -136,7 +136,7 @@ class FloatSerializer(BaseSimpleSerializer):
def serialize(self):
if math.isnan(self.value) or math.isinf(self.value):
return 'float("{}")'.format(self.value), set()
- return super(FloatSerializer, self).serialize()
+ return super().serialize()


class FrozensetSerializer(BaseSequenceSerializer):

@@ -246,7 +246,7 @@ class AppConfigStub(AppConfig):
# App-label and app-name are not the same thing, so technically passing
# in the label here is wrong. In practice, migrations don't care about
# the app name, but we need something unique, and the label works fine.
- super(AppConfigStub, self).__init__(label, None)
+ super().__init__(label, None)

def import_models(self):
self.models = self.apps.all_models[self.label]
@@ -271,7 +271,7 @@ class StateApps(Apps):
# Populate the app registry with a stub for each application.
app_labels = {model_state.app_label for model_state in models.values()}
app_configs = [AppConfigStub(label) for label in sorted(real_apps + list(app_labels))]
- super(StateApps, self).__init__(app_configs)
+ super().__init__(app_configs)

# The lock gets in the way of copying as implemented in clone(), which
# is called whenever Django duplicates a StateApps before updating it.

@@ -16,7 +16,7 @@ class Aggregate(Func):

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
# Aggregates are not allowed in UPDATE queries, so ignore for_save
- c = super(Aggregate, self).resolve_expression(query, allow_joins, reuse, summarize)
+ c = super().resolve_expression(query, allow_joins, reuse, summarize)
if not summarize:
expressions = c.get_source_expressions()
for index, expr in enumerate(expressions):
@@ -45,7 +45,7 @@ class Avg(Aggregate):
source_field = self.get_source_fields()[0]
if isinstance(source_field, (IntegerField, DecimalField)):
self._output_field = FloatField()
- super(Avg, self)._resolve_output_field()
+ super()._resolve_output_field()

def as_oracle(self, compiler, connection):
if self.output_field.get_internal_type() == 'DurationField':
@@ -54,7 +54,7 @@ class Avg(Aggregate):
return compiler.compile(
SecondsToInterval(Avg(IntervalToSeconds(expression)))
)
- return super(Avg, self).as_sql(compiler, connection)
+ return super().as_sql(compiler, connection)


class Count(Aggregate):
@@ -65,8 +65,10 @@ class Count(Aggregate):
def __init__(self, expression, distinct=False, **extra):
if expression == '*':
expression = Star()
- super(Count, self).__init__(
- expression, distinct='DISTINCT ' if distinct else '', output_field=IntegerField(), **extra)
+ super().__init__(
+ expression, distinct='DISTINCT ' if distinct else '',
+ output_field=IntegerField(), **extra
+ )

def __repr__(self):
return "{}({}, distinct={})".format(
@@ -96,7 +98,7 @@ class StdDev(Aggregate):

def __init__(self, expression, sample=False, **extra):
self.function = 'STDDEV_SAMP' if sample else 'STDDEV_POP'
- super(StdDev, self).__init__(expression, output_field=FloatField(), **extra)
+ super().__init__(expression, output_field=FloatField(), **extra)

def __repr__(self):
return "{}({}, sample={})".format(
@@ -122,7 +124,7 @@ class Sum(Aggregate):
return compiler.compile(
SecondsToInterval(Sum(IntervalToSeconds(expression)))
)
- return super(Sum, self).as_sql(compiler, connection)
+ return super().as_sql(compiler, connection)


class Variance(Aggregate):
@@ -130,7 +132,7 @@ class Variance(Aggregate):

def __init__(self, expression, sample=False, **extra):
self.function = 'VAR_SAMP' if sample else 'VAR_POP'
- super(Variance, self).__init__(expression, output_field=FloatField(), **extra)
+ super().__init__(expression, output_field=FloatField(), **extra)

def __repr__(self):
return "{}({}, sample={})".format(

@@ -73,7 +73,7 @@ class ModelBase(type):
Metaclass for all models.
"""
def __new__(cls, name, bases, attrs):
- super_new = super(ModelBase, cls).__new__
+ super_new = super().__new__

# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
@@ -486,7 +486,7 @@ class Model(metaclass=ModelBase):
pass
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
- super(Model, self).__init__()
+ super().__init__()
post_init.send(sender=cls, instance=self)

@classmethod

@@ -8,7 +8,7 @@ from django.db.models import signals, sql
class ProtectedError(IntegrityError):
def __init__(self, msg, protected_objects):
self.protected_objects = protected_objects
- super(ProtectedError, self).__init__(msg, protected_objects)
+ super().__init__(msg, protected_objects)


def CASCADE(collector, field, sub_objs, using):

@@ -157,7 +157,7 @@ class BaseExpression:
```
def override_as_sql(self, compiler, connection):
# custom logic
- return super(Expression, self).as_sql(compiler, connection)
+ return super().as_sql(compiler, connection)
setattr(Expression, 'as_' + connection.vendor, override_as_sql)
```

@@ -351,7 +351,7 @@ class Expression(BaseExpression, Combinable):
class CombinedExpression(Expression):

def __init__(self, lhs, connector, rhs, output_field=None):
- super(CombinedExpression, self).__init__(output_field=output_field)
+ super().__init__(output_field=output_field)
self.connector = connector
self.lhs = lhs
self.rhs = rhs
@@ -437,7 +437,7 @@ class DurationExpression(CombinedExpression):

class TemporalSubtraction(CombinedExpression):
def __init__(self, lhs, rhs):
- super(TemporalSubtraction, self).__init__(lhs, self.SUB, rhs, output_field=fields.DurationField())
+ super().__init__(lhs, self.SUB, rhs, output_field=fields.DurationField())

def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
@@ -517,7 +517,7 @@ class Func(Expression):
)
)
output_field = extra.pop('output_field', None)
- super(Func, self).__init__(output_field=output_field)
+ super().__init__(output_field=output_field)
self.source_expressions = self._parse_expressions(*expressions)
self.extra = extra

@@ -573,7 +573,7 @@ class Func(Expression):
return sql, params

def copy(self):
- copy = super(Func, self).copy()
+ copy = super().copy()
copy.source_expressions = self.source_expressions[:]
copy.extra = self.extra.copy()
return copy
@@ -592,7 +592,7 @@ class Value(Expression):
* output_field: an instance of the model field type that this
expression will return, such as IntegerField() or CharField().
"""
- super(Value, self).__init__(output_field=output_field)
+ super().__init__(output_field=output_field)
self.value = value

def __repr__(self):
@@ -617,7 +617,7 @@ class Value(Expression):
return '%s', [val]

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
- c = super(Value, self).resolve_expression(query, allow_joins, reuse, summarize, for_save)
+ c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
c.for_save = for_save
return c

@@ -629,7 +629,7 @@ class DurationValue(Value):
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
if connection.features.has_native_duration_field:
- return super(DurationValue, self).as_sql(compiler, connection)
+ return super().as_sql(compiler, connection)
return connection.ops.date_interval_sql(self.value)


@@ -638,7 +638,7 @@ class RawSQL(Expression):
if output_field is None:
output_field = fields.Field()
self.sql, self.params = sql, params
- super(RawSQL, self).__init__(output_field=output_field)
+ super().__init__(output_field=output_field)

def __repr__(self):
return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params)
@@ -660,7 +660,7 @@ class Star(Expression):

class Random(Expression):
def __init__(self):
- super(Random, self).__init__(output_field=fields.FloatField())
+ super().__init__(output_field=fields.FloatField())

def __repr__(self):
return "Random()"
@@ -676,7 +676,7 @@ class Col(Expression):
def __init__(self, alias, target, output_field=None):
if output_field is None:
output_field = target
- super(Col, self).__init__(output_field=output_field)
+ super().__init__(output_field=output_field)
self.alias, self.target = alias, target

def __repr__(self):
@@ -706,7 +706,7 @@ class Ref(Expression):
qs.annotate(sum_cost=Sum('cost')) query.
"""
def __init__(self, refs, source):
- super(Ref, self).__init__()
+ super().__init__()
self.refs, self.source = refs, source

def __repr__(self):
@@ -740,7 +740,7 @@ class ExpressionWrapper(Expression):
"""

def __init__(self, expression, output_field):
- super(ExpressionWrapper, self).__init__(output_field=output_field)
+ super().__init__(output_field=output_field)
self.expression = expression

def set_source_expressions(self, exprs):
@@ -764,7 +764,7 @@ class When(Expression):
condition, lookups = Q(**lookups), None
if condition is None or not isinstance(condition, Q) or lookups:
raise TypeError("__init__() takes either a Q object or lookups as keyword arguments")
- super(When, self).__init__(output_field=None)
+ super().__init__(output_field=None)
self.condition = condition
self.result = self._parse_expressions(then)[0]

@@ -833,7 +833,7 @@ class Case(Expression):
raise TypeError("Positional arguments must all be When objects.")
default = extra.pop('default', None)
output_field = extra.pop('output_field', None)
- super(Case, self).__init__(output_field)
+ super().__init__(output_field)
self.cases = list(cases)
self.default = self._parse_expressions(default)[0]
self.extra = extra
@@ -860,7 +860,7 @@ class Case(Expression):
return c

def copy(self):
- c = super(Case, self).copy()
+ c = super().copy()
c.cases = c.cases[:]
return c

@@ -905,10 +905,10 @@ class Subquery(Expression):
self.extra = extra
if output_field is None and len(self.queryset.query.select) == 1:
output_field = self.queryset.query.select[0].field
- super(Subquery, self).__init__(output_field)
+ super().__init__(output_field)

def copy(self):
- clone = super(Subquery, self).copy()
+ clone = super().copy()
clone.queryset = clone.queryset.all()
return clone

@@ -985,7 +985,7 @@ class Exists(Subquery):

def __init__(self, *args, **kwargs):
self.negated = kwargs.pop('negated', False)
- super(Exists, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def __invert__(self):
return type(self)(self.queryset, self.output_field, negated=(not self.negated), **self.extra)
@@ -998,10 +998,10 @@ class Exists(Subquery):
# As a performance optimization, remove ordering since EXISTS doesn't
# care about it, just whether or not a row matches.
self.queryset = self.queryset.order_by()
- return super(Exists, self).resolve_expression(query, **kwargs)
+ return super().resolve_expression(query, **kwargs)

def as_sql(self, compiler, connection, template=None, **extra_context):
- sql, params = super(Exists, self).as_sql(compiler, connection, template, **extra_context)
+ sql, params = super().as_sql(compiler, connection, template, **extra_context)
if self.negated:
sql = 'NOT {}'.format(sql)
return sql, params

@@ -184,7 +184,7 @@ class Field(RegisterLookupMixin):
models.
"""
if not hasattr(self, 'model'):
- return super(Field, self).__str__()
+ return super().__str__()
model = self.model
app = model._meta.app_label
return '%s.%s.%s' % (app, model._meta.object_name, self.name)
@@ -867,10 +867,10 @@ class AutoField(Field):

def __init__(self, *args, **kwargs):
kwargs['blank'] = True
- super(AutoField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def check(self, **kwargs):
- errors = super(AutoField, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_primary_key())
return errors

@@ -887,7 +887,7 @@ class AutoField(Field):
return []

def deconstruct(self):
- name, path, args, kwargs = super(AutoField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
del kwargs['blank']
kwargs['primary_key'] = True
return name, path, args, kwargs
@@ -920,14 +920,14 @@ class AutoField(Field):
return value

def get_prep_value(self, value):
- value = super(AutoField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return int(value)

def contribute_to_class(self, cls, name, **kwargs):
assert not cls._meta.auto_field, "A model can't have more than one AutoField."
- super(AutoField, self).contribute_to_class(cls, name, **kwargs)
+ super().contribute_to_class(cls, name, **kwargs)
cls._meta.auto_field = self

def formfield(self, **kwargs):
@@ -953,10 +953,10 @@ class BooleanField(Field):

def __init__(self, *args, **kwargs):
kwargs['blank'] = True
- super(BooleanField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def check(self, **kwargs):
- errors = super(BooleanField, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_null(**kwargs))
return errors

@@ -974,7 +974,7 @@ class BooleanField(Field):
return []

def deconstruct(self):
- name, path, args, kwargs = super(BooleanField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
del kwargs['blank']
return name, path, args, kwargs

@@ -997,7 +997,7 @@ class BooleanField(Field):
)

def get_prep_value(self, value):
- value = super(BooleanField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
@@ -1011,18 +1011,18 @@ class BooleanField(Field):
else:
defaults = {'form_class': forms.BooleanField}
defaults.update(kwargs)
- return super(BooleanField, self).formfield(**defaults)
+ return super().formfield(**defaults)

class CharField(Field):
description = _("String (up to %(max_length)s)")

def __init__(self, *args, **kwargs):
- super(CharField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.validators.append(validators.MaxLengthValidator(self.max_length))

def check(self, **kwargs):
- errors = super(CharField, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_max_length_attribute(**kwargs))
return errors

@@ -1055,7 +1055,7 @@ class CharField(Field):
return force_text(value)

def get_prep_value(self, value):
- value = super(CharField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
return self.to_python(value)

def formfield(self, **kwargs):
@@ -1067,7 +1067,7 @@ class CharField(Field):
if self.null and not connection.features.interprets_empty_strings_as_nulls:
defaults['empty_value'] = None
defaults.update(kwargs)
- return super(CharField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class CommaSeparatedIntegerField(CharField):
@@ -1089,7 +1089,7 @@ class CommaSeparatedIntegerField(CharField):
class DateTimeCheckMixin:

def check(self, **kwargs):
- errors = super(DateTimeCheckMixin, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_mutually_exclusive_options())
errors.extend(self._check_fix_default_value())
return errors
@@ -1133,7 +1133,7 @@ class DateField(DateTimeCheckMixin, Field):
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
- super(DateField, self).__init__(verbose_name, name, **kwargs)
+ super().__init__(verbose_name, name, **kwargs)

def _check_fix_default_value(self):
"""
@@ -1179,7 +1179,7 @@ class DateField(DateTimeCheckMixin, Field):
return []

def deconstruct(self):
- name, path, args, kwargs = super(DateField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if self.auto_now:
kwargs['auto_now'] = True
if self.auto_now_add:
@@ -1228,10 +1228,10 @@ class DateField(DateTimeCheckMixin, Field):
setattr(model_instance, self.attname, value)
return value
else:
- return super(DateField, self).pre_save(model_instance, add)
+ return super().pre_save(model_instance, add)

def contribute_to_class(self, cls, name, **kwargs):
- super(DateField, self).contribute_to_class(cls, name, **kwargs)
+ super().contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(
cls, 'get_next_by_%s' % self.name,
@@ -1243,7 +1243,7 @@ class DateField(DateTimeCheckMixin, Field):
)

def get_prep_value(self, value):
- value = super(DateField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
return self.to_python(value)

def get_db_prep_value(self, value, connection, prepared=False):
@@ -1259,7 +1259,7 @@ class DateField(DateTimeCheckMixin, Field):
def formfield(self, **kwargs):
defaults = {'form_class': forms.DateField}
defaults.update(kwargs)
- return super(DateField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class DateTimeField(DateField):
@@ -1380,13 +1380,13 @@ class DateTimeField(DateField):
setattr(model_instance, self.attname, value)
return value
else:
- return super(DateTimeField, self).pre_save(model_instance, add)
+ return super().pre_save(model_instance, add)

# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO

def get_prep_value(self, value):
- value = super(DateTimeField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
@@ -1417,7 +1417,7 @@ class DateTimeField(DateField):
def formfield(self, **kwargs):
defaults = {'form_class': forms.DateTimeField}
defaults.update(kwargs)
- return super(DateTimeField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class DecimalField(Field):
@@ -1430,10 +1430,10 @@ class DecimalField(Field):
def __init__(self, verbose_name=None, name=None, max_digits=None,
decimal_places=None, **kwargs):
self.max_digits, self.decimal_places = max_digits, decimal_places
- super(DecimalField, self).__init__(verbose_name, name, **kwargs)
+ super().__init__(verbose_name, name, **kwargs)

def check(self, **kwargs):
- errors = super(DecimalField, self).check(**kwargs)
+ errors = super().check(**kwargs)

digits_errors = self._check_decimal_places()
digits_errors.extend(self._check_max_digits())
@@ -1504,12 +1504,12 @@ class DecimalField(Field):

@cached_property
def validators(self):
- return super(DecimalField, self).validators + [
+ return super().validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]

def deconstruct(self):
- name, path, args, kwargs = super(DecimalField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if self.max_digits is not None:
kwargs['max_digits'] = self.max_digits
if self.decimal_places is not None:
@@ -1555,7 +1555,7 @@ class DecimalField(Field):
return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places)

def get_prep_value(self, value):
- value = super(DecimalField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
return self.to_python(value)

def formfield(self, **kwargs):
@@ -1565,7 +1565,7 @@ class DecimalField(Field):
'form_class': forms.DecimalField,
}
defaults.update(kwargs)
- return super(DecimalField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class DurationField(Field):
@@ -1615,7 +1615,7 @@ class DurationField(Field):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
- return converters + super(DurationField, self).get_db_converters(connection)
+ return converters + super().get_db_converters(connection)

def value_to_string(self, obj):
val = self.value_from_object(obj)
@@ -1626,7 +1626,7 @@ class DurationField(Field):
'form_class': forms.DurationField,
}
defaults.update(kwargs)
- return super(DurationField, self).formfield(**defaults)
+ return super().formfield(**defaults)

class EmailField(CharField):
@@ -1636,10 +1636,10 @@ class EmailField(CharField):
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs['max_length'] = kwargs.get('max_length', 254)
- super(EmailField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def deconstruct(self):
- name, path, args, kwargs = super(EmailField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
# We do not exclude max_length if it matches default as we want to change
# the default in future.
return name, path, args, kwargs
@@ -1651,7 +1651,7 @@ class EmailField(CharField):
'form_class': forms.EmailField,
}
defaults.update(kwargs)
- return super(EmailField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class FilePathField(Field):
@@ -1662,10 +1662,10 @@ class FilePathField(Field):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs['max_length'] = kwargs.get('max_length', 100)
- super(FilePathField, self).__init__(verbose_name, name, **kwargs)
+ super().__init__(verbose_name, name, **kwargs)

def check(self, **kwargs):
- errors = super(FilePathField, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_allowing_files_or_folders(**kwargs))
return errors

@@ -1681,7 +1681,7 @@ class FilePathField(Field):
return []

def deconstruct(self):
- name, path, args, kwargs = super(FilePathField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if self.path != '':
kwargs['path'] = self.path
if self.match is not None:
@@ -1697,7 +1697,7 @@ class FilePathField(Field):
return name, path, args, kwargs

def get_prep_value(self, value):
- value = super(FilePathField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return str(value)
@@ -1712,7 +1712,7 @@ class FilePathField(Field):
'allow_folders': self.allow_folders,
}
defaults.update(kwargs)
- return super(FilePathField, self).formfield(**defaults)
+ return super().formfield(**defaults)

def get_internal_type(self):
return "FilePathField"
@@ -1726,7 +1726,7 @@ class FloatField(Field):
description = _("Floating point number")

def get_prep_value(self, value):
- value = super(FloatField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return float(value)
@@ -1749,7 +1749,7 @@ class FloatField(Field):
def formfield(self, **kwargs):
defaults = {'form_class': forms.FloatField}
defaults.update(kwargs)
- return super(FloatField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class IntegerField(Field):
@@ -1760,7 +1760,7 @@ class IntegerField(Field):
description = _("Integer")

def check(self, **kwargs):
- errors = super(IntegerField, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_max_length_warning())
return errors

@@ -1780,7 +1780,7 @@ class IntegerField(Field):
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
- validators_ = super(IntegerField, self).validators
+ validators_ = super().validators
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None:
@@ -1798,7 +1798,7 @@ class IntegerField(Field):
return validators_

def get_prep_value(self, value):
- value = super(IntegerField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return int(value)
@@ -1821,7 +1821,7 @@ class IntegerField(Field):
def formfield(self, **kwargs):
defaults = {'form_class': forms.IntegerField}
defaults.update(kwargs)
- return super(IntegerField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class BigIntegerField(IntegerField):
@@ -1836,7 +1836,7 @@ class BigIntegerField(IntegerField):
defaults = {'min_value': -BigIntegerField.MAX_BIGINT - 1,
'max_value': BigIntegerField.MAX_BIGINT}
defaults.update(kwargs)
- return super(BigIntegerField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class IPAddressField(Field):
@@ -1853,15 +1853,15 @@ class IPAddressField(Field):

def __init__(self, *args, **kwargs):
kwargs['max_length'] = 15
- super(IPAddressField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def deconstruct(self):
- name, path, args, kwargs = super(IPAddressField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
del kwargs['max_length']
return name, path, args, kwargs

def get_prep_value(self, value):
- value = super(IPAddressField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return str(value)
@@ -1883,11 +1883,10 @@ class GenericIPAddressField(Field):
validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages['invalid'] = invalid_error_message
kwargs['max_length'] = 39
- super(GenericIPAddressField, self).__init__(verbose_name, name, *args,
- **kwargs)
+ super().__init__(verbose_name, name, *args, **kwargs)

def check(self, **kwargs):
- errors = super(GenericIPAddressField, self).check(**kwargs)
+ errors = super().check(**kwargs)
errors.extend(self._check_blank_and_null_values(**kwargs))
return errors

@@ -1904,7 +1903,7 @@ class GenericIPAddressField(Field):
return []

def deconstruct(self):
- name, path, args, kwargs = super(GenericIPAddressField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if self.unpack_ipv4 is not False:
kwargs['unpack_ipv4'] = self.unpack_ipv4
if self.protocol != "both":
@@ -1932,7 +1931,7 @@ class GenericIPAddressField(Field):
return connection.ops.adapt_ipaddressfield_value(value)

def get_prep_value(self, value):
- value = super(GenericIPAddressField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
if value and ':' in value:
@@ -1948,7 +1947,7 @@ class GenericIPAddressField(Field):
'form_class': forms.GenericIPAddressField,
}
defaults.update(kwargs)
- return super(GenericIPAddressField, self).formfield(**defaults)
+ return super().formfield(**defaults)

class NullBooleanField(Field):
@@ -1961,10 +1960,10 @@ class NullBooleanField(Field):
def __init__(self, *args, **kwargs):
kwargs['null'] = True
kwargs['blank'] = True
- super(NullBooleanField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def deconstruct(self):
- name, path, args, kwargs = super(NullBooleanField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
del kwargs['null']
del kwargs['blank']
return name, path, args, kwargs
@@ -1990,7 +1989,7 @@ class NullBooleanField(Field):
)

def get_prep_value(self, value):
- value = super(NullBooleanField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
@@ -1998,7 +1997,7 @@ class NullBooleanField(Field):
def formfield(self, **kwargs):
defaults = {'form_class': forms.NullBooleanField}
defaults.update(kwargs)
- return super(NullBooleanField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class PositiveIntegerRelDbTypeMixin:
@@ -2027,7 +2026,7 @@ class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
def formfield(self, **kwargs):
defaults = {'min_value': 0}
defaults.update(kwargs)
- return super(PositiveIntegerField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
@@ -2039,7 +2038,7 @@ class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
def formfield(self, **kwargs):
defaults = {'min_value': 0}
defaults.update(kwargs)
- return super(PositiveSmallIntegerField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class SlugField(CharField):
@@ -2054,10 +2053,10 @@ class SlugField(CharField):
self.allow_unicode = kwargs.pop('allow_unicode', False)
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
- super(SlugField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)

def deconstruct(self):
- name, path, args, kwargs = super(SlugField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 50:
del kwargs['max_length']
if self.db_index is False:
@@ -2074,7 +2073,7 @@ class SlugField(CharField):
def formfield(self, **kwargs):
defaults = {'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode}
defaults.update(kwargs)
- return super(SlugField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class SmallIntegerField(IntegerField):
@@ -2096,7 +2095,7 @@ class TextField(Field):
return force_text(value)

def get_prep_value(self, value):
- value = super(TextField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
return self.to_python(value)

def formfield(self, **kwargs):
@@ -2105,7 +2104,7 @@ class TextField(Field):
# the value in the form field (to pass into widget for example).
defaults = {'max_length': self.max_length, 'widget': forms.Textarea}
defaults.update(kwargs)
- return super(TextField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class TimeField(DateTimeCheckMixin, Field):
@@ -2124,7 +2123,7 @@ class TimeField(DateTimeCheckMixin, Field):
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
- super(TimeField, self).__init__(verbose_name, name, **kwargs)
+ super().__init__(verbose_name, name, **kwargs)

def _check_fix_default_value(self):
"""
@@ -2173,7 +2172,7 @@ class TimeField(DateTimeCheckMixin, Field):
return []

def deconstruct(self):
- name, path, args, kwargs = super(TimeField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
@@ -2220,10 +2219,10 @@ class TimeField(DateTimeCheckMixin, Field):
setattr(model_instance, self.attname, value)
return value
else:
- return super(TimeField, self).pre_save(model_instance, add)
+ return super().pre_save(model_instance, add)

def get_prep_value(self, value):
- value = super(TimeField, self).get_prep_value(value)
+ value = super().get_prep_value(value)
return self.to_python(value)

def get_db_prep_value(self, value, connection, prepared=False):
@@ -2239,7 +2238,7 @@ class TimeField(DateTimeCheckMixin, Field):
def formfield(self, **kwargs):
defaults = {'form_class': forms.TimeField}
defaults.update(kwargs)
- return super(TimeField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class URLField(CharField):
@@ -2248,10 +2247,10 @@ class URLField(CharField):

def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 200)
- super(URLField, self).__init__(verbose_name, name, **kwargs)
+ super().__init__(verbose_name, name, **kwargs)

def deconstruct(self):
- name, path, args, kwargs = super(URLField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 200:
del kwargs['max_length']
return name, path, args, kwargs
@@ -2263,7 +2262,7 @@ class URLField(CharField):
'form_class': forms.URLField,
}
defaults.update(kwargs)
- return super(URLField, self).formfield(**defaults)
+ return super().formfield(**defaults)


class BinaryField(Field):
@@ -2272,12 +2271,12 @@ class BinaryField(Field):

def __init__(self, *args, **kwargs):
kwargs['editable'] = False
- super(BinaryField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
|
||||
|
||||
def deconstruct(self):
|
||||
name, path, args, kwargs = super(BinaryField, self).deconstruct()
|
||||
name, path, args, kwargs = super().deconstruct()
|
||||
del kwargs['editable']
|
||||
return name, path, args, kwargs
|
||||
|
||||
@@ -2290,13 +2289,13 @@ class BinaryField(Field):
|
||||
def get_default(self):
|
||||
if self.has_default() and not callable(self.default):
|
||||
return self.default
|
||||
default = super(BinaryField, self).get_default()
|
||||
default = super().get_default()
|
||||
if default == '':
|
||||
return b''
|
||||
return default
|
||||
|
||||
def get_db_prep_value(self, value, connection, prepared=False):
|
||||
value = super(BinaryField, self).get_db_prep_value(value, connection, prepared)
|
||||
value = super().get_db_prep_value(value, connection, prepared)
|
||||
if value is not None:
|
||||
return connection.Database.Binary(value)
|
||||
return value
|
||||
@@ -2321,10 +2320,10 @@ class UUIDField(Field):
|
||||
|
||||
def __init__(self, verbose_name=None, **kwargs):
|
||||
kwargs['max_length'] = 32
|
||||
super(UUIDField, self).__init__(verbose_name, **kwargs)
|
||||
super().__init__(verbose_name, **kwargs)
|
||||
|
||||
def deconstruct(self):
|
||||
name, path, args, kwargs = super(UUIDField, self).deconstruct()
|
||||
name, path, args, kwargs = super().deconstruct()
|
||||
del kwargs['max_length']
|
||||
return name, path, args, kwargs
|
||||
|
||||
@@ -2358,4 +2357,4 @@ class UUIDField(Field):
|
||||
'form_class': forms.UUIDField,
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return super(UUIDField, self).formfield(**defaults)
|
||||
return super().formfield(**defaults)
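Every hunk above follows the same mechanical rewrite: the Python 2 compatible call super(ClassName, self).method(...) becomes the zero-argument super().method(...) that Python 3 resolves through the implicit __class__ reference. As a hedged illustration only (RatingField is a made-up field, not part of this patch), the pattern on a custom model field looks like this:

    from django.db import models

    class RatingField(models.IntegerField):
        """Hypothetical field, shown only to illustrate the rewrite."""

        def __init__(self, *args, **kwargs):
            kwargs.setdefault('default', 0)
            # Previously: super(RatingField, self).__init__(*args, **kwargs)
            super().__init__(*args, **kwargs)

        def deconstruct(self):
            # The zero-argument form works in any method defined in the class body.
            name, path, args, kwargs = super().deconstruct()
            if kwargs.get('default') == 0:
                del kwargs['default']
            return name, path, args, kwargs

Both spellings call the same parent implementation; the shorter form simply avoids repeating the class name, which also keeps the call correct if the class is later renamed.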
@@ -15,7 +15,7 @@ from django.utils.translation import ugettext_lazy as _

class FieldFile(File):
def __init__(self, instance, field, name):
super(FieldFile, self).__init__(None, name)
super().__init__(None, name)
self.instance = instance
self.field = field
self.storage = field.storage
@@ -228,10 +228,10 @@ class FileField(Field):
self.upload_to = upload_to

kwargs['max_length'] = kwargs.get('max_length', 100)
super(FileField, self).__init__(verbose_name, name, **kwargs)
super().__init__(verbose_name, name, **kwargs)

def check(self, **kwargs):
errors = super(FileField, self).check(**kwargs)
errors = super().check(**kwargs)
errors.extend(self._check_primary_key())
errors.extend(self._check_upload_to())
return errors
@@ -263,7 +263,7 @@ class FileField(Field):
return []

def deconstruct(self):
name, path, args, kwargs = super(FileField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
kwargs['upload_to'] = self.upload_to
@@ -276,7 +276,7 @@ class FileField(Field):

def get_prep_value(self, value):
"Returns field's value prepared for saving into a database."
value = super(FileField, self).get_prep_value(value)
value = super().get_prep_value(value)
# Need to convert File objects provided via a form to string for database insertion
if value is None:
return None
@@ -284,14 +284,14 @@ class FileField(Field):

def pre_save(self, model_instance, add):
"Returns field's value just before saving."
file = super(FileField, self).pre_save(model_instance, add)
file = super().pre_save(model_instance, add)
if file and not file._committed:
# Commit the file to storage prior to saving the model
file.save(file.name, file.file, save=False)
return file

def contribute_to_class(self, cls, name, **kwargs):
super(FileField, self).contribute_to_class(cls, name, **kwargs)
super().contribute_to_class(cls, name, **kwargs)
setattr(cls, self.name, self.descriptor_class(self))

def generate_filename(self, instance, filename):
@@ -330,7 +330,7 @@ class FileField(Field):
if 'initial' in kwargs:
defaults['required'] = False
defaults.update(kwargs)
return super(FileField, self).formfield(**defaults)
return super().formfield(**defaults)


class ImageFileDescriptor(FileDescriptor):
@@ -340,7 +340,7 @@ class ImageFileDescriptor(FileDescriptor):
"""
def __set__(self, instance, value):
previous_file = instance.__dict__.get(self.field.name)
super(ImageFileDescriptor, self).__set__(instance, value)
super().__set__(instance, value)

# To prevent recalculating image dimensions when we are instantiating
# an object from the database (bug #11084), only update dimensions if
@@ -360,7 +360,7 @@ class ImageFieldFile(ImageFile, FieldFile):
# Clear the image dimensions cache
if hasattr(self, '_dimensions_cache'):
del self._dimensions_cache
super(ImageFieldFile, self).delete(save)
super().delete(save)


class ImageField(FileField):
@@ -371,10 +371,10 @@ class ImageField(FileField):

def __init__(self, verbose_name=None, name=None, width_field=None, height_field=None, **kwargs):
self.width_field, self.height_field = width_field, height_field
super(ImageField, self).__init__(verbose_name, name, **kwargs)
super().__init__(verbose_name, name, **kwargs)

def check(self, **kwargs):
errors = super(ImageField, self).check(**kwargs)
errors = super().check(**kwargs)
errors.extend(self._check_image_library_installed())
return errors

@@ -395,7 +395,7 @@ class ImageField(FileField):
return []

def deconstruct(self):
name, path, args, kwargs = super(ImageField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
if self.width_field:
kwargs['width_field'] = self.width_field
if self.height_field:
@@ -403,7 +403,7 @@ class ImageField(FileField):
return name, path, args, kwargs

def contribute_to_class(self, cls, name, **kwargs):
super(ImageField, self).contribute_to_class(cls, name, **kwargs)
super().contribute_to_class(cls, name, **kwargs)
# Attach update_dimension_fields so that dimension fields declared
# after their corresponding image field don't stay cleared by
# Model.__init__, see bug #11196.
@@ -471,4 +471,4 @@ class ImageField(FileField):
def formfield(self, **kwargs):
defaults = {'form_class': forms.ImageField}
defaults.update(kwargs)
return super(ImageField, self).formfield(**defaults)
return super().formfield(**defaults)

@@ -15,9 +15,9 @@ class OrderWrt(fields.IntegerField):
def __init__(self, *args, **kwargs):
kwargs['name'] = '_order'
kwargs['editable'] = False
super(OrderWrt, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

def deconstruct(self):
name, path, args, kwargs = super(OrderWrt, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
del kwargs['editable']
return name, path, args, kwargs

@@ -97,7 +97,7 @@ class RelatedField(Field):
return self.remote_field.model

def check(self, **kwargs):
errors = super(RelatedField, self).check(**kwargs)
errors = super().check(**kwargs)
errors.extend(self._check_related_name_is_valid())
errors.extend(self._check_related_query_name_is_valid())
errors.extend(self._check_relation_model_exists())
@@ -294,7 +294,7 @@ class RelatedField(Field):

def contribute_to_class(self, cls, name, private_only=False, **kwargs):

super(RelatedField, self).contribute_to_class(cls, name, private_only=private_only, **kwargs)
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)

self.opts = cls._meta

@@ -412,7 +412,7 @@ class RelatedField(Field):
'limit_choices_to': limit_choices_to,
})
defaults.update(kwargs)
return super(RelatedField, self).formfield(**defaults)
return super().formfield(**defaults)

def related_query_name(self):
"""
@@ -464,14 +464,14 @@ class ForeignObject(RelatedField):
on_delete=on_delete,
)

super(ForeignObject, self).__init__(rel=rel, **kwargs)
super().__init__(rel=rel, **kwargs)

self.from_fields = from_fields
self.to_fields = to_fields
self.swappable = swappable

def check(self, **kwargs):
errors = super(ForeignObject, self).check(**kwargs)
errors = super().check(**kwargs)
errors.extend(self._check_to_fields_exist())
errors.extend(self._check_unique_target())
return errors
@@ -555,7 +555,7 @@ class ForeignObject(RelatedField):
return []

def deconstruct(self):
name, path, args, kwargs = super(ForeignObject, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
kwargs['on_delete'] = self.remote_field.on_delete
kwargs['from_fields'] = self.from_fields
kwargs['to_fields'] = self.to_fields
@@ -653,7 +653,7 @@ class ForeignObject(RelatedField):
return tuple(ret)

def get_attname_column(self):
attname, column = super(ForeignObject, self).get_attname_column()
attname, column = super().get_attname_column()
return attname, None

def get_joining_columns(self, reverse_join=False):
@@ -718,7 +718,7 @@ class ForeignObject(RelatedField):
return cls.merge_dicts(class_lookups)

def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super(ForeignObject, self).contribute_to_class(cls, name, private_only=private_only, **kwargs)
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
setattr(cls, self.name, self.forward_related_accessor_class(self))

def contribute_to_related_class(self, cls, related):
@@ -795,13 +795,12 @@ class ForeignKey(ForeignObject):

kwargs['db_index'] = kwargs.get('db_index', True)

super(ForeignKey, self).__init__(
to, on_delete, from_fields=['self'], to_fields=[to_field], **kwargs)
super().__init__(to, on_delete, from_fields=['self'], to_fields=[to_field], **kwargs)

self.db_constraint = db_constraint

def check(self, **kwargs):
errors = super(ForeignKey, self).check(**kwargs)
errors = super().check(**kwargs)
errors.extend(self._check_on_delete())
errors.extend(self._check_unique())
return errors
@@ -840,7 +839,7 @@ class ForeignKey(ForeignObject):
] if self.unique else []

def deconstruct(self):
name, path, args, kwargs = super(ForeignKey, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
del kwargs['to_fields']
del kwargs['from_fields']
# Handle the simpler arguments
@@ -873,7 +872,7 @@ class ForeignKey(ForeignObject):
def validate(self, value, model_instance):
if self.remote_field.parent_link:
return
super(ForeignKey, self).validate(value, model_instance)
super().validate(value, model_instance)
if value is None:
return

@@ -902,7 +901,7 @@ class ForeignKey(ForeignObject):

def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
field_default = super().get_default()
if isinstance(field_default, self.remote_field.model):
return getattr(field_default, self.target_field.attname)
return field_default
@@ -919,7 +918,7 @@ class ForeignKey(ForeignObject):
return self.target_field.get_db_prep_value(value, connection, prepared)

def contribute_to_related_class(self, cls, related):
super(ForeignKey, self).contribute_to_related_class(cls, related)
super().contribute_to_related_class(cls, related)
if self.remote_field.field_name is None:
self.remote_field.field_name = cls._meta.pk.name

@@ -935,7 +934,7 @@ class ForeignKey(ForeignObject):
'to_field_name': self.remote_field.field_name,
}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
return super().formfield(**defaults)

def db_check(self, connection):
return []
@@ -952,13 +951,13 @@ class ForeignKey(ForeignObject):
return value

def get_db_converters(self, connection):
converters = super(ForeignKey, self).get_db_converters(connection)
converters = super().get_db_converters(connection)
if connection.features.interprets_empty_strings_as_nulls:
converters += [self.convert_empty_strings]
return converters

def get_col(self, alias, output_field=None):
return super(ForeignKey, self).get_col(alias, output_field or self.target_field)
return super().get_col(alias, output_field or self.target_field)


class OneToOneField(ForeignKey):
@@ -983,10 +982,10 @@ class OneToOneField(ForeignKey):

def __init__(self, to, on_delete, to_field=None, **kwargs):
kwargs['unique'] = True
super(OneToOneField, self).__init__(to, on_delete, to_field=to_field, **kwargs)
super().__init__(to, on_delete, to_field=to_field, **kwargs)

def deconstruct(self):
name, path, args, kwargs = super(OneToOneField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
if "unique" in kwargs:
del kwargs['unique']
return name, path, args, kwargs
@@ -994,7 +993,7 @@ class OneToOneField(ForeignKey):
def formfield(self, **kwargs):
if self.remote_field.parent_link:
return None
return super(OneToOneField, self).formfield(**kwargs)
return super().formfield(**kwargs)

def save_form_data(self, instance, data):
if isinstance(data, self.remote_field.model):
@@ -1107,13 +1106,13 @@ class ManyToManyField(RelatedField):
)
self.has_null_arg = 'null' in kwargs

super(ManyToManyField, self).__init__(**kwargs)
super().__init__(**kwargs)

self.db_table = db_table
self.swappable = swappable

def check(self, **kwargs):
errors = super(ManyToManyField, self).check(**kwargs)
errors = super().check(**kwargs)
errors.extend(self._check_unique(**kwargs))
errors.extend(self._check_relationship_model(**kwargs))
errors.extend(self._check_ignored_options(**kwargs))
@@ -1396,7 +1395,7 @@ class ManyToManyField(RelatedField):
return []

def deconstruct(self):
name, path, args, kwargs = super(ManyToManyField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
# Handle the simpler arguments.
if self.db_table is not None:
kwargs['db_table'] = self.db_table
@@ -1558,7 +1557,7 @@ class ManyToManyField(RelatedField):
# clashes between multiple m2m fields with related_name == '+'.
self.remote_field.related_name = "_%s_%s_+" % (cls.__name__.lower(), name)

super(ManyToManyField, self).contribute_to_class(cls, name, **kwargs)
super().contribute_to_class(cls, name, **kwargs)

# The intermediate m2m model is not auto created if:
# 1) There is a manually specified intermediate, or
@@ -1624,7 +1623,7 @@ class ManyToManyField(RelatedField):
if callable(initial):
initial = initial()
defaults['initial'] = [i._get_pk_val() for i in initial]
return super(ManyToManyField, self).formfield(**defaults)
return super().formfield(**defaults)

def db_check(self, connection):
return None

@@ -272,7 +272,7 @@ class ForwardOneToOneDescriptor(ForwardManyToOneDescriptor):
if not any(field in fields for field in deferred):
kwargs = {field: getattr(instance, field) for field in fields}
return rel_model(**kwargs)
return super(ForwardOneToOneDescriptor, self).get_object(instance)
return super().get_object(instance)


class ReverseOneToOneDescriptor:
@@ -502,7 +502,7 @@ def create_reverse_many_to_one_manager(superclass, rel):

class RelatedManager(superclass):
def __init__(self, instance):
super(RelatedManager, self).__init__()
super().__init__()

self.instance = instance
self.model = rel.related_model
@@ -545,12 +545,12 @@ def create_reverse_many_to_one_manager(superclass, rel):
try:
return self.instance._prefetched_objects_cache[self.field.related_query_name()]
except (AttributeError, KeyError):
queryset = super(RelatedManager, self).get_queryset()
queryset = super().get_queryset()
return self._apply_rel_filters(queryset)

def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = super(RelatedManager, self).get_queryset()
queryset = super().get_queryset()

queryset._add_hints(instance=instances[0])
queryset = queryset.using(queryset._db or self._db)
@@ -708,7 +708,7 @@ class ManyToManyDescriptor(ReverseManyToOneDescriptor):
"""

def __init__(self, rel, reverse=False):
super(ManyToManyDescriptor, self).__init__(rel)
super().__init__(rel)

self.reverse = reverse

@@ -746,7 +746,7 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):

class ManyRelatedManager(superclass):
def __init__(self, instance=None):
super(ManyRelatedManager, self).__init__()
super().__init__()

self.instance = instance

@@ -834,12 +834,12 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
queryset = super(ManyRelatedManager, self).get_queryset()
queryset = super().get_queryset()
return self._apply_rel_filters(queryset)

def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = super(ManyRelatedManager, self).get_queryset()
queryset = super().get_queryset()

queryset._add_hints(instance=instances[0])
queryset = queryset.using(queryset._db or self._db)
@@ -914,7 +914,7 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
model=self.model, pk_set=None, using=db,
)
self._remove_prefetched_objects()
filters = self._build_remove_filters(super(ManyRelatedManager, self).get_queryset().using(db))
filters = self._build_remove_filters(super().get_queryset().using(db))
self.through._default_manager.using(db).filter(filters).delete()

signals.m2m_changed.send(
@@ -1091,7 +1091,7 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db,
)
target_model_qs = super(ManyRelatedManager, self).get_queryset()
target_model_qs = super().get_queryset()
if target_model_qs._has_filters():
old_vals = target_model_qs.using(db).filter(**{
'%s__in' % self.target_field.target_field.attname: old_ids})
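Note that the hunks above also touch RelatedManager and ManyRelatedManager, classes that are created inside the create_reverse_many_to_one_manager and create_forward_many_to_many_manager factory functions. The zero-argument form still works there because the compiler gives every method defined in a class body an implicit __class__ cell, regardless of where that class body executes. A minimal sketch (not Django code) of the same shape:

    def make_manager(base):
        class Manager(base):
            def get_items(self):
                # __class__ is captured implicitly, so no arguments are needed
                # even though Manager only exists inside this function call.
                return super().get_items() + ['extra']
        return Manager

    class Base:
        def get_items(self):
            return ['base']

    manager = make_manager(Base)()
    assert manager.get_items() == ['base', 'extra']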
@@ -54,7 +54,7 @@ class RelatedIn(In):
# only one as we don't get to the direct value branch otherwise.
target_field = self.lhs.output_field.get_path_info()[-1].target_fields[-1]
self.rhs = [target_field.get_prep_value(v) for v in self.rhs]
return super(RelatedIn, self).get_prep_lookup()
return super().get_prep_lookup()

def as_sql(self, compiler, connection):
if isinstance(self.lhs, MultiColSource):
@@ -91,7 +91,7 @@ class RelatedIn(In):
else:
target_field = self.lhs.field.target_field.name
self.rhs.add_fields([target_field], True)
return super(RelatedIn, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class RelatedLookupMixin:
@@ -109,7 +109,7 @@ class RelatedLookupMixin:
target_field = self.lhs.output_field.get_path_info()[-1].target_fields[-1]
self.rhs = target_field.get_prep_value(self.rhs)

return super(RelatedLookupMixin, self).get_prep_lookup()
return super().get_prep_lookup()

def as_sql(self, compiler, connection):
if isinstance(self.lhs, MultiColSource):
@@ -122,7 +122,7 @@ class RelatedLookupMixin:
root_constraint.add(
lookup_class(target.get_col(self.lhs.alias, source), val), AND)
return root_constraint.as_sql(compiler, connection)
return super(RelatedLookupMixin, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class RelatedExact(RelatedLookupMixin, Exact):

@@ -187,7 +187,7 @@ class ManyToOneRel(ForeignObjectRel):

def __init__(self, field, to, field_name, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, on_delete=None):
super(ManyToOneRel, self).__init__(
super().__init__(
field, to,
related_name=related_name,
related_query_name=related_query_name,
@@ -226,7 +226,7 @@ class OneToOneRel(ManyToOneRel):

def __init__(self, field, to, field_name, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(
super().__init__(
field, to, field_name,
related_name=related_name,
related_query_name=related_query_name,
@@ -249,7 +249,7 @@ class ManyToManyRel(ForeignObjectRel):
def __init__(self, field, to, related_name=None, related_query_name=None,
limit_choices_to=None, symmetrical=True, through=None,
through_fields=None, db_constraint=True):
super(ManyToManyRel, self).__init__(
super().__init__(
field, to,
related_name=related_name,
related_query_name=related_query_name,

@@ -18,12 +18,12 @@ class Cast(Func):
}

def __init__(self, expression, output_field):
super(Cast, self).__init__(expression, output_field=output_field)
super().__init__(expression, output_field=output_field)

def as_sql(self, compiler, connection, **extra_context):
if 'db_type' not in extra_context:
extra_context['db_type'] = self._output_field.db_type(connection)
return super(Cast, self).as_sql(compiler, connection, **extra_context)
return super().as_sql(compiler, connection, **extra_context)

def as_mysql(self, compiler, connection):
extra_context = {}
@@ -46,7 +46,7 @@ class Coalesce(Func):
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Coalesce must take at least two expressions')
super(Coalesce, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

def as_oracle(self, compiler, connection):
# we can't mix TextField (NCLOB) and CharField (NVARCHAR), so convert
@@ -72,7 +72,7 @@ class ConcatPair(Func):
function = 'CONCAT'

def __init__(self, left, right, **extra):
super(ConcatPair, self).__init__(left, right, **extra)
super().__init__(left, right, **extra)

def as_sqlite(self, compiler, connection):
coalesced = self.coalesce()
@@ -82,7 +82,7 @@ class ConcatPair(Func):

def as_mysql(self, compiler, connection):
# Use CONCAT_WS with an empty separator so that NULLs are ignored.
return super(ConcatPair, self).as_sql(
return super().as_sql(
compiler, connection, function='CONCAT_WS', template="%(function)s('', %(expressions)s)"
)

@@ -109,7 +109,7 @@ class Concat(Func):
if len(expressions) < 2:
raise ValueError('Concat must take at least two expressions')
paired = self._paired(expressions)
super(Concat, self).__init__(paired, **extra)
super().__init__(paired, **extra)

def _paired(self, expressions):
# wrap pairs of expressions in successive concat functions
@@ -133,11 +133,11 @@ class Greatest(Func):
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Greatest must take at least two expressions')
super(Greatest, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

def as_sqlite(self, compiler, connection):
"""Use the MAX function on SQLite."""
return super(Greatest, self).as_sql(compiler, connection, function='MAX')
return super().as_sql(compiler, connection, function='MAX')


class Least(Func):
@@ -153,11 +153,11 @@ class Least(Func):
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Least must take at least two expressions')
super(Least, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

def as_sqlite(self, compiler, connection):
"""Use the MIN function on SQLite."""
return super(Least, self).as_sql(compiler, connection, function='MIN')
return super().as_sql(compiler, connection, function='MIN')


class Length(Transform):
@@ -167,10 +167,10 @@ class Length(Transform):

def __init__(self, expression, **extra):
output_field = extra.pop('output_field', fields.IntegerField())
super(Length, self).__init__(expression, output_field=output_field, **extra)
super().__init__(expression, output_field=output_field, **extra)

def as_mysql(self, compiler, connection):
return super(Length, self).as_sql(compiler, connection, function='CHAR_LENGTH')
return super().as_sql(compiler, connection, function='CHAR_LENGTH')


class Lower(Transform):
@@ -184,7 +184,7 @@ class Now(Func):
def __init__(self, output_field=None, **extra):
if output_field is None:
output_field = fields.DateTimeField()
super(Now, self).__init__(output_field=output_field, **extra)
super().__init__(output_field=output_field, **extra)

def as_postgresql(self, compiler, connection):
# Postgres' CURRENT_TIMESTAMP means "the time at the start of the
@@ -211,13 +211,13 @@ class Substr(Func):
if not hasattr(length, 'resolve_expression'):
length = Value(length)
expressions.append(length)
super(Substr, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

def as_sqlite(self, compiler, connection):
return super(Substr, self).as_sql(compiler, connection, function='SUBSTR')
return super().as_sql(compiler, connection, function='SUBSTR')

def as_oracle(self, compiler, connection):
return super(Substr, self).as_sql(compiler, connection, function='SUBSTR')
return super().as_sql(compiler, connection, function='SUBSTR')


class Upper(Transform):

@@ -37,7 +37,7 @@ class Extract(TimezoneMixin, Transform):
if self.lookup_name is None:
raise ValueError('lookup_name must be provided')
self.tzinfo = tzinfo
super(Extract, self).__init__(expression, **extra)
super().__init__(expression, **extra)

def as_sql(self, compiler, connection):
sql, params = compiler.compile(self.lhs)
@@ -57,7 +57,7 @@ class Extract(TimezoneMixin, Transform):
return sql, params

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
copy = super(Extract, self).resolve_expression(query, allow_joins, reuse, summarize, for_save)
copy = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
field = copy.lhs.output_field
if not isinstance(field, (DateField, DateTimeField, TimeField)):
raise ValueError('Extract input expression must be DateField, DateTimeField, or TimeField.')
@@ -142,7 +142,7 @@ class TruncBase(TimezoneMixin, Transform):

def __init__(self, expression, output_field=None, tzinfo=None, **extra):
self.tzinfo = tzinfo
super(TruncBase, self).__init__(expression, output_field=output_field, **extra)
super().__init__(expression, output_field=output_field, **extra)

def as_sql(self, compiler, connection):
inner_sql, inner_params = compiler.compile(self.lhs)
@@ -162,7 +162,7 @@ class TruncBase(TimezoneMixin, Transform):
return sql, inner_params + params

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
copy = super(TruncBase, self).resolve_expression(query, allow_joins, reuse, summarize, for_save)
copy = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
field = copy.lhs.output_field
# DateTimeField is a subclass of DateField so this works for both.
assert isinstance(field, (DateField, TimeField)), (
@@ -210,7 +210,7 @@ class Trunc(TruncBase):

def __init__(self, expression, kind, output_field=None, tzinfo=None, **extra):
self.kind = kind
super(Trunc, self).__init__(expression, output_field=output_field, tzinfo=tzinfo, **extra)
super().__init__(expression, output_field=output_field, tzinfo=tzinfo, **extra)


class TruncYear(TruncBase):

@@ -153,8 +153,7 @@ class Transform(RegisterLookupMixin, Func):

class BuiltinLookup(Lookup):
def process_lhs(self, compiler, connection, lhs=None):
lhs_sql, params = super(BuiltinLookup, self).process_lhs(
compiler, connection, lhs)
lhs_sql, params = super().process_lhs(compiler, connection, lhs)
field_internal_type = self.lhs.output_field.get_internal_type()
db_type = self.lhs.output_field.db_type(connection=connection)
lhs_sql = connection.ops.field_cast_sql(
@@ -223,7 +222,7 @@ class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin):
# to prepare/transform those values.
return self.batch_process_rhs(compiler, connection)
else:
return super(FieldGetDbPrepValueIterableMixin, self).process_rhs(compiler, connection)
return super().process_rhs(compiler, connection)

def resolve_expression_parameter(self, compiler, connection, sql, param):
params = [param]
@@ -234,7 +233,7 @@ class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin):
return sql, params

def batch_process_rhs(self, compiler, connection, rhs=None):
pre_processed = super(FieldGetDbPrepValueIterableMixin, self).batch_process_rhs(compiler, connection, rhs)
pre_processed = super().batch_process_rhs(compiler, connection, rhs)
# The params list may contain expressions which compile to a
# sql/param pair. Zip them to get sql and param pairs that refer to the
# same argument and attempt to replace them with the result of
@@ -258,7 +257,7 @@ class IExact(BuiltinLookup):
prepare_rhs = False

def process_rhs(self, qn, connection):
rhs, params = super(IExact, self).process_rhs(qn, connection)
rhs, params = super().process_rhs(qn, connection)
if params:
params[0] = connection.ops.prep_for_iexact_query(params[0])
return rhs, params
@@ -292,7 +291,7 @@ class IntegerFieldFloatRounding:
def get_prep_lookup(self):
if isinstance(self.rhs, float):
self.rhs = math.ceil(self.rhs)
return super(IntegerFieldFloatRounding, self).get_prep_lookup()
return super().get_prep_lookup()


@IntegerField.register_lookup
@@ -366,7 +365,7 @@ class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
placeholder = '(' + ', '.join(sqls) + ')'
return (placeholder, sqls_params)
else:
return super(In, self).process_rhs(compiler, connection)
return super().process_rhs(compiler, connection)

def get_rhs_op(self, connection, rhs):
return 'IN %s' % rhs
@@ -375,7 +374,7 @@ class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
max_in_list_size = connection.ops.max_in_list_size()
if self.rhs_is_direct_value() and max_in_list_size and len(self.rhs) > max_in_list_size:
return self.split_parameter_list_as_sql(compiler, connection)
return super(In, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)

def split_parameter_list_as_sql(self, compiler, connection):
# This is a special case for databases which limit the number of
@@ -416,7 +415,7 @@ class PatternLookup(BuiltinLookup):
pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc)
return pattern.format(rhs)
else:
return super(PatternLookup, self).get_rhs_op(connection, rhs)
return super().get_rhs_op(connection, rhs)


@Field.register_lookup
@@ -425,7 +424,7 @@ class Contains(PatternLookup):
prepare_rhs = False

def process_rhs(self, qn, connection):
rhs, params = super(Contains, self).process_rhs(qn, connection)
rhs, params = super().process_rhs(qn, connection)
if params and not self.bilateral_transforms:
params[0] = "%%%s%%" % connection.ops.prep_for_like_query(params[0])
return rhs, params
@@ -443,7 +442,7 @@ class StartsWith(PatternLookup):
prepare_rhs = False

def process_rhs(self, qn, connection):
rhs, params = super(StartsWith, self).process_rhs(qn, connection)
rhs, params = super().process_rhs(qn, connection)
if params and not self.bilateral_transforms:
params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
return rhs, params
@@ -455,7 +454,7 @@ class IStartsWith(PatternLookup):
prepare_rhs = False

def process_rhs(self, qn, connection):
rhs, params = super(IStartsWith, self).process_rhs(qn, connection)
rhs, params = super().process_rhs(qn, connection)
if params and not self.bilateral_transforms:
params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
return rhs, params
@@ -467,7 +466,7 @@ class EndsWith(PatternLookup):
prepare_rhs = False

def process_rhs(self, qn, connection):
rhs, params = super(EndsWith, self).process_rhs(qn, connection)
rhs, params = super().process_rhs(qn, connection)
if params and not self.bilateral_transforms:
params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
return rhs, params
@@ -479,7 +478,7 @@ class IEndsWith(PatternLookup):
prepare_rhs = False

def process_rhs(self, qn, connection):
rhs, params = super(IEndsWith, self).process_rhs(qn, connection)
rhs, params = super().process_rhs(qn, connection)
if params and not self.bilateral_transforms:
params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
return rhs, params
@@ -513,7 +512,7 @@ class Regex(BuiltinLookup):

def as_sql(self, compiler, connection):
if self.lookup_name in connection.operators:
return super(Regex, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)
else:
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
@@ -571,7 +570,7 @@ class YearExact(YearLookup, Exact):
except (IndexError, TypeError, ValueError):
# Can't determine the bounds before executing the query, so skip
# optimizations by falling back to a standard exact comparison.
return super(Exact, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)
bounds = self.year_lookup_bounds(connection, rhs_params[0])
params.extend(bounds)
return '%s BETWEEN %%s AND %%s' % lhs_sql, params

@@ -19,12 +19,12 @@ class BaseManager:

def __new__(cls, *args, **kwargs):
# We capture the arguments to make returning them trivial
obj = super(BaseManager, cls).__new__(cls)
obj = super().__new__(cls)
obj._constructor_args = (args, kwargs)
return obj

def __init__(self):
super(BaseManager, self).__init__()
super().__init__()
self._set_creation_counter()
self.model = None
self.name = None
@@ -196,8 +196,8 @@ class ManagerDescriptor:

class EmptyManager(Manager):
def __init__(self, model):
super(EmptyManager, self).__init__()
super().__init__()
self.model = model

def get_queryset(self):
return super(EmptyManager, self).get_queryset().none()
return super().get_queryset().none()

@@ -56,7 +56,7 @@ class Q(tree.Node):
default = AND

def __init__(self, *args, **kwargs):
super(Q, self).__init__(children=list(args) + list(kwargs.items()))
super().__init__(children=list(args) + list(kwargs.items()))

def _combine(self, other, conn):
if not isinstance(other, Q):

@@ -25,13 +25,13 @@ class ModelSignal(Signal):

def connect(self, receiver, sender=None, weak=True, dispatch_uid=None, apps=None):
self._lazy_method(
super(ModelSignal, self).connect, apps, receiver, sender,
super().connect, apps, receiver, sender,
weak=weak, dispatch_uid=dispatch_uid,
)

def disconnect(self, receiver=None, sender=None, dispatch_uid=None, apps=None):
return self._lazy_method(
super(ModelSignal, self).disconnect, apps, receiver, sender, dispatch_uid=dispatch_uid
super().disconnect, apps, receiver, sender, dispatch_uid=dispatch_uid
)



@@ -928,7 +928,7 @@ class SQLInsertCompiler(SQLCompiler):

def __init__(self, *args, **kwargs):
self.return_id = False
super(SQLInsertCompiler, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

def field_as_sql(self, field, val):
"""
@@ -1181,7 +1181,7 @@ class SQLUpdateCompiler(SQLCompiler):
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super(SQLUpdateCompiler, self).execute_sql(result_type)
cursor = super().execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
@@ -1217,7 +1217,7 @@ class SQLUpdateCompiler(SQLCompiler):
query._extra = {}
query.select = []
query.add_fields([query.get_meta().pk.name])
super(SQLUpdateCompiler, self).pre_sql_setup()
super().pre_sql_setup()

must_pre_select = count > 1 and not self.connection.features.update_can_self_select


@@ -88,7 +88,7 @@ class UpdateQuery(Query):
compiler = 'SQLUpdateCompiler'

def __init__(self, *args, **kwargs):
super(UpdateQuery, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._setup_query()

def _setup_query(self):
@@ -103,7 +103,7 @@ class UpdateQuery(Query):
self.related_updates = {}

def clone(self, klass=None, **kwargs):
return super(UpdateQuery, self).clone(klass, related_updates=self.related_updates.copy(), **kwargs)
return super().clone(klass, related_updates=self.related_updates.copy(), **kwargs)

def update_batch(self, pk_list, values, using):
self.add_update_values(values)
@@ -176,7 +176,7 @@ class InsertQuery(Query):
compiler = 'SQLInsertCompiler'

def __init__(self, *args, **kwargs):
super(InsertQuery, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.fields = []
self.objs = []
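The remaining hunks (manager, query_utils, signals, and the SQL compiler and subquery classes) apply the same substitution, including the super(BaseManager, cls).__new__(cls) case, where the zero-argument form uses the cls argument of __new__ just as it would use self in an instance method. The one caveat worth keeping in mind, shown here as a hedged sketch rather than anything from the patch, is that zero-argument super() only works in functions defined inside a class body; a function attached to the class after the fact still needs the explicit form:

    class Widget:
        def describe(self):
            return 'widget'

    class FancyWidget(Widget):
        def describe(self):
            # Defined in the class body, so the implicit __class__ cell exists.
            return 'fancy ' + super().describe()

    def late_describe(self):
        # No __class__ cell here: zero-argument super() would raise RuntimeError,
        # so the explicit two-argument form is still required.
        return 'patched ' + super(FancyWidget, self).describe()

    FancyWidget.late_describe = late_describe

    assert FancyWidget().describe() == 'fancy widget'
    assert FancyWidget().late_describe() == 'patched widget'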