Refs #28909 -- Simplified code using unpacking generalizations.

commit c972af69e2 (parent 6888375c53)
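Nearly every hunk below applies the same idiom: PEP 448 unpacking generalizations (Python 3.5+), which replace sequence concatenation with a starred element inside a list, tuple, or set display. A minimal sketch of the idiom, using placeholder data rather than Django code:

# PEP 448: a starred element splices any iterable into a display.
parts = ["a", "b"]
extra = ("c", "d")

old_list = parts + ["e"]          # concatenation: operands must match types
old_tuple = tuple(parts) + extra  # requires an explicit tuple() conversion

new_list = [*parts, "e"]          # splices parts, then appends "e"
new_tuple = (*parts, *extra)      # mixes a list and a tuple freely

assert old_list == new_list
assert old_tuple == new_tuple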
@@ -507,7 +507,7 @@ class BaseModelAdmin(metaclass=forms.MediaDefiningClass):
         # Is it a valid relational lookup?
         return not {
             LOOKUP_SEP.join(relation_parts),
-            LOOKUP_SEP.join(relation_parts + [part]),
+            LOOKUP_SEP.join([*relation_parts, part]),
         }.isdisjoint(valid_lookups)
 
     def to_field_allowed(self, request, to_field):
@@ -737,8 +737,7 @@ class ModelAdmin(BaseModelAdmin):
                 "<path:object_id>/",
                 wrap(
                     RedirectView.as_view(
-                        pattern_name="%s:%s_%s_change"
-                        % ((self.admin_site.name,) + info)
+                        pattern_name="%s:%s_%s_change" % (self.admin_site.name, *info)
                     )
                 ),
             ),
@@ -1051,7 +1050,7 @@ class ModelAdmin(BaseModelAdmin):
         Return a list of choices for use in a form object. Each choice is a
         tuple (name, description).
         """
-        choices = [] + default_choices
+        choices = [*default_choices]
         for func, name, description in self.get_actions(request).values():
             choice = (name, description % model_format_dict(self.opts))
             choices.append(choice)
@@ -307,7 +307,7 @@ class RelatedFieldWidgetWrapper(forms.Widget):
 
     def get_related_url(self, info, action, *args):
         return reverse(
-            "admin:%s_%s_%s" % (info + (action,)),
+            "admin:%s_%s_%s" % (*info, action),
             current_app=self.admin_site.name,
             args=args,
         )
@@ -582,9 +582,7 @@ class AutocompleteMixin:
             js=(
                 "admin/js/vendor/jquery/jquery%s.js" % extra,
                 "admin/js/vendor/select2/select2.full%s.js" % extra,
-            )
-            + i18n_file
-            + (
+                *i18n_file,
                 "admin/js/jquery.init.js",
                 "admin/js/autocomplete.js",
             ),
@@ -104,7 +104,8 @@ class UserAdmin(admin.ModelAdmin):
                 self.admin_site.admin_view(self.user_change_password),
                 name="auth_user_password_change",
             ),
-        ] + super().get_urls()
+            *super().get_urls(),
+        ]
 
     def lookup_allowed(self, lookup, value, request):
         # Don't allow lookups involving passwords.
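The get_urls() hunk above moves super().get_urls() inside the list literal, so the custom URL (which must match first) and the inherited URLs live in a single display. A hypothetical sketch of the same pattern; Base and Child are placeholders, not Django classes:

class Base:
    def get_urls(self):
        return ["base-url-1", "base-url-2"]

class Child(Base):
    def get_urls(self):
        # Custom entries first (first match wins), parent URLs spliced after.
        return [
            "child-url",
            *super().get_urls(),
        ]

assert Child().get_urls() == ["child-url", "base-url-1", "base-url-2"]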
@@ -80,7 +80,7 @@ class Permission(models.Model):
         return "%s | %s" % (self.content_type, self.name)
 
     def natural_key(self):
-        return (self.codename,) + self.content_type.natural_key()
+        return (self.codename, *self.content_type.natural_key())
 
     natural_key.dependencies = ["contenttypes.contenttype"]
@@ -185,7 +185,7 @@ class UserAttributeSimilarityValidator:
             if not value or not isinstance(value, str):
                 continue
             value_lower = value.lower()
-            value_parts = re.split(r"\W+", value_lower) + [value_lower]
+            value_parts = [*re.split(r"\W+", value_lower), value_lower]
             for value_part in value_parts:
                 if exceeds_maximum_length_ratio(
                     password, self.max_similarity, value_part
@@ -5,7 +5,8 @@ from django.db.backends.postgresql.introspection import DatabaseIntrospection
 class PostGISIntrospection(DatabaseIntrospection):
     postgis_oid_lookup = {}  # Populated when introspection is performed.
 
-    ignored_tables = DatabaseIntrospection.ignored_tables + [
+    ignored_tables = [
+        *DatabaseIntrospection.ignored_tables,
         "geography_columns",
         "geometry_columns",
         "raster_columns",
@@ -360,7 +360,7 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
         minor, subminor).
         """
         version = self.postgis_lib_version()
-        return (version,) + get_version_tuple(version)
+        return (version, *get_version_tuple(version))
 
     def proj_version_tuple(self):
         """
@@ -196,7 +196,7 @@ class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
         minor, subminor).
         """
         version = self.spatialite_version()
-        return (version,) + get_version_tuple(version)
+        return (version, *get_version_tuple(version))
 
     def spatial_aggregate_name(self, agg_name):
         """
@@ -284,7 +284,7 @@ class RelateLookup(GISLookup):
         elif not isinstance(pattern, str) or not self.pattern_regex.match(pattern):
             raise ValueError('Invalid intersection matrix pattern "%s".' % pattern)
         sql, params = super().process_rhs(compiler, connection)
-        return sql, params + [pattern]
+        return sql, [*params, pattern]
 
 
 @BaseSpatialField.register_lookup
@@ -149,7 +149,7 @@ class CookieStorage(BaseStorage):
 
         def compute_msg(some_serialized_msg):
             return self._encode_parts(
-                some_serialized_msg + [self.not_finished_json],
+                [*some_serialized_msg, self.not_finished_json],
                 encode_empty=True,
             )
@@ -87,7 +87,7 @@ class KeyTransform(Transform):
 
     def as_sql(self, compiler, connection):
         lhs, params = compiler.compile(self.lhs)
-        return "(%s -> %%s)" % lhs, tuple(params) + (self.key_name,)
+        return "(%s -> %%s)" % lhs, (*params, self.key_name)
 
 
 class KeyTransformFactory:
@@ -211,7 +211,7 @@ class SearchQuery(SearchQueryCombinable, Func):
         expressions = (value,)
         self.config = SearchConfig.from_parameter(config)
         if self.config is not None:
-            expressions = (self.config,) + expressions
+            expressions = [self.config, *expressions]
         self.invert = invert
         super().__init__(*expressions, output_field=output_field)
@@ -258,16 +258,16 @@ class SearchRank(Func):
             vector = SearchVector(vector)
         if not hasattr(query, "resolve_expression"):
             query = SearchQuery(query)
-        expressions = (vector, query)
+        expressions = [vector, query]
         if weights is not None:
             if not hasattr(weights, "resolve_expression"):
                 weights = Value(weights)
             weights = Cast(weights, ArrayField(_Float4Field()))
-            expressions = (weights,) + expressions
+            expressions = [weights, *expressions]
         if normalization is not None:
             if not hasattr(normalization, "resolve_expression"):
                 normalization = Value(normalization)
-            expressions += (normalization,)
+            expressions.append(normalization)
         if cover_density:
             self.function = "ts_rank_cd"
         super().__init__(*expressions)
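The SearchRank hunk does more than swap syntax: by building a list instead of a tuple, the normalization branch can use an in-place append() rather than rebuilding the collection, and the final super().__init__(*expressions) call is indifferent to the container type. Plain-value sketch (placeholder strings, not real expressions):

expressions = ("vector", "query")
expressions = ("weights",) + expressions  # old: rebuild the tuple to prepend
expressions += ("normalization",)         # old: rebuild again to append

updated = ["vector", "query"]
updated = ["weights", *updated]           # new: prepend via unpacking
updated.append("normalization")           # new: lists append in place

assert list(expressions) == updated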
@@ -311,7 +311,7 @@ class SearchHeadline(Func):
         expressions = (expression, query)
         if config is not None:
             config = SearchConfig.from_parameter(config)
-            expressions = (config,) + expressions
+            expressions = (config, *expressions)
         super().__init__(*expressions)
 
     def as_sql(self, compiler, connection, function=None, template=None):
@@ -62,7 +62,7 @@ def _load_all_namespaces(resolver, parents=()):
     """
     url_patterns = getattr(resolver, "url_patterns", [])
     namespaces = [
-        ":".join(parents + (url.namespace,))
+        ":".join([*parents, url.namespace])
         for url in url_patterns
         if getattr(url, "namespace", None) is not None
     ]
@@ -74,7 +74,7 @@ class Command(BaseCommand):
         ignore_patterns = set(options["ignore_patterns"])
         self.verbosity = options["verbosity"]
         if options["fuzzy"]:
-            self.program_options = self.program_options + ["-f"]
+            self.program_options = [*self.program_options, "-f"]
 
         if find_command(self.program) is None:
             raise CommandError(
@@ -498,7 +498,7 @@ class Command(BaseCommand):
             potfile = os.path.join(path, "%s.pot" % self.domain)
             if not os.path.exists(potfile):
                 continue
-            args = ["msguniq"] + self.msguniq_options + [potfile]
+            args = ["msguniq", *self.msguniq_options, potfile]
             msgs, errors, status = popen_wrapper(args)
             if errors:
                 if status != STATUS_OK:
@@ -702,7 +702,7 @@ class Command(BaseCommand):
         pofile = os.path.join(basedir, "%s.po" % self.domain)
 
         if os.path.exists(pofile):
-            args = ["msgmerge"] + self.msgmerge_options + [pofile, potfile]
+            args = ["msgmerge", *self.msgmerge_options, pofile, potfile]
             _, errors, status = popen_wrapper(args)
             if errors:
                 if status != STATUS_OK:
@@ -725,7 +725,7 @@ class Command(BaseCommand):
             fp.write(msgs)
 
         if self.no_obsolete:
-            args = ["msgattrib"] + self.msgattrib_options + ["-o", pofile, pofile]
+            args = ["msgattrib", *self.msgattrib_options, "-o", pofile, pofile]
             msgs, errors, status = popen_wrapper(args)
             if errors:
                 if status != STATUS_OK:
@@ -11,15 +11,21 @@ from django.utils.datastructures import OrderedSet
 
 FieldInfo = namedtuple(
     "FieldInfo",
-    BaseFieldInfo._fields
-    + ("extra", "is_unsigned", "has_json_constraint", "comment", "data_type"),
+    [
+        *BaseFieldInfo._fields,
+        "extra",
+        "is_unsigned",
+        "has_json_constraint",
+        "comment",
+        "data_type",
+    ],
 )
 InfoLine = namedtuple(
     "InfoLine",
     "col_name data_type max_len num_prec num_scale extra column_default "
     "collation is_unsigned comment",
 )
-TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",))
+TableInfo = namedtuple("TableInfo", [*BaseTableInfo._fields, "comment"])
 
 
 class DatabaseIntrospection(BaseDatabaseIntrospection):
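The introspection hunks above and below all extend a base namedtuple's fields; namedtuple() accepts any sequence of names, so a list built by unpacking works exactly like tuple concatenation. Self-contained sketch with a stand-in base type, not the Django class:

from collections import namedtuple

BaseFieldInfo = namedtuple("BaseFieldInfo", ["name", "type_code"])  # stand-in

# Splice the inherited field names, then add backend-specific ones.
FieldInfo = namedtuple("FieldInfo", [*BaseFieldInfo._fields, "comment"])

assert FieldInfo._fields == ("name", "type_code", "comment")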
@@ -8,9 +8,9 @@ from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
 from django.db.backends.base.introspection import TableInfo as BaseTableInfo
 
 FieldInfo = namedtuple(
-    "FieldInfo", BaseFieldInfo._fields + ("is_autofield", "is_json", "comment")
+    "FieldInfo", [*BaseFieldInfo._fields, "is_autofield", "is_json", "comment"]
 )
-TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",))
+TableInfo = namedtuple("TableInfo", [*BaseTableInfo._fields, "comment"])
 
 
 class DatabaseIntrospection(BaseDatabaseIntrospection):
@@ -5,8 +5,8 @@ from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
 from django.db.backends.base.introspection import TableInfo as BaseTableInfo
 from django.db.models import Index
 
-FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield", "comment"))
-TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",))
+FieldInfo = namedtuple("FieldInfo", [*BaseFieldInfo._fields, "is_autofield", "comment"])
+TableInfo = namedtuple("TableInfo", [*BaseTableInfo._fields, "comment"])
 
 
 class DatabaseIntrospection(BaseDatabaseIntrospection):
@@ -10,7 +10,7 @@ from django.db.models import Index
 from django.utils.regex_helper import _lazy_re_compile
 
 FieldInfo = namedtuple(
-    "FieldInfo", BaseFieldInfo._fields + ("pk", "has_json_constraint")
+    "FieldInfo", [*BaseFieldInfo._fields, "pk", "has_json_constraint"]
 )
 
 field_size_re = _lazy_re_compile(r"^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$")
@@ -375,7 +375,7 @@ class DatabaseOperations(BaseDatabaseOperations):
     def combine_duration_expression(self, connector, sub_expressions):
         if connector not in ["+", "-", "*", "/"]:
             raise DatabaseError("Invalid connector for timedelta: %s." % connector)
-        fn_params = ["'%s'" % connector] + sub_expressions
+        fn_params = ["'%s'" % connector, *sub_expressions]
         if len(fn_params) > 3:
             raise ValueError("Too many params for timedelta operations.")
         return "django_format_dtdelta(%s)" % ", ".join(fn_params)
@@ -1233,7 +1233,7 @@ class MigrationAutodetector:
             # Handle ForeignKey which can only have a single to_field.
             remote_field_name = getattr(new_field.remote_field, "field_name", None)
             if remote_field_name:
-                to_field_rename_key = rename_key + (remote_field_name,)
+                to_field_rename_key = (*rename_key, remote_field_name)
                 if to_field_rename_key in self.renamed_fields:
                     # Repoint both model and field name because to_field
                     # inclusion in ForeignKey.deconstruct() is based on
@@ -1249,14 +1249,14 @@ class MigrationAutodetector:
                 new_field.from_fields = tuple(
                     [
                         self.renamed_fields.get(
-                            from_rename_key + (from_field,), from_field
+                            (*from_rename_key, from_field), from_field
                         )
                         for from_field in from_fields
                     ]
                 )
                 new_field.to_fields = tuple(
                     [
-                        self.renamed_fields.get(rename_key + (to_field,), to_field)
+                        self.renamed_fields.get((*rename_key, to_field), to_field)
                         for to_field in new_field.to_fields
                     ]
                 )
@@ -231,7 +231,7 @@ class CreateModel(ModelOperation):
             return [
                 replace(
                     self,
-                    fields=self.fields + [(operation.name, operation.field)],
+                    fields=[*self.fields, (operation.name, operation.field)],
                 ),
             ]
         elif isinstance(operation, AlterField):
@@ -108,7 +108,7 @@ class Aggregate(Func):
 
     def get_source_expressions(self):
         source_expressions = super().get_source_expressions()
-        return source_expressions + [self.filter, self.order_by]
+        return [*source_expressions, self.filter, self.order_by]
 
     def set_source_expressions(self, exprs):
         *exprs, self.filter, self.order_by = exprs
@@ -250,7 +250,7 @@ class ModelBase(type):
 
         # Collect the parent links for multi-table inheritance.
         parent_links = {}
-        for base in reversed([new_class] + parents):
+        for base in reversed([new_class, *parents]):
            # Conceptually equivalent to `if base is Model`.
            if not hasattr(base, "_meta"):
                continue
@@ -101,7 +101,8 @@ class BaseConstraint:
                 errors.append(
                     checks.Error(
                         "'constraints' refers to the joined field '%s'."
-                        % LOOKUP_SEP.join([field_name] + lookups),
+                        % LOOKUP_SEP.join([field_name, *lookups]),
                         obj=model,
                         id="models.E041",
                     )
@@ -1686,7 +1686,7 @@ class Case(SQLiteNumericMixin, Expression):
         return "<%s: %s>" % (self.__class__.__name__, self)
 
     def get_source_expressions(self):
-        return self.cases + [self.default]
+        return [*self.cases, self.default]
 
     def set_source_expressions(self, exprs):
         *self.cases, self.default = exprs
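In the Case hunk, the rewritten getter is now the mirror image of set_source_expressions() directly below it: one packs with a starred element, the other unpacks with a starred assignment target. Plain-value sketch:

cases, default = [1, 2, 3], 0

packed = [*cases, default]   # getter style: cases first, default last
*cases2, default2 = packed   # setter style: everything-but-last, then last

assert (cases2, default2) == (cases, default)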
@@ -1788,8 +1788,9 @@ class DecimalField(Field):
 
     @cached_property
     def validators(self):
-        return super().validators + [
-            validators.DecimalValidator(self.max_digits, self.decimal_places)
+        return [
+            *super().validators,
+            validators.DecimalValidator(self.max_digits, self.decimal_places),
         ]
 
     @cached_property
@@ -235,7 +235,7 @@ class HasKeyLookup(PostgresOperatorLookup):
                 compiler, connection
             ):
                 sql_parts.append(template % (lhs_sql, "%s"))
-                params.extend(lhs_params + [rhs_json_path])
+                params.extend([*lhs_params, rhs_json_path])
         return self._combine_sql_parts(sql_parts), tuple(params)
 
     def as_mysql(self, compiler, connection):
@@ -379,7 +379,7 @@ class KeyTransform(Transform):
     def as_mysql(self, compiler, connection):
         lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
         json_path = compile_json_path(key_transforms)
-        return "JSON_EXTRACT(%s, %%s)" % lhs, tuple(params) + (json_path,)
+        return "JSON_EXTRACT(%s, %%s)" % lhs, (*params, json_path)
 
     def as_oracle(self, compiler, connection):
         lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
@@ -410,12 +410,12 @@ class KeyTransform(Transform):
         lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
         if len(key_transforms) > 1:
             sql = "(%s %s %%s)" % (lhs, self.postgres_nested_operator)
-            return sql, tuple(params) + (key_transforms,)
+            return sql, (*params, key_transforms)
         try:
             lookup = int(self.key_name)
         except ValueError:
             lookup = self.key_name
-        return "(%s %s %%s)" % (lhs, self.postgres_operator), tuple(params) + (lookup,)
+        return "(%s %s %%s)" % (lhs, self.postgres_operator), (*params, lookup)
 
     def as_sqlite(self, compiler, connection):
         lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
@@ -426,7 +426,7 @@ class KeyTransform(Transform):
         return (
             "(CASE WHEN JSON_TYPE(%s, %%s) IN (%s) "
             "THEN JSON_TYPE(%s, %%s) ELSE JSON_EXTRACT(%s, %%s) END)"
-        ) % (lhs, datatype_values, lhs, lhs), (tuple(params) + (json_path,)) * 3
+        ) % (lhs, datatype_values, lhs, lhs), (*params, json_path) * 3
 
 
 class KeyTextTransform(KeyTransform):
@@ -442,7 +442,7 @@ class KeyTextTransform(KeyTransform):
         else:
             lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
             json_path = compile_json_path(key_transforms)
-            return "(%s ->> %%s)" % lhs, tuple(params) + (json_path,)
+            return "(%s ->> %%s)" % lhs, (*params, json_path)
 
     @classmethod
     def from_lookup(cls, lookup):
@@ -291,7 +291,7 @@ class ManyToOneRel(ForeignObjectRel):
 
     @property
     def identity(self):
-        return super().identity + (self.field_name,)
+        return (*super().identity, self.field_name)
 
     def get_related_field(self):
         """
@@ -382,7 +382,8 @@ class ManyToManyRel(ForeignObjectRel):
 
     @property
     def identity(self):
-        return super().identity + (
+        return (
+            *super().identity,
             self.through,
             make_hashable(self.through_fields),
             self.db_constraint,
@@ -176,7 +176,7 @@ class Index:
         ]
         # The length of the parts of the name is based on the default max
         # length of 30 characters.
-        hash_data = [table_name] + column_names_with_order + [self.suffix]
+        hash_data = [table_name, *column_names_with_order, self.suffix]
         self.name = "%s_%s_%s" % (
             table_name[:11],
             column_names[0][:7],
@@ -891,7 +891,7 @@ class QuerySet(AltersData):
         self._for_write = True
         connection = connections[self.db]
         max_batch_size = connection.ops.bulk_batch_size(
-            [opts.pk, opts.pk] + fields, objs
+            [opts.pk, opts.pk, *fields], objs
         )
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         requires_casting = connection.features.requires_casted_case_in_updates
@@ -1524,9 +1524,7 @@ class QuerySet(AltersData):
         # Clear limits and ordering so they can be reapplied
         clone.query.clear_ordering(force=True)
         clone.query.clear_limits()
-        clone.query.combined_queries = (self.query,) + tuple(
-            qs.query for qs in other_qs
-        )
+        clone.query.combined_queries = (self.query, *(qs.query for qs in other_qs))
         clone.query.combinator = combinator
         clone.query.combinator_all = all
         return clone
@@ -1452,7 +1452,7 @@ class SQLCompiler:
             field = klass_info["field"]
             if klass_info["reverse"]:
                 field = field.remote_field
-            path = parent_path + [field.name]
+            path = [*parent_path, field.name]
             yield LOOKUP_SEP.join(path)
             queue.extend(
                 (path, klass_info)
@@ -1905,7 +1905,7 @@ class SQLInsertCompiler(SQLCompiler):
         if on_conflict_suffix_sql:
             result.append(on_conflict_suffix_sql)
         return [
-            (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
+            (" ".join([*result, "VALUES (%s)" % ", ".join(p)]), vals)
             for p, vals in zip(placeholder_rows, param_rows)
         ]
@@ -895,7 +895,7 @@ class BaseModelFormSet(BaseFormSet, AltersData):
                     # object
                     else:
                         date_data = (getattr(form.cleaned_data[unique_for], lookup),)
-                    data = (form.cleaned_data[field],) + date_data
+                    data = (form.cleaned_data[field], *date_data)
                    # if we've already seen it then we have a uniqueness failure
                    if data in seen_data:
                        # poke error messages into the right places and mark
@@ -107,7 +107,7 @@ class MultiPartParser:
         # For compatibility with low-level network APIs (with 32-bit integers),
         # the chunk size should be < 2^31, but still divisible by 4.
         possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
-        self._chunk_size = min([2**31 - 4] + possible_sizes)
+        self._chunk_size = min([2**31 - 4, *possible_sizes])
 
         self._meta = META
         self._encoding = encoding or settings.DEFAULT_CHARSET
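A detail worth noting in the MultiPartParser hunk: because the 2**31 - 4 upper bound sits inside the list being unpacked into, min() never receives an empty sequence, even when no upload handler defines a chunk size. Sketch:

possible_sizes = []  # e.g. no handler sets chunk_size
assert min([2**31 - 4, *possible_sizes]) == 2**31 - 4

possible_sizes = [64 * 2**10, 2**20]
assert min([2**31 - 4, *possible_sizes]) == 64 * 2**10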
@@ -304,7 +304,7 @@ class TagHelperNode(Node):
     def get_resolved_arguments(self, context):
         resolved_args = [var.resolve(context) for var in self.args]
         if self.takes_context:
-            resolved_args = [context] + resolved_args
+            resolved_args = [context, *resolved_args]
         resolved_kwargs = {k: v.resolve(context) for k, v in self.kwargs.items()}
         return resolved_args, resolved_kwargs
@@ -145,7 +145,7 @@ class SimpleTemplateResponse(HttpResponse):
 
 
 class TemplateResponse(SimpleTemplateResponse):
-    rendering_attrs = SimpleTemplateResponse.rendering_attrs + ["_request"]
+    rendering_attrs = [*SimpleTemplateResponse.rendering_attrs, "_request"]
 
     def __init__(
         self,
@@ -71,7 +71,7 @@ class ResolverMatch:
             self._func_path = func.__module__ + "." + func.__name__
 
         view_path = url_name or self._func_path
-        self.view_name = ":".join(self.namespaces + [view_path])
+        self.view_name = ":".join([*self.namespaces, view_path])
 
     def __getitem__(self, index):
         return (self.func, self.args, self.kwargs)[index]
@@ -691,8 +691,8 @@ class URLResolver:
                         sub_match_args,
                         sub_match_dict,
                         sub_match.url_name,
-                        [self.app_name] + sub_match.app_names,
-                        [self.namespace] + sub_match.namespaces,
+                        [self.app_name, *sub_match.app_names],
+                        [self.namespace, *sub_match.namespaces],
                         self._join_route(current_route, sub_match.route),
                         tried,
                         captured_kwargs=sub_match.captured_kwargs,
@@ -193,7 +193,7 @@ def common_roots(paths):
     # Turn the tree into a list of Path instances.
     def _walk(node, path):
         for prefix, child in node.items():
-            yield from _walk(child, path + (prefix,))
+            yield from _walk(child, [*path, prefix])
         if not node:
             yield Path(*path)
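In the common_roots() hunk, [*path, prefix] accepts any iterable and always produces a list, whereas the old path + (prefix,) required path to already be a tuple; Path(*path) at the leaves unpacks either. Self-contained sketch with placeholder data:

from pathlib import Path

tree = {"home": {"user": {}}, "tmp": {}}  # hypothetical prefix tree

def _walk(node, path):
    for prefix, child in node.items():
        yield from _walk(child, [*path, prefix])
    if not node:
        yield Path(*path)

assert sorted(str(p) for p in _walk(tree, ())) == [
    str(Path("home", "user")),
    str(Path("tmp")),
]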
@@ -96,7 +96,7 @@ def lazy(func, *resultclasses):
         def __reduce__(self):
             return (
                 _lazy_proxy_unpickle,
-                (func, self._args, self._kw) + resultclasses,
+                (func, self._args, self._kw, *resultclasses),
             )
 
         def __deepcopy__(self, memo):
@@ -235,13 +235,13 @@ def fetch(resources=None, languages=None):
         ]
         # Transifex pull
         if languages is None:
-            run(cmd + ["--all"])
+            run([*cmd, "--all"])
             target_langs = sorted(
                 d for d in os.listdir(dir_) if not d.startswith("_") and d != "en"
             )
         else:
             for lang in languages:
-                run(cmd + ["-l", lang])
+                run([*cmd, "-l", lang])
             target_langs = languages
 
         target_langs = [LANG_OVERRIDES.get(d, d) for d in target_langs]