Mirror of https://github.com/django/django.git (synced 2025-03-12 18:30:48 +00:00)
Refs #35842 -- Fixed handling of quotes in JSONField key lookups on Oracle.

commit b28438f379
parent ea34de3bd7
@@ -50,6 +50,10 @@ class DatabaseFeatures(BaseDatabaseFeatures):
         # The django_format_dtdelta() function doesn't properly handle mixed
         # Date/DateTime fields and timedeltas.
         "expressions.tests.FTimeDeltaTests.test_mixed_comparisons1",
+        # SQLite doesn't parse escaped double quotes in the JSON path notation,
+        # so it cannot match keys that contain double quotes (#35842).
+        "model_fields.test_jsonfield.TestQuerying."
+        "test_lookups_special_chars_double_quotes",
     }
     create_test_table_with_composite_primary_key = """
         CREATE TABLE test_table_composite_pk (
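
For context, the sketch below is not part of the patch; it only illustrates the SQLite limitation behind this expected failure. A key containing a double quote is serialized into the JSON path with an escaped quote, which SQLite's JSON path parser does not accept. The json.dumps() call mirrors how compile_json_path() quotes non-integer keys (see the KeyTransform hunk further down); the in-memory connection and the sample key are illustrative assumptions, and a SQLite build with the JSON functions enabled is assumed.

import json
import sqlite3

# Hypothetical standalone check, not Django code.
key = 'double"'
json_path = "$.%s" % json.dumps(key)  # -> $."double\""  (note the escaped quote)
doc = json.dumps({key: "some value"})

conn = sqlite3.connect(":memory:")
try:
    row = conn.execute("SELECT json_extract(?, ?)", (doc, json_path)).fetchone()
    print("matched:", row)  # not expected on current SQLite versions
except sqlite3.OperationalError as exc:
    print("rejected:", exc)  # typically reported as a JSON path error
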
@@ -230,10 +230,11 @@ class HasKeyLookup(PostgresOperatorLookup):

     def as_oracle(self, compiler, connection):
         sql, params = self.as_sql(
-            compiler, connection, template="JSON_EXISTS(%s, '%%s')"
+            compiler, connection, template="JSON_EXISTS(%s, q'\uffff%%s\uffff')"
         )
         # Add paths directly into SQL because path expressions cannot be passed
-        # as bind variables on Oracle.
+        # as bind variables on Oracle. Use a custom delimiter to prevent the
+        # JSON path from escaping the SQL literal. See comment in KeyTransform.
         return sql % tuple(params), []

     def as_postgresql(self, compiler, connection):
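
To make the quoting change concrete, here is a rough sketch of how the new template expands once the compiled JSON path is substituted in; the lhs_sql value and the sample key are invented for illustration. With the old '...' quoting, a single quote inside a key could terminate the SQL string literal early; with Oracle's alternative quoting q'\uffff...\uffff', only the U+FFFF delimiter followed by a quote can close it.

# Illustrative expansion of the Oracle template above (values are made up).
template = "JSON_EXISTS(%s, q'\uffff%%s\uffff')"
lhs_sql = '"model"."value"'        # hypothetical column reference
json_path = "$.\"single'\""        # compiled path for a key containing a quote

sql = template % lhs_sql           # JSON_EXISTS("model"."value", q'\uffff%s\uffff')
print(sql % json_path)             # the quote in the path can no longer close the literal
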
@@ -362,10 +363,24 @@ class KeyTransform(Transform):
         json_path = compile_json_path(key_transforms)
         if connection.features.supports_primitives_in_json_field:
             sql = (
-                "COALESCE(JSON_VALUE(%s, '%s'), JSON_QUERY(%s, '%s' DISALLOW SCALARS))"
+                "COALESCE("
+                "JSON_VALUE(%s, q'\uffff%s\uffff'),"
+                "JSON_QUERY(%s, q'\uffff%s\uffff' DISALLOW SCALARS)"
+                ")"
             )
         else:
-            sql = "COALESCE(JSON_QUERY(%s, '%s'), JSON_VALUE(%s, '%s'))"
+            sql = (
+                "COALESCE("
+                "JSON_QUERY(%s, q'\uffff%s\uffff'),"
+                "JSON_VALUE(%s, q'\uffff%s\uffff')"
+                ")"
+            )
         # Add paths directly into SQL because path expressions cannot be passed
-        # as bind variables on Oracle.
+        # as bind variables on Oracle. Use a custom delimiter to prevent the
+        # JSON path from escaping the SQL literal. Each key in the JSON path is
+        # passed through json.dumps() with ensure_ascii=True (the default),
+        # which converts the delimiter into the escaped \uffff format. This
+        # ensures that the delimiter is not present in the JSON path.
         return sql % ((lhs, json_path) * 2), tuple(params) * 2

     def as_postgresql(self, compiler, connection):
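
The comment above leans on a json.dumps() property that is easy to verify in isolation. The sketch below is a loose approximation rather than compile_json_path() itself: with ensure_ascii=True the U+FFFF delimiter can only appear in the path as the six-character text \uffff, so the q'\uffff...\uffff' literal cannot be closed from inside the path. The lhs value is a hypothetical column reference, and the key is adapted from the new test data further down.

import json

DELIMITER = "\uffff"  # the character used as the q'...' delimiter above

# A key deliberately containing the delimiter and a quote; ensure_ascii=True
# (the default) turns the raw delimiter into the escaped text \uffff.
key = "escape\uffff'seq"
json_path = "$.%s" % json.dumps(key)   # roughly what compile_json_path() builds

assert DELIMITER not in json_path      # only the escaped form remains, never the raw char

# Assembling the COALESCE expression the same way as_oracle() does:
lhs = '"model"."value"'                # hypothetical column reference
sql = (
    "COALESCE("
    "JSON_QUERY(%s, q'\uffff%s\uffff'),"
    "JSON_VALUE(%s, q'\uffff%s\uffff')"
    ")"
)
print(sql % ((lhs, json_path) * 2))
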
@@ -808,6 +808,59 @@ class TestQuerying(TestCase):
         )
         self.assertIs(NullableJSONModel.objects.filter(value__c__lt=5).exists(), False)

+    def test_lookups_special_chars(self):
+        test_keys = [
+            "CONTROL",
+            "single'",
+            "dollar$",
+            "dot.dot",
+            "with space",
+            "back\\slash",
+            "question?mark",
+            "user@name",
+            "emo🤡'ji",
+            "com,ma",
+            "curly{{{brace}}}s",
+            "escape\uffff'seq'\uffffue\uffff'nce",
+        ]
+        json_value = {key: "some value" for key in test_keys}
+        obj = NullableJSONModel.objects.create(value=json_value)
+        obj.refresh_from_db()
+        self.assertEqual(obj.value, json_value)
+
+        for key in test_keys:
+            lookups = {
+                "has_key": Q(value__has_key=key),
+                "has_keys": Q(value__has_keys=[key, "CONTROL"]),
+                "has_any_keys": Q(value__has_any_keys=[key, "does_not_exist"]),
+                "exact": Q(**{f"value__{key}": "some value"}),
+            }
+            for lookup, condition in lookups.items():
+                results = NullableJSONModel.objects.filter(condition)
+                with self.subTest(key=key, lookup=lookup):
+                    self.assertSequenceEqual(results, [obj])
+
+    def test_lookups_special_chars_double_quotes(self):
+        test_keys = [
+            'double"',
+            "m\\i@x. m🤡'a,t{{{ch}}}e?d$\"'es\uffff'ca\uffff'pe",
+        ]
+        json_value = {key: "some value" for key in test_keys}
+        obj = NullableJSONModel.objects.create(value=json_value)
+        obj.refresh_from_db()
+        self.assertEqual(obj.value, json_value)
+        self.assertSequenceEqual(
+            NullableJSONModel.objects.filter(value__has_keys=test_keys), [obj]
+        )
+        for key in test_keys:
+            with self.subTest(key=key):
+                results = NullableJSONModel.objects.filter(
+                    Q(value__has_key=key),
+                    Q(value__has_any_keys=[key, "does_not_exist"]),
+                    Q(**{f"value__{key}": "some value"}),
+                )
+                self.assertSequenceEqual(results, [obj])
+
     def test_lookup_exclude(self):
         tests = [
             (Q(value__a="b"), [self.objs[0]]),