Skip to content

Commit 72b3284

Browse files
committed
Allow TAG fields to be sortable
RediSearch now supports SORTABLE on TAG fields. Remove restrictions that prevented TAG fields from being sortable in JsonModel. Update documentation to reflect that all field types (TAG, TEXT, NUMERIC, GEO) support sorting. Add regression test for TAG field sortability.
1 parent d7ad230 commit 72b3284

File tree

17 files changed

+195
-86
lines changed

17 files changed

+195
-86
lines changed

aredis_om/model/cli/migrate_data.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -605,14 +605,18 @@ async def check_schema_async():
605605
detector = DatetimeFieldDetector(migrator.redis)
606606
result = await detector.check_for_schema_mismatches(models)
607607

608-
if not result['has_mismatches']:
609-
click.echo("✅ No schema mismatches detected - all datetime fields are properly indexed")
608+
if not result["has_mismatches"]:
609+
click.echo(
610+
"✅ No schema mismatches detected - all datetime fields are properly indexed"
611+
)
610612
return
611613

612-
click.echo(f"⚠️ Found {len(result['mismatches'])} datetime field schema mismatch(es):")
614+
click.echo(
615+
f"⚠️ Found {len(result['mismatches'])} datetime field schema mismatch(es):"
616+
)
613617
click.echo()
614618

615-
for mismatch in result['mismatches']:
619+
for mismatch in result["mismatches"]:
616620
click.echo(f" Model: {mismatch['model']}")
617621
click.echo(f" Field: {mismatch['field']}")
618622
click.echo(f" Current Redis type: {mismatch['current_type']}")
@@ -621,12 +625,14 @@ async def check_schema_async():
621625
click.echo()
622626

623627
click.echo("🚨 CRITICAL ISSUE DETECTED:")
624-
click.echo(result['recommendation'])
628+
click.echo(result["recommendation"])
625629
click.echo()
626630
click.echo("To fix this issue, run:")
627631
click.echo(" om migrate-data datetime")
628632
click.echo()
629-
click.echo("This will convert your datetime fields from TAG to NUMERIC indexing,")
633+
click.echo(
634+
"This will convert your datetime fields from TAG to NUMERIC indexing,"
635+
)
630636
click.echo("enabling proper range queries and sorting.")
631637

632638
raise click.ClickException("Schema mismatches detected")

aredis_om/model/encoders.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,13 +33,16 @@
3333

3434
from pydantic import BaseModel
3535

36+
3637
try:
3738
from pydantic.deprecated.json import ENCODERS_BY_TYPE
3839
from pydantic_core import PydanticUndefined
40+
3941
PYDANTIC_V2 = True
4042
except ImportError:
4143
# Pydantic v1 compatibility
4244
from pydantic.json import ENCODERS_BY_TYPE
45+
4346
PydanticUndefined = ...
4447
PYDANTIC_V2 = False
4548

aredis_om/model/migrations/__init__.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,25 +10,24 @@
1010
from .data import BaseMigration, DataMigrationError, DataMigrator
1111
from .schema import (
1212
BaseSchemaMigration,
13+
MigrationAction,
14+
MigrationError,
15+
Migrator,
1316
SchemaMigrationError,
1417
SchemaMigrator,
15-
Migrator,
16-
MigrationError,
17-
MigrationAction
1818
)
1919

20+
2021
# Maintain backward compatibility by exposing the same API
2122
__all__ = [
2223
# Data migration classes
2324
"BaseMigration",
2425
"DataMigrationError",
2526
"DataMigrator",
26-
2727
# Schema migration classes
2828
"BaseSchemaMigration",
2929
"SchemaMigrationError",
3030
"SchemaMigrator",
31-
3231
# Legacy classes (for backward compatibility)
3332
"Migrator",
3433
"MigrationError",

aredis_om/model/migrations/data/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,5 @@
88
from .base import BaseMigration, DataMigrationError
99
from .migrator import DataMigrator
1010

11+
1112
__all__ = ["BaseMigration", "DataMigrationError", "DataMigrator"]

aredis_om/model/migrations/data/base.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import time
1010
from typing import Any, Dict, List
1111

12+
1213
try:
1314
import psutil
1415
except ImportError:
@@ -19,6 +20,7 @@
1920

2021
class DataMigrationError(Exception):
2122
"""Exception raised when data migration operations fail."""
23+
2224
pass
2325

2426

aredis_om/model/migrations/data/builtin/__init__.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,11 @@
55
common data transformation scenarios.
66
"""
77

8-
from .datetime_migration import DatetimeFieldMigration, DatetimeFieldDetector, ConversionFailureMode
8+
from .datetime_migration import (
9+
ConversionFailureMode,
10+
DatetimeFieldDetector,
11+
DatetimeFieldMigration,
12+
)
13+
914

1015
__all__ = ["DatetimeFieldMigration", "DatetimeFieldDetector", "ConversionFailureMode"]

aredis_om/model/migrations/data/builtin/datetime_migration.py

Lines changed: 42 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222

2323
class SchemaMismatchError(Exception):
2424
"""Raised when deployed code expects different field types than what's in Redis."""
25+
2526
pass
2627

2728

@@ -48,7 +49,9 @@ async def check_for_schema_mismatches(self, models: List[Any]) -> Dict[str, Any]
4849
for model in models:
4950
try:
5051
# Get the current index schema from Redis
51-
index_name = f"{model._meta.global_key_prefix}:{model._meta.model_key_prefix}"
52+
index_name = (
53+
f"{model._meta.global_key_prefix}:{model._meta.model_key_prefix}"
54+
)
5255

5356
try:
5457
# Try to get index info
@@ -62,26 +65,31 @@ async def check_for_schema_mismatches(self, models: List[Any]) -> Dict[str, Any]
6265
datetime_fields = self._get_datetime_fields(model)
6366

6467
for field_name, field_info in datetime_fields.items():
65-
redis_field_type = current_schema.get(field_name, {}).get('type')
66-
67-
if redis_field_type == 'TAG' and field_info.get('expected_type') == 'NUMERIC':
68-
mismatches.append({
69-
'model': model.__name__,
70-
'field': field_name,
71-
'current_type': 'TAG',
72-
'expected_type': 'NUMERIC',
73-
'index_name': index_name
74-
})
68+
redis_field_type = current_schema.get(field_name, {}).get("type")
69+
70+
if (
71+
redis_field_type == "TAG"
72+
and field_info.get("expected_type") == "NUMERIC"
73+
):
74+
mismatches.append(
75+
{
76+
"model": model.__name__,
77+
"field": field_name,
78+
"current_type": "TAG",
79+
"expected_type": "NUMERIC",
80+
"index_name": index_name,
81+
}
82+
)
7583

7684
except Exception as e:
7785
log.warning(f"Could not check schema for model {model.__name__}: {e}")
7886
continue
7987

8088
return {
81-
'has_mismatches': len(mismatches) > 0,
82-
'mismatches': mismatches,
83-
'total_affected_models': len(set(m['model'] for m in mismatches)),
84-
'recommendation': self._get_recommendation(mismatches)
89+
"has_mismatches": len(mismatches) > 0,
90+
"mismatches": mismatches,
91+
"total_affected_models": len(set(m["model"] for m in mismatches)),
92+
"recommendation": self._get_recommendation(mismatches),
8593
}
8694

8795
def _parse_index_schema(self, index_info: List) -> Dict[str, Dict[str, Any]]:
@@ -92,22 +100,27 @@ def _parse_index_schema(self, index_info: List) -> Dict[str, Dict[str, Any]]:
92100
info_dict = {}
93101
for i in range(0, len(index_info), 2):
94102
if i + 1 < len(index_info):
95-
key = index_info[i].decode() if isinstance(index_info[i], bytes) else str(index_info[i])
103+
key = (
104+
index_info[i].decode()
105+
if isinstance(index_info[i], bytes)
106+
else str(index_info[i])
107+
)
96108
value = index_info[i + 1]
97109
info_dict[key] = value
98110

99111
# Extract attributes (field definitions)
100-
attributes = info_dict.get('attributes', [])
112+
attributes = info_dict.get("attributes", [])
101113

102114
for attr in attributes:
103115
if isinstance(attr, list) and len(attr) >= 4:
104-
field_name = attr[0].decode() if isinstance(attr[0], bytes) else str(attr[0])
105-
field_type = attr[2].decode() if isinstance(attr[2], bytes) else str(attr[2])
116+
field_name = (
117+
attr[0].decode() if isinstance(attr[0], bytes) else str(attr[0])
118+
)
119+
field_type = (
120+
attr[2].decode() if isinstance(attr[2], bytes) else str(attr[2])
121+
)
106122

107-
schema[field_name] = {
108-
'type': field_type,
109-
'raw_attr': attr
110-
}
123+
schema[field_name] = {"type": field_type, "raw_attr": attr}
111124

112125
return schema
113126

@@ -117,20 +130,20 @@ def _get_datetime_fields(self, model) -> Dict[str, Dict[str, Any]]:
117130

118131
try:
119132
# Get model fields in a compatible way
120-
if hasattr(model, '_get_model_fields'):
133+
if hasattr(model, "_get_model_fields"):
121134
model_fields = model._get_model_fields()
122-
elif hasattr(model, 'model_fields'):
135+
elif hasattr(model, "model_fields"):
123136
model_fields = model.model_fields
124137
else:
125-
model_fields = getattr(model, '__fields__', {})
138+
model_fields = getattr(model, "__fields__", {})
126139

127140
for field_name, field_info in model_fields.items():
128141
# Check if this is a datetime field
129-
field_type = getattr(field_info, 'annotation', None)
142+
field_type = getattr(field_info, "annotation", None)
130143
if field_type in (datetime.datetime, datetime.date):
131144
datetime_fields[field_name] = {
132-
'expected_type': 'NUMERIC', # New code expects NUMERIC
133-
'field_info': field_info
145+
"expected_type": "NUMERIC", # New code expects NUMERIC
146+
"field_info": field_info,
134147
}
135148

136149
except Exception as e:

aredis_om/model/migrations/data/migrator.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,9 @@
1010
import importlib.util
1111
import os
1212
import time
13-
from datetime import datetime, date
13+
from datetime import date, datetime
1414
from pathlib import Path
15-
from typing import Any, Dict, List, Optional, Set, Callable
15+
from typing import Any, Callable, Dict, List, Optional, Set
1616

1717
import redis
1818

@@ -297,7 +297,7 @@ async def run_migrations_with_monitoring(
297297
dry_run: bool = False,
298298
limit: Optional[int] = None,
299299
verbose: bool = False,
300-
progress_callback: Optional[Callable] = None # type: ignore,
300+
progress_callback: Optional[Callable] = None, # type: ignore,
301301
) -> Dict[str, Any]:
302302
"""
303303
Run pending migrations with enhanced performance monitoring.

aredis_om/model/migrations/schema/__init__.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,15 @@
66
"""
77

88
from .base import BaseSchemaMigration, SchemaMigrationError
9+
from .legacy_migrator import MigrationAction, MigrationError, Migrator
910
from .migrator import SchemaMigrator
10-
from .legacy_migrator import Migrator, MigrationError, MigrationAction
11+
1112

1213
__all__ = [
1314
"BaseSchemaMigration",
1415
"SchemaMigrationError",
1516
"SchemaMigrator",
1617
"Migrator",
1718
"MigrationError",
18-
"MigrationAction"
19+
"MigrationAction",
1920
]

aredis_om/model/migrations/schema/base.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212

1313
class SchemaMigrationError(Exception):
1414
"""Exception raised when schema migration operations fail."""
15+
1516
pass
1617

1718

0 commit comments

Comments (0)