@@ -1,6 +1,6 @@
 import logging
 from datetime import datetime, timedelta, timezone
-from typing import Generic, List, Optional, Type, TypeVar, get_args, get_origin
+from typing import Dict, Generic, List, Optional, Type, TypeVar, get_args, get_origin
 
 from bson import Binary
 from fastapi import APIRouter, Depends, HTTPException, Path, Query, Response, status
@@ -187,7 +187,7 @@ def is_pydantic_model(obj):
     return isinstance(obj, type) and issubclass(obj, BaseModel)
 
 
-def get_nested_fields(model: Type[BaseModel]):
+def get_nested_fields(model: Type[BaseModel]) -> Dict[str, Type[BaseModel]]:
     nested_fields = {}
     for field_name, field in model.__fields__.items():
         if is_pydantic_model(field.type_):
@@ -206,7 +206,7 @@ def create_nested_pipeline(model: Type[BaseModel], prefix=""):
         "timestamp": "$timestamp",
     }
 
-    for field_name, field_type in model.__fields__.items():
+    for field_name, field in model.__fields__.items():
         if field_name == "timestamp":
             continue
         lookup_field = (
@@ -219,31 +219,30 @@ def create_nested_pipeline(model: Type[BaseModel], prefix=""):
             unit_field_name = f"{prefix}{mongo_field}_unit"
             pipeline["unit"] = f"${unit_field_name}"
             match_conditions[unit_field_name] = {"$exists": True}
-        else:
-            pipeline[field_name] = f"${full_mongo_field_name}"
-            match_conditions[full_mongo_field_name] = {"$exists": True}
-
-        if field_name in nested_fields:
-            if get_origin(field_type.type_) is List:
-                nested_pipeline, nested_match = create_nested_pipeline(
-                    nested_fields[field_name], ""  # Empty prefix for list items
-                )
+        elif field_name in nested_fields:
+            nested_type = nested_fields[field_name]
+            if get_origin(field.type_) is List:
+                # Handle array of nested objects
+                nested_pipeline, nested_match = create_nested_pipeline(nested_type, "")
                 pipeline[field_name] = {
                     "$map": {
                         "input": f"${full_mongo_field_name}",
                         "as": "item",
-                        "in": {
-                            k: f"$$item.{v.replace('$', '')}" for k, v in nested_pipeline.items()
-                        },
+                        "in": nested_pipeline,
                     }
                 }
                 match_conditions[full_mongo_field_name] = {"$exists": True, "$ne": []}
             else:
+                # Handle nested object
                 nested_pipeline, nested_match = create_nested_pipeline(
-                    nested_fields[field_name], f"{field_name}."
+                    nested_type, f"{field_name}."
                )
                 pipeline[field_name] = nested_pipeline
                 match_conditions.update({f"{field_name}.{k}": v for k, v in nested_match.items()})
+        else:
+            # Handle simple field
+            pipeline[field_name] = f"${full_mongo_field_name}"
+            match_conditions[full_mongo_field_name] = {"$exists": True}
 
         logger.debug(f"Field: {field_name}, Full mongo field name: {full_mongo_field_name}")
         logger.debug(f"Resulting pipeline part: {pipeline[field_name]}")