Commit

feat: try to support normal pandas

Yazawazi committed Nov 28, 2023
1 parent c6bd389 commit e57e697
Showing 3 changed files with 114 additions and 60 deletions.
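
Before the diffs, a minimal sketch of what "normal pandas" support means in practice. This is not part of the commit; the `@funix()` decorator usage and all names are illustrative assumptions. The idea: a plain `pandas.DataFrame` annotation with a DataFrame default, instead of a `pandera.typing.DataFrame` schema.

import pandas as pd

from funix import funix  # assumed import style


@funix()
def column_sum(df: pd.DataFrame = pd.DataFrame({"a": [1, 2], "b": [3, 4]})) -> str:
    # With this commit, the columns of the default DataFrame can drive a sheet widget.
    return f"sum of column a: {df['a'].sum()}"
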
145 changes: 95 additions & 50 deletions backend/funix/decorator/__init__.py
@@ -1261,61 +1261,106 @@ def parse_widget(widget_info: str | tuple | list) -> list[str] | str:

 for _, function_param in function_params.items():
     if __pandas_use:
+        anno = function_param.annotation
+        default_values = (
+            {}
+            if function_param.default is Parameter.empty
+            else function_param.default
+        )
+
+        def analyze_columns_and_default_value(pandas_like_anno):
+            column_names = []
+            dataframe_parse_metadata[
+                function_id
+            ] = dataframe_parse_metadata.get(function_id, {})
+            columns = {}
+            if isinstance(pandas_like_anno.columns, dict):
+                columns = pandas_like_anno.columns
+            else:
+                # Should be Index here
+                for column_name in pandas_like_anno.columns.to_list():
+                    columns[column_name] = {"don't": "check"}
+            for name, column in columns.items():
+                if name in default_values:
+                    column_default = list(default_values[name])
+                else:
+                    column_default = None
+                if hasattr(column, "dtype"):
+                    d_type = column.dtype
+                    items = analyze(type(d_type))
+                    items["widget"] = "sheet"
+                else:
+                    if column_default is None:
+                        items = {"type": "string", "widget": "sheet"}
+                    else:
+                        items = get_type_widget_prop(
+                            get_type_dict(type(column_default[0]))["type"],
+                            0,
+                            [],
+                            {},
+                            None,
+                        )
+                        items = {
+                            "type": items["type"],
+                            "widget": "sheet",
+                        }
+                column_names.append(name)
+                anal = {
+                    "type": "array",
+                    "widget": "sheet",
+                    "items": items,
+                    "customLayout": False,
+                    "treat_as": "config",
+                }
+                dec_param = {
+                    "widget": "sheet",
+                    "treat_as": "config",
+                    "type": f"<mock>list[{items['type']}]</mock>",
+                }
+                if column_default:
+                    anal["default"] = column_default
+                    dec_param["default"] = column_default
+                json_schema_props[name] = anal
+                decorated_params[name] = dec_param
+            dataframe_parse_metadata[function_id][
+                function_param.name
+            ] = column_names
+
+        if isinstance(anno, __pandas_module.DataFrame):
+            if anno.columns.size == 0:
+                raise Exception(
f"{function_name}: pandas.DataFrame() is not supported, "
f"but you can add columns to it, if you mean DataFrame with no columns, "
f"please use `pandas.DataFrame` instead."
)
+            else:
+                analyze_columns_and_default_value(anno)
+            continue
+
+        if anno is __pandas_module.core.frame.DataFrame:
+            if function_param.default is not Parameter.empty:
+                analyze_columns_and_default_value(default_values)
+            else:
+                # Will become a sheet later
+                json_schema_props[function_param.name] = {
+                    "type": "object",
+                    "widget": "json",
+                    "treat_as": "config",
+                    "customLayout": False,
+                }
+                decorated_params[function_param.name] = {
+                    "widget": "json",
+                    "treat_as": "config",
+                }
+            continue
-        if (
-            function_param.annotation
-            is __pandas_module.core.frame.DataFrame
-        ):
-            raise Exception(
-                f"{function_name}: pandas DataFrame is not supported, "
-                f"please use pandera.typing.DataFrame instead"
-            )
         if (
-            hasattr(function_param.annotation, "__origin__")
-            and getattr(function_param.annotation, "__origin__")
+            hasattr(anno, "__origin__")
+            and getattr(anno, "__origin__")
             is __pandera_module.typing.pandas.DataFrame
         ):
-            anno = function_param.annotation
-            default_values = (
-                {}
-                if function_param.default is Parameter.empty
-                else function_param.default
-            )
             if hasattr(anno, "__args__"):
                 model_class = getattr(anno, "__args__")[0]
-                schema_columns = model_class.to_schema().columns
-                dataframe_parse_metadata[
-                    function_id
-                ] = dataframe_parse_metadata.get(function_id, {})
-                column_names = []
-                for name, column in schema_columns.items():
-                    if name in default_values:
-                        column_default = list(default_values[name])
-                    else:
-                        column_default = None
-                    d_type = column.dtype
-                    items = analyze(type(d_type))
-                    items["widget"] = "sheet"
-                    column_names.append(name)
-                    anal = {
-                        "type": "array",
-                        "widget": "sheet",
-                        "items": items,
-                        "customLayout": False,
-                        "treat_as": "config",
-                    }
-                    dec_param = {
-                        "widget": "sheet",
-                        "treat_as": "config",
-                        "type": f"<mock>list[{items['type']}]</mock>",
-                    }
-                    if column_default:
-                        anal["default"] = column_default
-                        dec_param["default"] = column_default
-                    json_schema_props[name] = anal
-                    decorated_params[name] = dec_param
-                dataframe_parse_metadata[function_id][
-                    function_param.name
-                ] = column_names
+                analyze_columns_and_default_value(model_class.to_schema())
             else:
                 raise Exception(
                     "Please give a schema with pandera.DataFrameModel for DataFrame"
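
To make the branches in this hunk concrete, here is a hedged sketch of the three parameter shapes the new code distinguishes. The function, class, and column names are made up for illustration; only the public pandas and pandera APIs are assumed.

import pandas as pd
import pandera as pa
from pandera.typing import DataFrame


# 1. A DataFrame *instance* as the annotation: its columns feed the sheet widget,
#    and an empty instance is rejected with the error above.
def by_instance(df: pd.DataFrame({"x": [0.0]})):
    ...


# 2. The bare pandas.DataFrame class: columns are read from the default value when
#    present; without a default the parameter falls back to a JSON widget.
def by_class(df: pd.DataFrame = pd.DataFrame({"x": [1, 2]})):
    ...


# 3. The pre-existing pandera path: a DataFrameModel schema wrapped in
#    pandera.typing.DataFrame, now routed through the shared helper.
class XSchema(pa.DataFrameModel):
    x: int


def by_schema(df: DataFrame[XSchema]):
    ...
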
25 changes: 17 additions & 8 deletions backend/funix/decorator/annnotation_analyzer.py
@@ -18,6 +18,14 @@
"""


def is_hashable(t):
try:
hash(t)
return True
except: # No TypeError
return False


class Step(Enum):
"""
The step of the analyzer.
@@ -75,14 +83,15 @@ def analyze(value: Parameter | Any) -> dict:
     Returns:
         dict: The analyzed result.
     """
-
-    if isinstance(value, Parameter):
-        annotation = value.annotation
-        if annotation in __registered__:
-            return __registered__[annotation](value)
-    else:
-        if value in __registered__:
-            return __registered__[value](value)
+    # Skip unhashable values: they cannot be looked up as dict keys.
+    if is_hashable(value):
+        if isinstance(value, Parameter):
+            annotation = value.annotation
+            if annotation in __registered__:
+                return __registered__[annotation](value)
+        else:
+            if value in __registered__:
+                return __registered__[value](value)
     return {}


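A standalone sketch (assumed, not from the commit) of why the new hashability guard is needed: annotations can now be DataFrame instances, a dictionary membership test hashes its key, and DataFrame instances are unhashable, so an unguarded `value in __registered__` would raise.

import pandas as pd

registry = {int: "integer widget"}  # stand-in for __registered__

df = pd.DataFrame({"a": [1, 2]})
try:
    _ = df in registry  # dict membership hashes the key...
except TypeError as err:
    print("unhashable:", err)  # ...so the lookup raises instead of returning False
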
4 changes: 2 additions & 2 deletions backend/funix/decorator/magic.py
@@ -414,7 +414,7 @@ def anal_function_result(
     ):
         call_result = call_result.data

-    if not isinstance(function_call_result, (str, dict, tuple)):
+    if not isinstance(call_result, (str, dict, tuple)):
         call_result = json.dumps(call_result)

     if cast_to_list_flag:
@@ -425,7 +425,7 @@
     if isinstance(call_result, tuple):
         call_result = list(call_result)

-    if function_call_result and isinstance(function_call_result, list):
+    if call_result and isinstance(call_result, list):
         if isinstance(return_type_parsed, list):
             for position, single_return_type in enumerate(return_type_parsed):
                 if __ipython_use:
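
For reference, a small self-contained sketch (an assumed simplification of magic.py that ignores the cast_to_list_flag branch) of the behavior the corrected variable name restores: non-str/dict/tuple results are JSON-encoded, and tuples are normalized to lists before per-position post-processing.

import json


def normalize(call_result):
    # Mirrors the corrected checks, which now test `call_result`,
    # the variable that actually holds the return value.
    if not isinstance(call_result, (str, dict, tuple)):
        call_result = json.dumps(call_result)
    if isinstance(call_result, tuple):
        call_result = list(call_result)
    return call_result


print(normalize(42))      # "42"
print(normalize((1, 2)))  # [1, 2]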