@@ -748,20 +748,32 @@ def to_frame(self, index=True, name=None) -> DataFrame:
             name = self._internal.index_names[0]
         elif isinstance(name, str):
             name = (name,)
-        scol = self.spark.column.alias(name_like_string(name))
-        sdf = self._internal.spark_frame.select(scol, NATURAL_ORDER_COLUMN_NAME)
+        return self._to_frame(index=index, names=[name])


+    def _to_frame(self, index, names):
         if index:
-            index_map = OrderedDict({name_like_string(name): self._internal.index_names[0]})
+            index_map = self._internal.index_map
+            data_columns = self._internal.index_spark_column_names
+            sdf = self._internal.spark_frame.select(
+                self._internal.index_spark_columns + [NATURAL_ORDER_COLUMN_NAME]
+            )
         else:
-            index_map = None  # type: ignore
+            index_map = None
+            data_columns = [name_like_string(label) for label in names]
+            sdf = self._internal.spark_frame.select(
+                [
+                    scol.alias(col)
+                    for scol, col in zip(self._internal.index_spark_columns, data_columns)
+                ]
+                + [NATURAL_ORDER_COLUMN_NAME]
+            )

         internal = InternalFrame(
             spark_frame=sdf,
             index_map=index_map,
-            column_labels=[name],
-            data_spark_columns=[scol_for(sdf, name_like_string(name))],
+            column_labels=names,
+            data_spark_columns=[scol_for(sdf, col) for col in data_columns],
         )
         return DataFrame(internal)

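For orientation, here is a minimal usage sketch of the single-level path after this refactor. It is not part of the change itself: it assumes a working Spark session with the databricks.koalas package installed, and the names used ("ticker", "price", "symbol") are illustrative only.

    # Hedged sketch: Index.to_frame now delegates to the shared _to_frame helper.
    import databricks.koalas as ks

    kdf = ks.DataFrame({"ticker": ["a", "b", "c"], "price": [100, 200, 300]})
    kdf = kdf.set_index("ticker")

    # index=True (the default): keep the original index and expose its values
    # as a data column labelled by `name`.
    print(kdf.index.to_frame(name="symbol").to_pandas())

    # index=False: the index values become a plain column over a default index,
    # i.e. the branch that aliases the index Spark columns to the new labels.
    print(kdf.index.to_frame(index=False, name="symbol").to_pandas())
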
@@ -2385,28 +2397,7 @@ def to_frame(self, index=True, name=None) -> DataFrame:
         else:
             raise TypeError("'name' must be a list / sequence of column names.")

-        sdf = self._internal.spark_frame.select(
-            [
-                scol.alias(name_like_string(label))
-                for scol, label in zip(self._internal.index_spark_columns, name)
-            ]
-            + [NATURAL_ORDER_COLUMN_NAME]
-        )
-
-        if index:
-            index_map = OrderedDict(
-                (name_like_string(label), n) for label, n in zip(name, self._internal.index_names)
-            )
-        else:
-            index_map = None  # type: ignore
-
-        internal = InternalFrame(
-            spark_frame=sdf,
-            index_map=index_map,
-            column_labels=name,
-            data_spark_columns=[scol_for(sdf, name_like_string(label)) for label in name],
-        )
-        return DataFrame(internal)
+        return self._to_frame(index=index, names=name)

     def to_pandas(self) -> pd.MultiIndex:
         """
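The multi-level path now reuses the same helper, with `names` carrying one label per index level. A corresponding hedged sketch, again assuming a koalas/Spark setup and using illustrative level names:

    # Hedged sketch: MultiIndex.to_frame requires `name` to be a list/sequence
    # of column names, one per level (otherwise the TypeError above is raised).
    import databricks.koalas as ks

    kdf = ks.DataFrame(
        {"year": [2019, 2019, 2020], "quarter": [1, 2, 1], "sales": [10, 20, 30]}
    ).set_index(["year", "quarter"])

    midx = kdf.index  # a koalas MultiIndex

    # Keep the index and also expose both levels as data columns "y" and "q".
    print(midx.to_frame(name=["y", "q"]).to_pandas())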