@@ -711,27 +711,14 @@ def to_pandas_batches(
711 711        # To reduce the number of edge cases to consider when working with the
712 712        # results of this, always return at least one DataFrame. See:
713 713        # b/428918844.
714-        series_map = {}
715-        for col in itertools.chain(self.value_columns, self.index_columns):
716-            dtype = self.expr.get_column_type(col)
717-            try:
718-                series_map[col] = pd.Series([], dtype=dtype)
719-            except pa.ArrowNotImplementedError:
720-                # PyArrow doesn't support creating an empty array with
721-                # db_dtypes.JSONArrowType, especially when nested.
722-                # Create with string type and then cast.
723-                if isinstance(dtype, pd.ArrowDtype):
724-                    safe_pa_type = bigframes.dtypes.to_storage_type(dtype.pyarrow_dtype)
725-                    # Create empty array with safe type, but preserve original dtype metadata
726-                    empty_array = pa.array([], type=safe_pa_type)
727-                    series_map[col] = pd.Series(
728-                        empty_array,
729-                        dtype=dtype,  # Use original dtype directly
730-                    )
731-                else:
732-                    # Fallback for other types that might error
733-                    series_map[col] = pd.Series([], dtype="object").astype(dtype)
734-        empty_val = pd.DataFrame(series_map)
714+        try:
715+            empty_arrow_table = self.expr.schema.to_pyarrow().empty_table()
716+        except pa.ArrowNotImplementedError:
717+            # Bug with some pyarrow versions, empty_table only supports base storage types, not extension types.
718+            empty_arrow_table = self.expr.schema.to_pyarrow(
719+                use_storage_types=True
720+            ).empty_table()
721+        empty_val = empty_arrow_table.to_pandas()
735 722        dfs = map(
736 723            lambda a: a[0],
737 724            itertools.zip_longest(
0 commit comments