
Commit c7810aa

reduce diff
Signed-off-by: varun-edachali-dbx <varun.edachali@databricks.com>
1 parent d53d1ea commit c7810aa

3 files changed: +5 -4 lines changed

src/databricks/sql/backend/sea/queue.py

Lines changed: 1 addition & 0 deletions
@@ -72,6 +72,7 @@ def build_queue(
             return JsonQueue(result_data.data)
         elif manifest.format == ResultFormat.ARROW_STREAM.value:
             if result_data.attachment is not None:
+                # direct results from Hybrid disposition
                 arrow_file = (
                     ResultSetDownloadHandler._decompress_data(result_data.attachment)
                     if lz4_compressed
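
For context, a minimal sketch of what the branch above amounts to: direct results from the Hybrid disposition arrive inline as bytes, are LZ4-decompressed when lz4_compressed is set, and are then read as an Arrow IPC stream. The function name and the use of lz4.frame / pyarrow.ipc here are illustrative assumptions, not the driver's ResultSetDownloadHandler code.

    # Illustrative sketch only; mirrors the branch above, not the library's
    # actual implementation.
    import lz4.frame
    import pyarrow
    import pyarrow.ipc

    def attachment_to_arrow_table(attachment: bytes, lz4_compressed: bool) -> pyarrow.Table:
        # Direct results from the Hybrid disposition arrive inline as raw bytes.
        raw = lz4.frame.decompress(attachment) if lz4_compressed else attachment
        # The bytes form an Arrow IPC stream; read every record batch into one table.
        reader = pyarrow.ipc.open_stream(raw)
        return reader.read_all()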

src/databricks/sql/backend/sea/utils/filters.py

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@ def _filter_sea_result_set(
         # Reuse the command_id from the original result set
         command_id = result_set.command_id
 
-        # Create an ExecuteResponse with the filtered data
+        # Create an ExecuteResponse for the filtered data
         execute_response = ExecuteResponse(
             command_id=command_id,
             status=result_set.status,
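
The step touched above boils down to carrying the original command identity over to a response object that describes only the filtered rows. A hedged sketch under assumed names; FilteredResponse is a stand-in dataclass, not the driver's ExecuteResponse, which carries more fields than shown in this hunk.

    # Stand-in types for illustration only.
    from dataclasses import dataclass
    from typing import Any, Callable, List

    @dataclass
    class FilteredResponse:
        command_id: Any
        status: Any
        rows: List[Any]

    def filter_result_rows(result_set, predicate: Callable[[Any], bool]) -> FilteredResponse:
        kept = [row for row in result_set.rows if predicate(row)]
        return FilteredResponse(
            command_id=result_set.command_id,  # reused from the original result set
            status=result_set.status,
            rows=kept,
        )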

src/databricks/sql/result_set.py

Lines changed: 3 additions & 3 deletions
@@ -55,7 +55,7 @@ def __init__(
 
         Parameters:
             :param connection: The parent connection that was used to execute this command
-            :param backend: The backend specialised backend client to be invoked in the fetch phase
+            :param backend: The specialised backend client to be invoked in the fetch phase
             :param arraysize: The max number of rows to fetch at a time (PEP-249)
             :param buffer_size_bytes: The size (in bytes) of the internal buffer + max fetch
             :param command_id: The command ID
@@ -334,7 +334,7 @@ def fetchmany_arrow(self, size: int) -> "pyarrow.Table":
             n_remaining_rows -= partial_results.num_rows
             self._next_row_index += partial_results.num_rows
 
-        return pyarrow.concat_tables(partial_result_chunks)
+        return pyarrow.concat_tables(partial_result_chunks, use_threads=True)
 
     def fetchmany_columnar(self, size: int):
         """
@@ -385,7 +385,7 @@ def fetchall_arrow(self) -> "pyarrow.Table":
                 for name, col in zip(results.column_names, results.column_table)
             }
             return pyarrow.Table.from_pydict(data)
-        return pyarrow.concat_tables(partial_result_chunks)
+        return pyarrow.concat_tables(partial_result_chunks, use_threads=True)
 
     def fetchall_columnar(self):
         """Fetch all (remaining) rows of a query result, returning them as a Columnar table."""

0 commit comments