Skip to content

Commit ac1a70a

Browse files
committed
Replace parquetQuery with parquetRead and parquetReadObjects
When onChunk is passed, the unneeded row-formatting step is skipped. Otherwise, the function name (parquetReadObjects) describes the behavior more precisely.
1 parent b6df0b7 commit ac1a70a

1 file changed

Lines changed: 11 additions & 4 deletions

File tree

src/lib/workers/parquetWorker.ts

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { ColumnData, parquetQuery } from 'hyparquet'
1+
import { ColumnData, parquetRead, parquetReadObjects } from 'hyparquet'
22
import { compressors } from 'hyparquet-compressors'
33
import { getParquetColumn } from '../getParquetColumn.js'
44
import { asyncBufferFrom } from '../utils.js'
@@ -51,11 +51,18 @@ self.onmessage = async ({ data }: { data: ClientMessage }) => {
5151
} catch (error) {
5252
postErrorMessage({ error: error as Error, queryId })
5353
}
54+
} else if (data.chunks) {
55+
function onChunk(chunk: ColumnData) { postChunkMessage({ chunk, queryId }) }
56+
const { rowStart, rowEnd } = data
57+
try {
58+
await parquetRead({ metadata, file, rowStart, rowEnd, compressors, onChunk })
59+
} catch (error) {
60+
postErrorMessage({ error: error as Error, queryId })
61+
}
5462
} else {
55-
const { rowStart, rowEnd, chunks } = data
56-
const onChunk = chunks ? (chunk: ColumnData) => { postChunkMessage({ chunk, queryId }) } : undefined
63+
const { rowStart, rowEnd } = data
5764
try {
58-
const result = await parquetQuery({ metadata, file, rowStart, rowEnd, compressors, onChunk })
65+
const result = await parquetReadObjects({ metadata, file, rowStart, rowEnd, compressors })
5966
postResultMessage({ result, queryId })
6067
} catch (error) {
6168
postErrorMessage({ error: error as Error, queryId })

0 commit comments

Comments (0)