Documentation ¶
Index ¶
- func NewArrowIPCStreamIterator(ctx context.Context, rpi rowscanner.ResultPageIterator, ...) rows.ArrowIPCStreamIterator
- func NewArrowRecordIterator(ctx context.Context, rpi rowscanner.ResultPageIterator, bi BatchIterator, ...) rows.ArrowBatchIterator
- func NewArrowRowScanner(resultSetMetadata *cli_service.TGetResultSetMetadataResp, ...) (rowscanner.RowScanner, dbsqlerr.DBError)
- type BatchIterator
- func NewBatchIterator(ipcIterator IPCStreamIterator, startRowOffset int64) BatchIterator
- func NewCloudBatchIterator(ctx context.Context, files []*cli_service.TSparkArrowResultLink, ...) (BatchIterator, dbsqlerr.DBError)
- func NewLocalBatchIterator(ctx context.Context, batches []*cli_service.TSparkArrowBatch, ...) (BatchIterator, dbsqlerr.DBError)
- type IPCStreamIterator
- type Queue
- type RowValues
- type SparkArrowBatch
- type SparkArrowRecord
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func NewArrowIPCStreamIterator ¶ added in v1.8.0
func NewArrowIPCStreamIterator(ctx context.Context, rpi rowscanner.ResultPageIterator, ipcIterator IPCStreamIterator, arrowSchemaBytes []byte, cfg config.Config) rows.ArrowIPCStreamIterator
NewArrowIPCStreamIterator creates a new iterator for Arrow IPC streams
func NewArrowRecordIterator ¶ added in v1.5.0
func NewArrowRecordIterator(ctx context.Context, rpi rowscanner.ResultPageIterator, bi BatchIterator, arrowSchemaBytes []byte, cfg config.Config) rows.ArrowBatchIterator
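A minimal construction sketch, assuming it runs inside this package; pageIterator, bi, arrowSchemaBytes, and cfg are illustrative names for values created elsewhere (see the BatchIterator constructors below for bi):

	// Illustrative only: pageIterator, bi, arrowSchemaBytes, and cfg are assumed
	// to have been created elsewhere.
	recordIterator := NewArrowRecordIterator(ctx, pageIterator, bi, arrowSchemaBytes, cfg)
	// recordIterator satisfies rows.ArrowBatchIterator and is consumed by the
	// public rows API; how its records are read is defined in the rows package.
	_ = recordIterator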
func NewArrowRowScanner ¶
func NewArrowRowScanner(resultSetMetadata *cli_service.TGetResultSetMetadataResp, rowSet *cli_service.TRowSet, cfg *config.Config, logger *dbsqllog.DBSQLLogger, ctx context.Context) (rowscanner.RowScanner, dbsqlerr.DBError)
NewArrowRowScanner returns an instance of RowScanner which handles arrow format results
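A sketch of wiring the scanner up from Thrift fetch results; the metadata and row set are assumed to come from TGetResultSetMetadata and TFetchResults responses obtained elsewhere, and the surrounding function is illustrative, not part of this API:

	func newScannerForPage(ctx context.Context, md *cli_service.TGetResultSetMetadataResp, rs *cli_service.TRowSet, cfg *config.Config, logger *dbsqllog.DBSQLLogger) (rowscanner.RowScanner, error) {
		scanner, err := NewArrowRowScanner(md, rs, cfg, logger, ctx)
		if err != nil {
			return nil, err // err is a dbsqlerr.DBError, which satisfies error
		}
		return scanner, nil
	}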
Types ¶
type BatchIterator ¶ added in v1.5.0
type BatchIterator interface {
	Next() (SparkArrowBatch, error)
	HasNext() bool
	Close()
}
BatchIterator is the interface for iterating through Arrow batches
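The interface lends itself to the usual drain loop; a sketch, with bi standing in for any BatchIterator and error handling reduced to an early return:

	defer bi.Close()
	for bi.HasNext() {
		batch, err := bi.Next()
		if err != nil {
			return err
		}
		// Consume the SparkArrowBatch (see the record loop under SparkArrowBatch
		// below), then release it before moving on.
		batch.Close()
	}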
func NewBatchIterator ¶ added in v1.5.0
func NewBatchIterator(ipcIterator IPCStreamIterator, startRowOffset int64) BatchIterator
NewBatchIterator creates a BatchIterator from an IPCStreamIterator
func NewCloudBatchIterator ¶ added in v1.6.1
func NewCloudBatchIterator(
	ctx context.Context,
	files []*cli_service.TSparkArrowResultLink,
	startRowOffset int64,
	cfg *config.Config,
) (BatchIterator, dbsqlerr.DBError)
NewCloudBatchIterator creates a cloud-based BatchIterator for backward compatibility
func NewLocalBatchIterator ¶ added in v1.6.1
func NewLocalBatchIterator(
	ctx context.Context,
	batches []*cli_service.TSparkArrowBatch,
	startRowOffset int64,
	arrowSchemaBytes []byte,
	cfg *config.Config,
) (BatchIterator, dbsqlerr.DBError)
NewLocalBatchIterator creates a local BatchIterator for backward compatibility
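A sketch of the inline case, assuming the batches and start offset are read from the ArrowBatches and StartRowOffset fields of a TRowSet returned by the server, and that arrowSchemaBytes holds the serialized schema:

	bi, err := NewLocalBatchIterator(ctx, rowSet.ArrowBatches, rowSet.StartRowOffset, arrowSchemaBytes, cfg)
	if err != nil {
		return err
	}
	defer bi.Close()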
type IPCStreamIterator ¶ added in v1.8.0
func NewCloudIPCStreamIterator ¶ added in v1.8.0
func NewCloudIPCStreamIterator(
	ctx context.Context,
	files []*cli_service.TSparkArrowResultLink,
	startRowOffset int64,
	cfg *config.Config,
) (IPCStreamIterator, dbsqlerr.DBError)
func NewLocalIPCStreamIterator ¶ added in v1.8.0
func NewLocalIPCStreamIterator(
	ctx context.Context,
	batches []*cli_service.TSparkArrowBatch,
	startRowOffset int64,
	arrowSchemaBytes []byte,
	cfg *config.Config,
) (IPCStreamIterator, dbsqlerr.DBError)
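The IPC stream iterators are the lower-level building block and can be wrapped back into a BatchIterator with NewBatchIterator. A sketch under the same assumptions as the local batch case above:

	ipcIt, err := NewLocalIPCStreamIterator(ctx, rowSet.ArrowBatches, rowSet.StartRowOffset, arrowSchemaBytes, cfg)
	if err != nil {
		return err
	}
	bi := NewBatchIterator(ipcIt, rowSet.StartRowOffset)
	defer bi.Close()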
type Queue ¶ added in v1.6.1
type RowValues ¶ added in v1.5.0
type RowValues interface {
	rowscanner.Delimiter
	Close()
	NColumns() int
	SetColumnValues(columnIndex int, values arrow.ArrayData) error
	IsNull(columnIndex int, rowNumber int64) bool
	Value(columnIndex int, rowNumber int64) (any, error)
	SetDelimiter(d rowscanner.Delimiter)
}
RowValues is an abstraction for holding the values for a set of rows
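A sketch of reading one cell, with rv an already-populated RowValues and col/row in range (both names are illustrative):

	if rv.IsNull(col, row) {
		// handle SQL NULL
	} else {
		v, err := rv.Value(col, row)
		if err != nil {
			return err
		}
		_ = v // the Go value for column col of row number row
	}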
func NewRowValues ¶ added in v1.5.0
func NewRowValues(d rowscanner.Delimiter, holders []columnValues) RowValues
type SparkArrowBatch ¶ added in v1.5.0
type SparkArrowBatch interface {
	rowscanner.Delimiter
	Next() (SparkArrowRecord, error)
	HasNext() bool
	Close()
}
SparkArrowBatch is an abstraction for a set of Arrow records
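A sketch of draining one batch; each record embeds arrow.Record, so it should be released once consumed:

	for batch.HasNext() {
		rec, err := batch.Next()
		if err != nil {
			return err
		}
		// rec is a SparkArrowRecord; use it, then release the underlying arrow.Record.
		rec.Release()
	}
	batch.Close()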
type SparkArrowRecord ¶ added in v1.5.0
type SparkArrowRecord interface {
	rowscanner.Delimiter
	arrow.Record
}
SparkArrowRecord is an abstraction for a single Arrow record
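Because the interface embeds arrow.Record, a SparkArrowRecord can be passed to anything that accepts an Arrow record; a sketch of walking its columns (rec is illustrative):

	for i := 0; i < int(rec.NumCols()); i++ {
		col := rec.Column(i) // an arrow.Array with rec.NumRows() values
		_ = col
	}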