pgutil

package module
v0.2.0
Published: Oct 23, 2025 License: MIT Imports: 13 Imported by: 0

README

pgutil

pgutil provides functionality for bulk updating/inserting rows in PostgreSQL.

License

MIT

Documentation

Overview

Package pgutil provides functionality for bulk updating/inserting rows in PostgreSQL.

Index

Constants

const MaxParams = queryutil.MaxParams

The maximum number of query parameters, as defined in PostgreSQL Limits.

Variables

var (
	ErrCreatePool         = errors.New("cannot create postgres pool")
	ErrDatabaseNotExist   = errors.New("postgres database does not exist")
	ErrInvalidCredentials = errors.New("invalid postgres credentials")
)

Functions

func BatchExecInTxAll

func BatchExecInTxAll(
	ctx context.Context,
	db any,
	txOptions TxOptions,
	queryFormat string,
	staticArgs []any,
	valuesClause string,
	valuesArgs []any,
	perBatchTimeout time.Duration,
) (rowsAffected int64, err error)

BatchExecInTxAll executes a batch of SQL queries within a single transaction. It dynamically builds the SQL queries and value collections, fitting as many values as possible (up to MaxParams) into each query. When processing a large number of rows, keep in mind the xmin horizon and the consequences of holding it back for a long time. The db argument must be of type PgxDb or StdDb.

Example:

const updateQueryFormat = `
	UPDATE items SET
		hash = batch.hash
	FROM (
		VALUES %s
	) AS batch (id, hash)
	WHERE items.id = batch.id
`
const valuesClause = "($1::bigint, $2::bytea)"
valuesArgs := []any{
	1, []byte("hash1"),
	2, []byte("hash2"),
	...
	100_000, []byte("hash100000"),
}
affected, err := BatchExecInTxAll(
	ctx, db, TxOptions{}, updateQueryFormat, nil, valuesClause, valuesArgs, 3*time.Second,
)

func BatchExecInTxEvery

func BatchExecInTxEvery(
	ctx context.Context,
	db any,
	txOptions TxOptions,
	queryFormat string,
	staticArgs []any,
	valuesClause string,
	valuesArgs []any,
	perBatchTimeout time.Duration,
) (rowsAffected int64, valuesArgsProcessed int, err error)

BatchExecInTxEvery executes a batch of SQL queries within multiple transactions (one transaction per query). It dynamically builds the SQL queries and value collections, fitting as many values as possible (up to MaxParams) into each query. The db argument must be of type PgxDb or StdDb.

Example:

const insertQueryFormat = `
	INSERT INTO items (id, hash) VALUES %s
	ON CONFLICT (id) DO UPDATE SET
		hash = excluded.hash
`
const valuesClause = "($1, $2)"
valuesArgs := []any{
	1, []byte("hash1"),
	2, []byte("hash2"),
	...
	100_000, []byte("hash100000"),
}
affected, processed, err := BatchExecInTxEvery(
	ctx, db, TxOptions{}, insertQueryFormat, nil, valuesClause, valuesArgs, 3*time.Second,
)

func BatchExecWithTx

func BatchExecWithTx(
	ctx context.Context,
	tx any,
	queryFormat string,
	staticArgs []any,
	valuesClause string,
	valuesArgs []any,
	perBatchTimeout time.Duration,
) (rowsAffected int64, valuesArgsProcessed int, err error)

BatchExecWithTx executes a batch of SQL queries within the given transaction. It dynamically builds the SQL queries and value collections, fitting as many values as possible (up to MaxParams) into each query. When processing a large number of rows, keep in mind the xmin horizon and the consequences of holding it back for a long time. The tx argument must be of type PgxTx or StdTx.

Example:

const updateQueryFormat = `
	UPDATE items SET
		hash = batch.hash,
		str = $1
	FROM (
		VALUES %s
	) AS batch (id, hash)
	WHERE items.id = batch.id
`
const valuesClause = "($2::bigint, $3::bytea)"
staticArgs := []any{"str"}
valuesArgs := []any{
	1, []byte("hash1"),
	2, []byte("hash2"),
	...
	100_000, []byte("hash100000"),
}
affected, processed, err := BatchExecWithTx(
	ctx, tx, updateQueryFormat, staticArgs, valuesClause, valuesArgs, 3*time.Second,
)

func Connect

func Connect(ctx context.Context, dataSourceName string, timeout time.Duration) (*pgxpool.Pool, error)

Connect creates a new Pool and tries to ping it within the given timeout.
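
For illustration, a minimal connection sketch (not part of the package docs; the DSN and timeout are placeholders):

pool, err := Connect(
	ctx,
	"postgres://user:pass@localhost:5432/app?sslmode=disable",
	5*time.Second,
)
if err != nil {
	// errors.Is against ErrCreatePool, ErrDatabaseNotExist or
	// ErrInvalidCredentials can presumably distinguish failure causes
	return err
}
defer pool.Close()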

func ConnectWithConfig

func ConnectWithConfig(ctx context.Context, config *pgxpool.Config, timeout time.Duration) (*pgxpool.Pool, error)

ConnectWithConfig creates a new Pool and tries to ping it within the given timeout. config must have been created by ParseConfig.
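
A minimal sketch combining ParseConfig and ConnectWithConfig (the DSN and pool size are illustrative assumptions):

config, err := ParseConfig("postgres://user:pass@localhost:5432/app")
if err != nil {
	return err
}
config.MaxConns = 10 // pgxpool.Config field; tune for your workload
pool, err := ConnectWithConfig(ctx, config, 5*time.Second)
if err != nil {
	return err
}
defer pool.Close()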

func OpenDBFromPool

func OpenDBFromPool(pool *pgxpool.Pool, opts ...stdlib.OptionOpenDB) *sql.DB

OpenDBFromPool creates a new *sql.DB from the given *pgxpool.Pool, see stdlib.OpenDBFromPool.
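
A sketch of bridging to database/sql (assuming pool was obtained via Connect, and the query arguments are those from the BatchExecInTxAll example above):

sqlDB := OpenDBFromPool(pool)
// *sql.DB satisfies StdDb, so it can be passed as db to the Batch* functions
affected, err := BatchExecInTxAll(
	ctx, sqlDB, TxOptions{}, updateQueryFormat, nil, valuesClause, valuesArgs, 3*time.Second,
)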

func ParseConfig

func ParseConfig(dataSourceName string) (*pgxpool.Config, error)

ParseConfig builds a Config from dataSourceName, see pgxpool.ParseConfig.

Types

type PgxDb

type PgxDb interface {
	BeginTx(ctx context.Context, txOptions pgx.TxOptions) (pgx.Tx, error)
}

type PgxTx

type PgxTx interface {
	Exec(ctx context.Context, sql string, arguments ...any) (pgconn.CommandTag, error)
	Commit(ctx context.Context) error
	Rollback(ctx context.Context) error
}

type StdDb

type StdDb interface {
	BeginTx(ctx context.Context, opts *sql.TxOptions) (*sql.Tx, error)
}

type StdTx

type StdTx interface {
	ExecContext(ctx context.Context, query string, args ...any) (sql.Result, error)
	Commit() error
	Rollback() error
}
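
These interfaces are satisfied by the usual concrete types: *pgxpool.Pool and *sql.DB implement PgxDb and StdDb, while pgx.Tx and *sql.Tx implement PgxTx and StdTx. A sketch using the pgx flavor (assuming pool and the query arguments from the BatchExecWithTx example above):

// *pgxpool.Pool implements PgxDb; the pgx.Tx it begins implements PgxTx.
tx, err := pool.BeginTx(ctx, pgx.TxOptions{})
if err != nil {
	return err
}
affected, processed, err := BatchExecWithTx(
	ctx, tx, updateQueryFormat, staticArgs, valuesClause, valuesArgs, 3*time.Second,
)
if err != nil {
	_ = tx.Rollback(ctx)
	return err
}
return tx.Commit(ctx)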

type TxAccessMode

type TxAccessMode = pgx.TxAccessMode
const (
	ReadWrite TxAccessMode = pgx.ReadWrite
	ReadOnly  TxAccessMode = pgx.ReadOnly
)

type TxDeferrableMode

type TxDeferrableMode = pgx.TxDeferrableMode
const (
	Deferrable    TxDeferrableMode = pgx.Deferrable
	NotDeferrable TxDeferrableMode = pgx.NotDeferrable
)

type TxIsoLevel

type TxIsoLevel = pgx.TxIsoLevel
const (
	Serializable    TxIsoLevel = pgx.Serializable
	RepeatableRead  TxIsoLevel = pgx.RepeatableRead
	ReadCommitted   TxIsoLevel = pgx.ReadCommitted
	ReadUncommitted TxIsoLevel = pgx.ReadUncommitted
)

type TxOptions

type TxOptions struct {
	IsoLevel   TxIsoLevel
	AccessMode TxAccessMode

	DeferrableMode TxDeferrableMode // pgx only
	BeginQuery     string           // pgx only
	CommitQuery    string           // pgx only
}
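
For example, to run each batch in a serializable read-write transaction (an illustrative sketch; the remaining arguments are those from the examples above):

txOptions := TxOptions{
	IsoLevel:   Serializable,
	AccessMode: ReadWrite,
}
affected, err := BatchExecInTxAll(
	ctx, db, txOptions, updateQueryFormat, nil, valuesClause, valuesArgs, 3*time.Second,
)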

Directories

Path		Synopsis
queryutil	Package queryutil provides utility functions to generate queries and values batches for bulk updating/inserting rows in PostgreSQL.
