Documentation
¶
Index ¶
- Variables
- func NewPseudoanonymizer(storage common.TokenStorage) (common.Pseudoanonymizer, error)
- type DataTokenizer
- type MySQLTokenizeQuery
- func (encryptor *MySQLTokenizeQuery) ID() string
- func (encryptor *MySQLTokenizeQuery) OnBind(ctx context.Context, statement sqlparser.Statement, values []base.BoundValue) ([]base.BoundValue, bool, error)
- func (encryptor *MySQLTokenizeQuery) OnQuery(ctx context.Context, query mysql.OnQueryObject) (mysql.OnQueryObject, bool, error)
- type PostgreSQLTokenizeQuery
- func (encryptor *PostgreSQLTokenizeQuery) ID() string
- func (encryptor *PostgreSQLTokenizeQuery) OnBind(ctx context.Context, parseResult *pg_query.ParseResult, ...) ([]base.BoundValue, bool, error)
- func (encryptor *PostgreSQLTokenizeQuery) OnQuery(ctx context.Context, query postgresql.OnQueryObject) (postgresql.OnQueryObject, bool, error)
- type TokenEncryptor
- type TokenProcessor
Constants ¶
This section is empty.
Variables ¶
var ErrDataTypeMismatch = errors.New("requested TokenType not match stored TokenType")
ErrDataTypeMismatch is used to show that the requested data type does not match the serialized data type of the stored value
var ErrGenerationRandomValue = errors.New("can't generate new random value, try count exceed")
ErrGenerationRandomValue is returned when a new random value (one that wasn't generated before) can't be produced and the count of tries to generate another value is exceeded
Functions ¶
func NewPseudoanonymizer ¶
func NewPseudoanonymizer(storage common.TokenStorage) (common.Pseudoanonymizer, error)
NewPseudoanonymizer creates, initializes and returns a new instance of Pseudoanonymizer
Types ¶
type DataTokenizer ¶
type DataTokenizer struct {
// contains filtered or unexported fields
}
DataTokenizer tokenizes and detokenizes data buffers.
func NewDataTokenizer ¶
func NewDataTokenizer(tokenizer common.Pseudoanonymizer) (*DataTokenizer, error)
NewDataTokenizer makes a new data buffer tokenizer based on provided pseudoanonymizer.
func (*DataTokenizer) Detokenize ¶
func (t *DataTokenizer) Detokenize(data []byte, context common.TokenContext, setting config.ColumnEncryptionSetting) ([]byte, error)
Detokenize the data in given context with provided settings.
func (*DataTokenizer) Tokenize ¶
func (t *DataTokenizer) Tokenize(data []byte, context common.TokenContext, setting config.ColumnEncryptionSetting) ([]byte, error)
Tokenize the data in given context with provided settings.
type MySQLTokenizeQuery ¶
type MySQLTokenizeQuery struct {
// contains filtered or unexported fields
}
MySQLTokenizeQuery replaces tokenized data inside AcraStructs/AcraBlocks and changes WHERE conditions to support searchable tokenization
func NewMySQLTokenizeQuery ¶
func NewMySQLTokenizeQuery(schemaStore config.TableSchemaStore, tokenEncryptor *TokenEncryptor) *MySQLTokenizeQuery
NewMySQLTokenizeQuery returns a MySQLTokenizeQuery with a coder for MySQL
func (*MySQLTokenizeQuery) ID ¶
func (encryptor *MySQLTokenizeQuery) ID() string
ID returns name of this QueryObserver.
func (*MySQLTokenizeQuery) OnBind ¶
func (encryptor *MySQLTokenizeQuery) OnBind(ctx context.Context, statement sqlparser.Statement, values []base.BoundValue) ([]base.BoundValue, bool, error)
OnBind processes bound values for prepared statements.
Searchable tokenization rewrites WHERE clauses with equality comparisons like this:
WHERE column = 'value' ===> WHERE column = tokenize('value')
If the query is a parameterized prepared query then OnQuery() rewriting yields this:
WHERE column = $1 ===> WHERE column = tokenize($1)
and actual "value" is passed via parameters, visible here in OnBind().
func (*MySQLTokenizeQuery) OnQuery ¶
func (encryptor *MySQLTokenizeQuery) OnQuery(ctx context.Context, query mysql.OnQueryObject) (mysql.OnQueryObject, bool, error)
OnQuery processes query text before database sees it.
Tokenized searchable encryption rewrites WHERE clauses with equality comparisons like this:
WHERE column = 'value' ===> WHERE column = tokenize('value')
If the query is a parameterized prepared query then OnQuery() rewriting yields this:
WHERE column = $1 ===> WHERE column = tokenize($1)
and actual "value" is passed via parameters later. See OnBind() for details.
type PostgreSQLTokenizeQuery ¶
type PostgreSQLTokenizeQuery struct {
// contains filtered or unexported fields
}
PostgreSQLTokenizeQuery replaces tokenized data inside AcraStructs/AcraBlocks and changes WHERE conditions to support searchable tokenization
func NewPostgresqlTokenizeQuery ¶
func NewPostgresqlTokenizeQuery(schemaStore config.TableSchemaStore, tokenEncryptor *TokenEncryptor) *PostgreSQLTokenizeQuery
NewPostgresqlTokenizeQuery returns a PostgreSQLTokenizeQuery with a coder for PostgreSQL
func (*PostgreSQLTokenizeQuery) ID ¶
func (encryptor *PostgreSQLTokenizeQuery) ID() string
ID returns name of this QueryObserver.
func (*PostgreSQLTokenizeQuery) OnBind ¶
func (encryptor *PostgreSQLTokenizeQuery) OnBind(ctx context.Context, parseResult *pg_query.ParseResult, values []base.BoundValue) ([]base.BoundValue, bool, error)
OnBind processes bound values for prepared statements.
Searchable tokenization rewrites WHERE clauses with equality comparisons like this:
WHERE column = 'value' ===> WHERE column = tokenize('value')
If the query is a parameterized prepared query then OnQuery() rewriting yields this:
WHERE column = $1 ===> WHERE column = tokenize($1)
and actual "value" is passed via parameters, visible here in OnBind().
func (*PostgreSQLTokenizeQuery) OnQuery ¶
func (encryptor *PostgreSQLTokenizeQuery) OnQuery(ctx context.Context, query postgresql.OnQueryObject) (postgresql.OnQueryObject, bool, error)
OnQuery processes query text before database sees it.
Tokenized searchable encryption rewrites WHERE clauses with equality comparisons like this:
WHERE column = 'value' ===> WHERE column = tokenize('value')
If the query is a parameterized prepared query then OnQuery() rewriting yields this:
WHERE column = $1 ===> WHERE column = tokenize($1)
and actual "value" is passed via parameters later. See OnBind() for details.
type TokenEncryptor ¶
type TokenEncryptor struct {
// contains filtered or unexported fields
}
TokenEncryptor adds a hash prefix to AcraStructs generated with encryptor.AcrawriterDataEncryptor
func NewTokenEncryptor ¶
func NewTokenEncryptor(tokenizer *DataTokenizer) (*TokenEncryptor, error)
NewTokenEncryptor returns a new TokenEncryptor
func (*TokenEncryptor) EncryptWithClientID ¶
func (e *TokenEncryptor) EncryptWithClientID(clientID, data []byte, setting configCE.ColumnEncryptionSetting) ([]byte, error)
EncryptWithClientID tokenizes data according to the setting
type TokenProcessor ¶
type TokenProcessor struct {
// contains filtered or unexported fields
}
TokenProcessor implements a processor that tokenizes/detokenizes data for acra-server; used in the decryptor module
func NewTokenProcessor ¶
func NewTokenProcessor(tokenizer *DataTokenizer) (*TokenProcessor, error)
NewTokenProcessor returns a new processor