Documentation ¶
Index ¶
- Constants
- Variables
- func AggregateTokenContextToBytes(context TokenContext) []byte
- func EmbedMetadata(data []byte, metadata TokenMetadata) []byte
- func EncodeTokenValue(value *TokenValue) ([]byte, error)
- func ValidateTokenType(value TokenType) error
- type Anonymizer
- type Email
- type Encryptor
- type MetadataContainer
- func (*MetadataContainer) Descriptor() ([]byte, []int) (deprecated)
- func (x *MetadataContainer) GetAccessed() int64
- func (x *MetadataContainer) GetCreated() int64
- func (x *MetadataContainer) GetData() []byte
- func (x *MetadataContainer) GetDisabled() bool
- func (*MetadataContainer) ProtoMessage()
- func (x *MetadataContainer) ProtoReflect() protoreflect.Message
- func (x *MetadataContainer) Reset()
- func (x *MetadataContainer) String() string
- type Pseudoanonymizer
- type TokenAction
- type TokenContext
- type TokenMetadata
- type TokenSetting
- type TokenStorage
- type TokenType
- func (TokenType) Descriptor() protoreflect.EnumDescriptor
- func (x TokenType) Enum() *TokenType
- func (TokenType) EnumDescriptor() ([]byte, []int) (deprecated)
- func (x TokenType) Number() protoreflect.EnumNumber
- func (x TokenType) String() string
- func (x TokenType) ToConfigString() (val string, err error)
- func (TokenType) Type() protoreflect.EnumType
- type TokenValue
Constants ¶
const DefaultAccessTimeGranularity = 24 * time.Hour
DefaultAccessTimeGranularity is the default difference in time required for the access time to be updated.
Variables ¶
var ( ErrUnknownTokenType = errors.New("unknown token type") ErrUnsupportedTokenType = errors.New("token type not supported") )
Validation errors
var ( TokenType_name = map[int32]string{ 0: "Unknown", 1: "Int32", 2: "Int64", 3: "String", 4: "Bytes", 5: "Email", 6: "Int32Str", 7: "Int64Str", } TokenType_value = map[string]int32{ "Unknown": 0, "Int32": 1, "Int64": 2, "String": 3, "Bytes": 4, "Email": 5, "Int32Str": 6, "Int64Str": 7, } )
Enum value maps for TokenType.
var ErrTokenDisabled = errors.New("disabled token accessed")
ErrTokenDisabled is returned when a token was found, but is explicitly disabled
var ErrTokenExists = errors.New("token already exists")
ErrTokenExists is returned by TokenStorage.Save when a token with given ID and context already exists in the storage
var ErrTokenNotFound = errors.New("token not found in storage")
ErrTokenNotFound is returned when a token wasn't found in storage.
var File_metadata_proto protoreflect.FileDescriptor
var File_tokenTypes_proto protoreflect.FileDescriptor
Functions ¶
func AggregateTokenContextToBytes ¶
func AggregateTokenContextToBytes(context TokenContext) []byte
AggregateTokenContextToBytes returns a single byte array that serves as a digest of the given context.
func EmbedMetadata ¶
func EmbedMetadata(data []byte, metadata TokenMetadata) []byte
EmbedMetadata composes data with additional metadata into a single byte slice.
func EncodeTokenValue ¶
func EncodeTokenValue(value *TokenValue) ([]byte, error)
EncodeTokenValue serializes token value into bytes.
func ValidateTokenType ¶
ValidateTokenType returns nil if value is a supported TokenType, or an error otherwise.
Types ¶
type Anonymizer ¶
type Anonymizer interface { // generic Anonymize(data interface{}, context TokenContext, dataType TokenType) (interface{}, error) // type specific AnonymizeInt32(value int32, context TokenContext) (int32, error) AnonymizeInt64(value int64, context TokenContext) (int64, error) AnonymizeBytes(value []byte, context TokenContext) ([]byte, error) AnonymizeStr(value string, context TokenContext) (string, error) AnonymizeEmail(email Email, context TokenContext) (Email, error) }
Anonymizer interface provides all supported methods to anonymize data.
type Encryptor ¶
type Encryptor interface { Encrypt(data, context TokenContext) ([]byte, error) Decrypt(data, context TokenContext) ([]byte, error) }
Encryptor interface used as abstraction for token encryption
type MetadataContainer ¶
type MetadataContainer struct { Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` Created int64 `protobuf:"varint,2,opt,name=created,proto3" json:"created,omitempty"` Accessed int64 `protobuf:"varint,3,opt,name=accessed,proto3" json:"accessed,omitempty"` Disabled bool `protobuf:"varint,4,opt,name=disabled,proto3" json:"disabled,omitempty"` // contains filtered or unexported fields }
MetadataContainer is Protobuf container for TokenMetadata.
func (*MetadataContainer) Descriptor
deprecated
func (*MetadataContainer) Descriptor() ([]byte, []int)
Deprecated: Use MetadataContainer.ProtoReflect.Descriptor instead.
func (*MetadataContainer) GetAccessed ¶
func (x *MetadataContainer) GetAccessed() int64
func (*MetadataContainer) GetCreated ¶
func (x *MetadataContainer) GetCreated() int64
func (*MetadataContainer) GetData ¶
func (x *MetadataContainer) GetData() []byte
func (*MetadataContainer) GetDisabled ¶
func (x *MetadataContainer) GetDisabled() bool
func (*MetadataContainer) ProtoMessage ¶
func (*MetadataContainer) ProtoMessage()
func (*MetadataContainer) ProtoReflect ¶
func (x *MetadataContainer) ProtoReflect() protoreflect.Message
func (*MetadataContainer) Reset ¶
func (x *MetadataContainer) Reset()
func (*MetadataContainer) String ¶
func (x *MetadataContainer) String() string
type Pseudoanonymizer ¶
type Pseudoanonymizer interface { Anonymizer AnonymizeConsistently(data interface{}, context TokenContext, dataType TokenType) (interface{}, error) Deanonymize(data interface{}, context TokenContext, dataType TokenType) (interface{}, error) }
Pseudoanonymizer extends Anonymizer interface with methods to anonymize consistently and deanonymize value
type TokenAction ¶
type TokenAction int
TokenAction is an action to perform during VisitMetadata.
const ( TokenContinue TokenAction = iota TokenEnable TokenDisable TokenRemove )
Available TokenAction values.
type TokenContext ¶
TokenContext is used as metadata for each token.
type TokenMetadata ¶
TokenMetadata is additional bookkeeping information kept by TokenStorage along with the token value.
func ExtractMetadata ¶
func ExtractMetadata(data []byte) ([]byte, TokenMetadata, error)
ExtractMetadata extracts data and metadata back from a composite byte slice.
func NewTokenMetadata ¶
func NewTokenMetadata() TokenMetadata
NewTokenMetadata creates metadata for a newly created token entry.
func (*TokenMetadata) AccessedBefore ¶
AccessedBefore checks that the token has been accessed before the specified time instance with given granularity.
func (TokenMetadata) Equal ¶
func (t TokenMetadata) Equal(other TokenMetadata) bool
Equal returns true if this metadata is equal to the other one.
type TokenSetting ¶
type TokenSetting interface { IsTokenized() bool IsConsistentTokenization() bool GetTokenType() TokenType }
TokenSetting describes how a column should be tokenized.
type TokenStorage ¶
type TokenStorage interface { Save(id []byte, context TokenContext, data []byte) error Get(id []byte, context TokenContext) ([]byte, error) Stat(id []byte, context TokenContext) (TokenMetadata, error) // Iterate over token metadata in the storage. // In addition to metadata, length of data for an entry is provided for reference. (Can't access data without context information). // The iteration order is unspecified. If the storage in concurrently modified during iteration, // modifications may or may not be visible during the iteration, and entries may be visited multiple times. // Return the desired action to do with the token, usually TokenContinue to simply continue iteration. // Return a non-nil error to stop iteration and return this error. VisitMetadata(cb func(dataLength int, metadata TokenMetadata) (TokenAction, error)) error SetAccessTimeGranularity(granularity time.Duration) error }
TokenStorage interface abstracts storage implementation
type TokenType ¶
type TokenType int32
TokenType defines tokenization type.
func NormalizeTokenType ¶
NormalizeTokenType checks the token type and replaces it with the default value if the type is not supported or invalid.
func (TokenType) Descriptor ¶
func (TokenType) Descriptor() protoreflect.EnumDescriptor
func (TokenType) EnumDescriptor
deprecated
func (TokenType) Number ¶
func (x TokenType) Number() protoreflect.EnumNumber
func (TokenType) ToConfigString ¶
ToConfigString converts value to string used in encryptor_config
func (TokenType) Type ¶
func (TokenType) Type() protoreflect.EnumType
type TokenValue ¶
type TokenValue struct { Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` Type TokenType `protobuf:"varint,2,opt,name=type,proto3,enum=common.TokenType" json:"type,omitempty"` // contains filtered or unexported fields }
TokenValue keeps serialized token value.
func TokenValueFromData ¶
func TokenValueFromData(data []byte) (*TokenValue, error)
TokenValueFromData deserializes token value from bytes.
func (*TokenValue) Descriptor
deprecated
func (*TokenValue) Descriptor() ([]byte, []int)
Deprecated: Use TokenValue.ProtoReflect.Descriptor instead.
func (*TokenValue) GetType ¶
func (x *TokenValue) GetType() TokenType
func (*TokenValue) GetValue ¶
func (x *TokenValue) GetValue() []byte
func (*TokenValue) ProtoMessage ¶
func (*TokenValue) ProtoMessage()
func (*TokenValue) ProtoReflect ¶
func (x *TokenValue) ProtoReflect() protoreflect.Message
func (*TokenValue) Reset ¶
func (x *TokenValue) Reset()
func (*TokenValue) String ¶
func (x *TokenValue) String() string