nuix

package module
v0.0.0-...-b8842e2
Published: Nov 23, 2020 License: MIT Imports: 13 Imported by: 0

README

go-nuix-api

Client API written in Go for communicating with the Nuix RESTful Service.

Nuix creates innovative software that empowers organizations to simply and quickly find the truth from any data in a digital world. Read more about Nuix...

THIS IS A WIP, DON'T USE IT IN PRODUCTION YET.

Example

...

Contributing

...

License

...

Documentation

Constants

const (
	// LicEDiscoveryWorkstation holds the short-name for
	// the Nuix eDiscovery Workstation-licence
	LicEDiscoveryWorkstation = "enterprise-workstation"

	// LicInvestigationAndResponse holds the short-name for
	// the Nuix Investigation and Response-licence
	LicInvestigationAndResponse = "law-enforcement-desktop"

	// LicWebReviewer holds the short-name for
	// the Nuix Web Reviewer-licence
	LicWebReviewer = "web-reviewer"

	// LicWorkflow holds the short-name for
	// the Nuix Workflow-licence
	LicWorkflow = "workflow"
)

Variables

This section is empty.

Functions

This section is empty.

Types

type API

type API interface {
	Ping() error
	About(ctx context.Context) (*About, error)
	Licences(ctx context.Context) ([]Licence, error)
	NewClient(ctx context.Context, username, password, licence string, workers int) (*Client, error)
	NewRandomClient(ctx context.Context, username, password string) (*Client, error)
}

API is an interface to call Nuix REST API

func NewService

func NewService(url string) API

NewService creates a new service with an HTTP client for the given URL
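
A minimal usage sketch (the import path, service URL, credentials and worker count are placeholders, not values defined by this package):

package main

import (
	"context"
	"log"

	nuix "example.com/go-nuix-api" // hypothetical import path; use the real module path
)

func main() {
	ctx := context.Background()

	// Create the service against a running Nuix RESTful Service.
	svc := nuix.NewService("https://nuix.example.com") // placeholder URL

	// Verify the service is reachable before claiming a licence.
	if err := svc.Ping(); err != nil {
		log.Fatalf("nuix service unreachable: %v", err)
	}

	// Acquire an authenticated client with an eDiscovery Workstation licence and two workers.
	client, err := svc.NewClient(ctx, "user", "password", nuix.LicEDiscoveryWorkstation, 2)
	if err != nil {
		log.Fatalf("could not acquire client: %v", err)
	}
	// Close releases the licence; it must be called before the program exits.
	defer client.Close(ctx)
}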

type About

type About struct {
	EngineVersion      string    `json:"engineVersion"`
	LicenceSource      string    `json:"licenceSource"`
	NuixRestfulVersion string    `json:"nuixRestfulVersion"`
	Server             string    `json:"server"`
	ServerID           string    `json:"serverId"`
	StartupTime        time.Time `json:"startupTime"`
}

type Call

type Call struct {
	Method      string
	URL         string
	Token       string
	Body        interface{}
	Context     context.Context
	ContentType string
	// contains filtered or unexported fields
}

type Case

type Case struct {
	// contains filtered or unexported fields
}

Case holds the client and the top-level information for the case

func (*Case) AddChildCase

func (c *Case) AddChildCase(ctx context.Context, childID string) error

AddChildCase adds a child-case to a compound-case

func (*Case) AddChildCases

func (c *Case) AddChildCases(ctx context.Context, caseIDs []string) error

AddChildCases adds multiple child-cases to a compound-case at once

func (*Case) AddKeystore

func (c *Case) AddKeystore(ctx context.Context, file FileRequest) error

AddKeystore adds a keystore file to the case. FIXME: choose file from path?

func (*Case) AddKeystoreFromFile

func (c *Case) AddKeystoreFromFile(ctx context.Context, filepath string) error

func (*Case) ChildCases

func (c *Case) ChildCases(ctx context.Context) ([]*CaseInfo, error)

ChildCases returns all child-cases associated with the compound-case

func (*Case) Close

func (c *Case) Close(ctx context.Context) error

Close closes a case in a user session. Cases should be closed when not in use or they will remain locked.

func (*Case) CreateProcessor

func (c *Case) CreateProcessor() *Processor

CreateProcessor creates a new processor suitable for using to load new data into the case.

func (*Case) Delete

func (c *Case) Delete(ctx context.Context) error

Delete deletes the case from the inventory

func (*Case) DeleteAuditFiles

func (c *Case) DeleteAuditFiles(ctx context.Context) error

DeleteAuditFiles deletes all audit reports and audit verification files for the specified case.

You may want to do this if an incorrect audit verification file has been uploaded for a case. This will allow the Nuix Engine to generate a new audit file.

func (*Case) DeleteKeystore

func (c *Case) DeleteKeystore(ctx context.Context, fileToDeleteRegex string) error

DeleteKeystore deletes the requested keystore from the case's keystore directory.

func (*Case) DeleteWithDescendants

func (c *Case) DeleteWithDescendants(ctx context.Context) error

DeleteWithDescendants deletes the case and its descendants from the inventory

func (*Case) Digest

func (c *Case) Digest(ctx context.Context) (*CaseDigest, error)

func (*Case) Functions

func (c *Case) Functions(ctx context.Context) ([]string, error)

Functions returns the functions available for the case

func (*Case) FunctionsStatus

func (c *Case) FunctionsStatus(ctx context.Context) ([]FunctionInfo, error)

FunctionsStatus returns the status of the async-functions for the case

func (*Case) GetKeystores

func (c *Case) GetKeystores(ctx context.Context) ([]string, error)

GetKeystores returns a list of keystore files available for a case.

func (*Case) ID

func (c *Case) ID() string

ID returns the case ID

func (*Case) Info

func (c *Case) Info() *CaseInfo

Info returns the top-level information for the case

func (*Case) Metadata

func (c *Case) Metadata(ctx context.Context) ([]CaseMetadata, error)

Metadata returns metadata-fields available for the case

func (*Case) RunFunction

func (c *Case) RunFunction(ctx context.Context, method string) (*Function, error)

RunFunction performs a function against the case and returns the functionKey
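
A sketch of running an asynchronous case function and polling it until it completes (the polling interval is arbitrary and the import path is hypothetical):

package main

import (
	"context"
	"fmt"
	"time"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// waitForFunction runs a function against the case and polls its status until it is done.
func waitForFunction(ctx context.Context, c *nuix.Case, method string) (*nuix.FunctionInfo, error) {
	fn, err := c.RunFunction(ctx, method)
	if err != nil {
		return nil, err
	}
	for {
		info, err := fn.Status(ctx)
		if err != nil {
			return nil, err
		}
		if info.Done {
			if !info.HasSuccessfullyCompleted {
				return info, fmt.Errorf("function %s finished unsuccessfully: %s", info.FunctionKey, info.Status)
			}
			return info, nil
		}
		fmt.Printf("%s: %.1f%% complete\n", info.FunctionKey, info.PercentComplete)
		time.Sleep(5 * time.Second)
	}
}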

func (*Case) Search

func (c *Case) Search(ctx context.Context, request SearchRequest) (*SearchResponse, error)

func (*Case) Subset

func (c *Case) Subset(ctx context.Context, query, location string, request CaseSubsetRequest) (*Function, error)

Subset creates a case subset. When creating the subset, the location must be a relative path, or an absolute path that is within a configured inventory location.
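
A sketch of creating a subset (the query, location and metadata values are placeholders); the returned Function can be polled with Status(ctx) as shown above:

package main

import (
	"context"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// createSubset creates a case subset containing only items tagged "responsive".
func createSubset(ctx context.Context, c *nuix.Case) (*nuix.Function, error) {
	var req nuix.CaseSubsetRequest
	req.CaseMetadata.Name = "Responsive subset"
	req.CaseMetadata.Investigator = "Jane Doe"
	req.CopyTags = true
	req.CopyCustodians = true
	req.IncludeFamilies = true

	// The location must be a relative path, or an absolute path
	// within a configured inventory location.
	return c.Subset(ctx, "tag:responsive", "subsets/responsive-subset", req)
}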

type CaseDigest

type CaseDigest struct {
	BinaryStoreLocation string   `json:"binaryStoreLocation,omitempty"`
	CaseID              string   `json:"caseId,omitempty"`
	ChildCases          []string `json:"childCases,omitempty"`
	Compound            bool     `json:"compound,omitempty"`
	CreationDate        int      `json:"creationDate,omitempty"`
	Description         string   `json:"description,omitempty"`
	Elastic             bool     `json:"elastic,omitempty"`
	Investigator        string   `json:"investigator,omitempty"`
	IsOpen              bool     `json:"isOpen,omitempty"`
	Name                string   `json:"name,omitempty"`
	Path                string   `json:"path,omitempty"`
	PhysicalPath        string   `json:"physicalPath,omitempty"`
	ProductName         string   `json:"productName,omitempty"`
	ServerID            string   `json:"serverId,omitempty"`
	URL                 string   `json:"url,omitempty"`
	Version             string   `json:"version,omitempty"`
}

type CaseInfo

type CaseInfo struct {
	// The path to the file system binary store for this case.
	// It can be shared by cases and should be accessible from all workers.
	// By default it will be created within the case Stores folder.
	BinaryStoreLocation string `json:"binaryStoreLocation,omitempty"`

	// When the case was created
	CaseCreationDate int `json:"caseCreationDate,omitempty"`

	// A longer description of the case.
	CaseDescription string `json:"caseDescription,omitempty"`

	// ID of the case
	CaseID string `json:"caseId,omitempty"`

	// Timezone for the case
	CaseInvestigationTimeZone string `json:"caseInvestigationTimeZone,omitempty"`

	// The name of the investigator creating the case.
	CaseInvestigator string `json:"caseInvestigator,omitempty"`

	// A short, human-readable name for the case.
	CaseName       string `json:"caseName,omitempty"`
	CasePath       string `json:"casePath,omitempty"`
	CasePathParent string `json:"casePathParent,omitempty"`
	CaseSize       int    `json:"caseSize,omitempty"`

	// true to create a compound case, false to create a simple case.
	Compound     bool   `json:"compound,omitempty"`
	CreationDate int    `json:"creationDate,omitempty"`
	Description  string `json:"description,omitempty"`
	Elastic      bool   `json:"elastic,omitempty"`
	Investigator string `json:"investigator,omitempty"`

	// If the case is open or not
	IsOpen   bool   `json:"isOpen,omitempty"`
	Location string `json:"location,omitempty"`
	Name     string `json:"name,omitempty"`
	Path     string `json:"path,omitempty"`
}

type CaseMetadata

type CaseMetadata struct {
	Name string `json:"name,omitempty"`
	Type string `json:"type,omitempty"`
}

type CaseOptions

type CaseOptions struct {
	Name            string           `json:"name"`
	Location        string           `json:"location"`
	Investigator    string           `json:"investigator"`
	Compound        bool             `json:"compound"`
	ElasticSettings *ElasticSettings `json:"elasticSearchSettings"`
}

type CaseQueue

type CaseQueue struct {
	CaseID string          `json:"caseId,omitempty"`
	Queue  []*FunctionInfo `json:"queue,omitempty"`
}

type CaseSubsetRequest

type CaseSubsetRequest struct {
	CaseMetadata struct {
		Description  string `json:"description,omitempty"`
		Investigator string `json:"investigator,omitempty"`
		Name         string `json:"name,omitempty"`
	} `json:"caseMetadata,omitempty"`
	CopyClassifiers    bool   `json:"copyClassifiers,omitempty"`
	CopyClusters       bool   `json:"copyClusters,omitempty"`
	CopyComments       bool   `json:"copyComments,omitempty"`
	CopyCustodians     bool   `json:"copyCustodians,omitempty"`
	CopyCustomMetadata bool   `json:"copyCustomMetadata,omitempty"`
	CopyItemSets       bool   `json:"copyItemSets,omitempty"`
	CopyMarkupSets     bool   `json:"copyMarkupSets,omitempty"`
	CopyProductionSets bool   `json:"copyProductionSets,omitempty"`
	CopyTags           bool   `json:"copyTags,omitempty"`
	EvidenceStoreCount int    `json:"evidenceStoreCount,omitempty"`
	IncludeFamilies    bool   `json:"includeFamilies,omitempty"`
	Location           string `json:"location,omitempty"`
	ProcessingSettings struct {
		AnalysisLanguage   string `json:"analysisLanguage,omitempty"`
		EnableExactQueries bool   `json:"enableExactQueries,omitempty"`
		Stemming           bool   `json:"stemming,omitempty"`
		StopWords          bool   `json:"stopWords,omitempty"`
	} `json:"processingSettings,omitempty"`
	Query string `json:"query,omitempty"`
}

type CenteraCluster

type CenteraCluster struct {
	ClipsFile string `json:"clipsFile"`
	IpsFile   string `json:"ipsFile"`
}

type Client

type Client struct {
	// contains filtered or unexported fields
}

Client is being used to perform authenticated operations

func (*Client) CaseDigest

func (c *Client) CaseDigest(ctx context.Context, caseID string) (*CaseDigest, error)

func (*Client) CaseMetadata

func (c *Client) CaseMetadata(ctx context.Context, caseID string) ([]CaseMetadata, error)

CaseMetadata returns the metadata fields associated with a case.

func (*Client) Close

func (c *Client) Close(ctx context.Context) error

Close the client

Close needs to be called before the program exits, otherwise the licence will be left hanging

func (*Client) CloseCase

func (c *Client) CloseCase(ctx context.Context, caseID string) error

CloseCase closes the case specified by caseID

func (*Client) Cluster

func (c *Client) Cluster(ctx context.Context) (*Cluster, error)

Cluster finds the cluster that this client is part of and other members of the same cluster for distributed processing.

func (*Client) DeleteCase

func (c *Client) DeleteCase(ctx context.Context, caseID string, deleteDesc bool) error

DeleteCase deletes the case specified by caseID

func (*Client) FunctionsQueue

func (c *Client) FunctionsQueue(ctx context.Context) (*FunctionsQueue, error)

FunctionsQueue returns the status of all asynchronous functions that have been run and that are currently running

func (*Client) GetFunction

func (c *Client) GetFunction(ctx context.Context, functionKey string) (*Function, error)

GetFunction returns a function from the specified functionKey

func (*Client) GetScripts

func (c *Client) GetScripts(ctx context.Context) ([]string, error)

GetScripts returns an array of existing user scripts.

func (*Client) InventoryDigest

func (c *Client) InventoryDigest(ctx context.Context) ([]CaseDigest, error)

InventoryDigest returns the set of case digests for all inventory locations
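
For example, listing every case known to the inventory (a sketch; the import path is hypothetical):

package main

import (
	"context"
	"fmt"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// listCases prints the ID, name and path of every case in the inventory.
func listCases(ctx context.Context, client *nuix.Client) error {
	digests, err := client.InventoryDigest(ctx)
	if err != nil {
		return err
	}
	for _, d := range digests {
		fmt.Printf("%s\t%s\t%s\n", d.CaseID, d.Name, d.Path)
	}
	return nil
}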

func (*Client) InventoryLocations

func (c *Client) InventoryLocations(ctx context.Context) ([]string, error)

InventoryLocations returns a slice of the available locations for the inventory

func (*Client) InventoryScan

func (c *Client) InventoryScan(ctx context.Context) ([]string, error)

InventoryScan scans the inventory locations for cases

func (*Client) Licence

func (c *Client) Licence() string

Licence returns the current licence for the client

func (*Client) NewCase

func (c *Client) NewCase(ctx context.Context, name string, opt CaseOptions) (*Case, error)

NewCase creates a simple case with the specified options. Metadata can later be set via methods on the Case itself.
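
A sketch of creating a simple (non-compound) case; the name, location and investigator are placeholders and the import path is hypothetical:

package main

import (
	"context"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// newSimpleCase creates a simple case in the given inventory location.
// Remember to Close the case when done so it does not remain locked.
func newSimpleCase(ctx context.Context, client *nuix.Client) (*nuix.Case, error) {
	opt := nuix.CaseOptions{
		Name:         "Investigation 2020-11",
		Location:     "cases/investigation-2020-11",
		Investigator: "Jane Doe",
		Compound:     false,
	}
	return client.NewCase(ctx, opt.Name, opt)
}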

func (*Client) OCR

func (c *Client) OCR(ctx context.Context) (bool, error)

OCR checks if optical character recognition (OCR) functionality is available for the client

func (*Client) OpenCase

func (c *Client) OpenCase(ctx context.Context, caseID string) (*Case, error)

OpenCase opens an existing Case in the inventory by caseID

func (*Client) Token

func (c *Client) Token() string

Token returns the auth-token for the client

type Cluster

type Cluster struct {
	LocalMember ClusterInformation    `json:"localMember"`
	Members     []*ClusterInformation `json:"members"`
	Name        string                `json:"name"`
}

type ClusterInformation

type ClusterInformation struct {
	Address      string       `json:"address"`
	Port         int          `json:"port"`
	ServerID     string       `json:"serverId"`
	WorkerAgent  WorkerAgent  `json:"workerAgent"`
	WorkerBroker WorkerBroker `json:"workerBroker"`
}

type Dropbox

type Dropbox struct {
	AccessToken string `json:"accessToken"`
	AuthCode    string `json:"authCode"`
	Team        bool   `json:"team"`
}

type ElasticSettings

type ElasticSettings struct {
	AdditionalSettings interface{} `json:"additionalSettings,omitempty"`
	ClusterName        string      `json:"cluster.name,omitempty"`
	NumberOfReplicas   int         `json:"index.number_of_replicas,omitempty"`
	NumberOfShards     int         `json:"index.number_of_shards,omitempty"`
	RefreshInterval    string      `json:"index.refresh_interval,omitempty"`
	NuixPassword       string      `json:"nuix.auth.password,omitempty"`
	NuixUsername       string      `json:"nuix.auth.username,omitempty"`
	AutoClose          string      `json:"nuix.index.auto_close,omitempty"`
	Hosts              []string    `json:"nuix.transport.hosts,omitempty"`
	KeystoreFilepath   string      `json:"searchguard.ssl.transport.keystore_filepath,omitempty"`
	KeystorePassword   string      `json:"searchguard.ssl.transport.keystore_password,omitempty"`
	TruststoreFilepath string      `json:"searchguard.ssl.transport.truststore_filepath,omitempty"`
	TruststorePassword string      `json:"searchguard.ssl.transport.truststore_password,omitempty"`
	XPackPassword      string      `json:"xpack.security.password,omitempty"`
	XPackSSLEnabled    bool        `json:"xpack.security.transport.ssl.enabled,omitempty"`
	XPackUser          string      `json:"xpack.security.user,omitempty"`
	XPackCert          string      `json:"xpack.ssl.certificate,omitempty"`
	XPackSSLCertAuth   string      `json:"xpack.ssl.certificate_authorities,omitempty"`
	XPackSSLKey        string      `json:"xpack.ssl.key,omitempty"`
}
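
A sketch of an Elasticsearch-backed case, passing ElasticSettings through CaseOptions (cluster name, hosts and shard counts are placeholders; the import path is hypothetical):

package main

import (
	"context"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// newElasticCase creates a case whose index lives in an Elasticsearch cluster.
func newElasticCase(ctx context.Context, client *nuix.Client) (*nuix.Case, error) {
	opt := nuix.CaseOptions{
		Name:         "Elastic case",
		Location:     "cases/elastic-case",
		Investigator: "Jane Doe",
		ElasticSettings: &nuix.ElasticSettings{
			ClusterName:      "nuix-cluster",
			Hosts:            []string{"es1.example.com:9300", "es2.example.com:9300"},
			NumberOfShards:   5,
			NumberOfReplicas: 1,
		},
	}
	return client.NewCase(ctx, opt.Name, opt)
}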

type EnterpriseVault

type EnterpriseVault struct {
	Archive   string    `json:"archive"`
	Computer  string    `json:"computer"`
	Custodian string    `json:"custodian"`
	Flag      string    `json:"flag"`
	From      time.Time `json:"from"`
	Keywords  string    `json:"keywords"`
	To        time.Time `json:"to"`
	Vault     string    `json:"vault"`
}

type Evidence

type Evidence interface {
	// AddCenteraCluster adds a centera-cluster to the evidence
	AddCenteraCluster(centeraCluster CenteraCluster)
	// AddFile adds a file to the evidence
	AddFile(path string)
	// AddLoadFile adds a load-file to the evidence
	AddLoadFile(loadFile LoadFile)
	// AddMailStore adds a mail-store to the evidence
	AddMailStore(mailStore MailStore)
	// AddDropbox adds a dropbox to the evidence
	AddDropbox(dropbox Dropbox)
	// AddEnterpriseVault adds an enterprise-vault to the evidence
	AddEnterpriseVault(enterpriseVault EnterpriseVault)
	// AddExchangeMailbox adds an exchange-mailbox to the evidence
	AddExchangeMailbox(exchangeMailbox ExchangeMailbox)
	// AddS3Bucket adds a S3-bucket to the evidence
	AddS3Bucket(s3Bucket S3Bucket)
	// AddSplitFile adds a split-file to the evidence
	AddSplitFile(splitFile SplitFile)
	// AddSQLServer adds a SQL-server to the evidence
	AddSQLServer(sqlServer SQLServer)
	// AddSSHServer adds a SSH-server to the evidence
	AddSSHServer(sshServer SSHServer)
	// SetDescription sets the description to the evidence-container
	SetDescription(description string)
	// SetEncoding sets the encoding to the evidence-container
	SetEncoding(encoding string)
	// SetLocale sets the locale to the evidence-container
	SetLocale(locale string)
	// SetTimezone sets the timezone to the evidence-container
	SetTimezone(timezone string)
	// SetInitialCustodian sets the initial custodian to the evidence-container
	SetInitialCustodian(custodian string)
	// GetName returns the name of the evidence
	GetName() string
	// GetDescription gets the description from the evidence-container
	GetDescription() string
	// GetEncoding gets the encoding from the evidence-container
	GetEncoding() string
	// GetLocale gets the locale from the evidence-container
	GetLocale() string
	// GetTimezone gets the timezone from the evidence-container
	GetTimezone() string
	// GetInitialCustodian gets the initial custodian from the evidence-container
	GetInitialCustodian() string
	// GetCenteraClusters gets the centera-clusters from the evidence
	GetCenteraClusters() []CenteraCluster
	// GetFiles gets the files from the evidence
	GetFiles() []File
	// GetLoadFiles gets the load-files from the evidence
	GetLoadFiles() []LoadFile
	// GetMailStores gets the mail-stores from the evidence
	GetMailStores() []MailStore
	// GetDropboxes gets the dropboxes from the evidence
	GetDropboxes() []Dropbox
	// GetEnterpriseVaults gets the enterprise-vaults from the evidence
	GetEnterpriseVaults() []EnterpriseVault
	// GetExchangeMailboxes gets the exchange-mailboxes from the evidence
	GetExchangeMailboxes() []ExchangeMailbox
	// GetS3Buckets gets the S3-buckets from the evidence
	GetS3Buckets() []S3Bucket
	// GetSplitFiles gets the split-files from the evidence
	GetSplitFiles() []SplitFile
	// GetSQLServers gets the SQL-servers from the evidence
	GetSQLServers() []SQLServer
	// GetSSHServers gets the SSH-servers from the evidence
	GetSSHServers() []SSHServer
}

Evidence is an interface to manage an evidence-container
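
A sketch of configuring an evidence container obtained from Processor.NewEvidence (paths, custodian and timezone are placeholders; the import path is hypothetical):

package main

import (
	nuix "example.com/go-nuix-api" // hypothetical import path
)

// addLaptopEvidence creates an evidence container for a custodian's laptop
// and attaches the files that should be loaded into the case.
func addLaptopEvidence(p *nuix.Processor) nuix.Evidence {
	ev := p.NewEvidence("Laptop - Jane Doe")
	ev.SetDescription("Collected laptop data")
	ev.SetInitialCustodian("Jane Doe")
	ev.SetTimezone("UTC")
	ev.SetEncoding("UTF-8")
	ev.AddFile("/evidence/jane-doe/laptop-export.zip")
	return ev
}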

type ExchangeMailbox

type ExchangeMailbox struct {
	Domain           string    `json:"domain"`
	From             time.Time `json:"from"`
	Impersonating    bool      `json:"impersonating"`
	Mailbox          string    `json:"mailbox"`
	MailboxRetrieval []string  `json:"mailboxRetrieval"`
	Password         string    `json:"password"`
	To               time.Time `json:"to"`
	URI              string    `json:"uri"`
	Username         string    `json:"username"`
}

type File

type File struct {
	Path string `json:"path"`
}

type FileRequest

type FileRequest struct {
	Data []byte
	Name string
	// contains filtered or unexported fields
}

type Function

type Function struct {
	// contains filtered or unexported fields
}

func (*Function) AddRemoteWorkers

func (f *Function) AddRemoteWorkers(ctx context.Context, address string, workers int) error

AddRemoteWorkers adds the specified amount of remote-workers to the function

func (*Function) Cancel

func (f *Function) Cancel(ctx context.Context) error

Cancel the running function and update the information

func (*Function) Info

func (f *Function) Info() *FunctionInfo

Info returns the latest retrieved information from the function; use Status(ctx) to update the information

func (*Function) Status

func (f *Function) Status(ctx context.Context) (*FunctionInfo, error)

Status returns the status of an asynchronous function

type FunctionInfo

type FunctionInfo struct {
	Action                           string      `json:"action"`
	Cancelled                        bool        `json:"cancelled"`
	CaseID                           string      `json:"caseId"`
	CaseLocation                     string      `json:"caseLocation"`
	CaseName                         string      `json:"caseName"`
	Done                             bool        `json:"done"`
	FinishTime                       int         `json:"finishTime"`
	FriendlyName                     string      `json:"friendlyName"`
	FunctionKey                      string      `json:"functionKey"`
	HasSuccessfullyCompleted         bool        `json:"hasSuccessfullyCompleted"`
	Method                           string      `json:"method"`
	Options                          interface{} `json:"options"`
	ParticipatingInCaseFunctionQueue bool        `json:"participatingInCaseFunctionQueue"`
	PercentComplete                  float64     `json:"percentComplete"`
	Progress                         int         `json:"progress"`
	RequestTime                      int         `json:"requestTime"`
	Requestor                        string      `json:"requestor"`
	Result                           interface{} `json:"result"`
	StartTime                        int         `json:"startTime"`
	Status                           string      `json:"status"`
	Token                            string      `json:"token"`
	Total                            int         `json:"total"`
	UpdatedOn                        int         `json:"updatedOn"`
}

type FunctionsQueue

type FunctionsQueue struct {
	CaseQueues            []*CaseQueue    `json:"caseQueues"`
	ExecuteImmediate      []*FunctionInfo `json:"executeImmediate"`
	Paused                bool            `json:"paused"`
	PausedBy              string          `json:"pausedBy"`
	PausedUntil           int             `json:"pausedUntil"`
	SingleItemQueue       []*FunctionInfo `json:"singleItemQueue"`
	TotalRunningFunctions int             `json:"totalRunningFunctions"`
}

type Item

type Item struct {
	CustomMetadata interface{}    `json:"customMetadata,omitempty"`
	Entities       interface{}    `json:"entities,omitempty"`
	GUID           uuid.UUID      `json:"guid,omitempty"`
	Properties     ItemProperties `json:"properties,omitempty"`
}

type ItemProperties

type ItemProperties struct {
	Date int64  `json:"date,omitempty"`
	Name string `json:"name,omitempty"`
}

type KeystoreKeyParameter

type KeystoreKeyParameter struct {
	// contains filtered or unexported fields
}

type Licence

type Licence struct {
	AuditThreshold          int      `json:"auditThreshold"`
	Audited                 bool     `json:"audited"`
	CanChooseWorkers        bool     `json:"canChooseWorkers"`
	ConcurrentUserLimit     int      `json:"concurrentUserLimit"`
	Count                   int      `json:"count"`
	Description             string   `json:"description"`
	Features                []string `json:"features"`
	LegalHoldCustodianLimit int      `json:"legalHoldCustodianLimit"`
	LegalHoldHoldCountLimit int      `json:"legalHoldHoldCountLimit"`
	Location                string   `json:"location"`
	Shortname               string   `json:"shortname"`
	Source                  string   `json:"source"`
	Type                    string   `json:"type"`
	Workers                 int      `json:"workers"`
}

type LoadFile

type LoadFile struct {
	CSVFile string `json:"csvFile"`
	IDXFile string `json:"idxFile"`
}

type MailStore

type MailStore struct {
	Host     string `json:"host"`
	Password string `json:"password"`
	Port     int    `json:"port"`
	Protocol string `json:"protocol"`
	Username string `json:"username"`
}

type MimeTypeProcessorSettings

type MimeTypeProcessorSettings struct {
	// the MIME type to specify the settings for.
	MimeType string `json:"mimeType"`

	// the new processing settings
	Settings struct {

		// If false, items matching this MIME type,
		// and their embedded descendants, will not be processed.
		Enabled bool `json:"enabled"`

		// If false, descendants of items matching
		// this MIME type will not be processed.
		ProcessEmbedded bool `json:"processEmbedded"`

		// If false, items matching this MIME type
		// will not have their image data processed.
		//
		// Note: 'createThumbnails' must also be true in SetProcessingSettings().
		ProcessImages bool `json:"processImages"`

		// If false, items matching this MIME type
		// will not have named entities extracted.
		//
		// Note: 'extractNamedEntitiesFromText' or 'extractNamedEntitiesFromProperties'
		// must also be true in SetProcessingSettings().
		ProcessNamedEntities bool `json:"processNamedEntities"`

		// If false, items matching this MIME type
		// will not have their text processed.
		//
		// Note: This setting cannot be used when 'textStrip'
		// is set for the same MIME type.
		ProcessText bool `json:"processText"`

		// If false, items matching this MIME type
		// will not have their binary data stored.
		//
		// Note: This setting only takes effect if
		// 'storeBinary' is enabled in the global processing settings.
		StoreBinary bool `json:"storeBinary"`

		// If true, items matching this MIME type
		// will have their binary data text stripped.
		//
		// Note: This setting cannot be used when 'processText' is set for the same MIME type.
		TextStrip bool `json:"textStrip"`
	}
}

MimeTypeProcessorSettings - these settings correspond to the same settings in the desktop application; however, the user's preferences are not used to derive the defaults.
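
For instance, a sketch that skips text for a single MIME type (the MIME type chosen is only illustrative); because Settings is an anonymous struct, its fields are set on a variable rather than in a composite literal:

package main

import (
	nuix "example.com/go-nuix-api" // hypothetical import path
)

// skipZipText keeps zip containers enabled but does not index or store the container itself.
func skipZipText(p *nuix.Processor) {
	var s nuix.MimeTypeProcessorSettings
	s.MimeType = "application/zip"
	s.Settings.Enabled = true
	s.Settings.ProcessEmbedded = true // still process the archive's children
	s.Settings.ProcessText = false    // do not index text for the container itself
	s.Settings.StoreBinary = false    // do not store the container's binary
	p.SetMimeTypeProcessingSettings(s)
}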

type ParallelProcessingSettings

type ParallelProcessingSettings struct {
	// Specifies the amount of memory to allocate to the
	// JMS Broker when it is run in a separate process.
	//
	// an integer >= 768
	BrokerMemory int `json:"brokerMemory"`

	// If true, the JMS broker will be run within
	// the current process,
	// otherwise it will be run in a new process.
	//
	// true or false
	EmbedBroker bool `json:"embedBroker"`

	// The worker broker component is optional but if found it allows
	// ProcessingJob.addWorkers(int, Map) to function and provides
	// a central place for recording processing job status from all
	// machines on a local network.
	//
	// An InetSocketAddress or String in the form of host:port.
	WorkerBrokerAddress string `json:"workerBrokerAddress"`

	// Specifies the number of worker processes to use when processing.
	// This defaults to the number of CPU cores on the machine;
	// however, for a machine with a large number of cores
	// a smaller value can often perform better.
	// Performance tuning on each machine is recommended.
	// This value is also limited by the settings in the current licence.
	//
	// the number of CPU cores
	WorkerCount int `json:"workerCount"`

	// Sets the maximum amount of memory to allocate
	// to each worker process, in megabytes.
	// Setting this value too low can cause
	// out-of-memory errors during processing.
	// Setting the value too large can cause
	// worker processes to page memory to disk, affecting performance.
	//
	// calculated based on the number of workers
	// and available machine memory.
	WorkerMemory int `json:"workerMemory"`

	// Specifies the base directory workers will use
	// to stores temporary files generated during processing.
	//
	// A string file path; defaults to a directory
	// under the case directory.
	WorkerTemp string `json:"workerTemp"`
}

ParallelProcessingSettings - the parallel processing settings.
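
A sketch of tuning these settings before processing (the values are arbitrary examples, not recommendations; the import path is hypothetical):

package main

import (
	nuix "example.com/go-nuix-api" // hypothetical import path
)

// tuneWorkers configures four workers with 4 GB each and a dedicated temp directory.
func tuneWorkers(p *nuix.Processor) {
	p.SetParallelProcessingSettings(nuix.ParallelProcessingSettings{
		WorkerCount:  4,
		WorkerMemory: 4096, // megabytes per worker
		WorkerTemp:   "/tmp/nuix-workers",
		EmbedBroker:  true,
		BrokerMemory: 768,
	})
}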

type ProcessingJob

type ProcessingJob struct {
	// contains filtered or unexported fields
}

ProcessingJob - operations relating to a processing job.

func (*ProcessingJob) GetCasePath

func (p *ProcessingJob) GetCasePath(ctx context.Context) (string, error)

GetCasePath gets the case path on the master.

Note: Rather use ProcessingJob.Status(ctx) for ...

func (*ProcessingJob) GetStartDateTime

func (p *ProcessingJob) GetStartDateTime(ctx context.Context) (*time.Time, error)

GetStartDateTime gets the start time of the processing job.

Note: Rather use ProcessingJob.Status(ctx) for ...

func (*ProcessingJob) HasFinished

func (p *ProcessingJob) HasFinished(ctx context.Context) (bool, error)

HasFinished indicates if the processing job has finished.

Note: Rather use ProcessingJob.Status(ctx) for ...

func (*ProcessingJob) Pause

func (p *ProcessingJob) Pause(ctx context.Context) error

Pause is called when the processing job should be paused.

Note: Rather use ProcessingJob.Status(ctx) for ...

func (*ProcessingJob) Resume

func (p *ProcessingJob) Resume(ctx context.Context) error

Resume is called when the processing job should be resumed after a Pause().

Note: Rather use ProcessingJob.Status(ctx) for ...

func (*ProcessingJob) Status

func (p *ProcessingJob) Status(ctx context.Context) (*FunctionInfo, error)

Status returns the status of a processing job

func (*ProcessingJob) Stop

func (p *ProcessingJob) Stop(ctx context.Context) error

Stop is called when the processing job should gracefully stop.

type Processor

type Processor struct {
	// contains filtered or unexported fields
}

Processor processes data for a case.

func (*Processor) AddKeyStore

func (p *Processor) AddKeyStore(keyStore KeystoreKeyParameter)

AddKeyStore adds a key and stores it in the processor's keystore.

func (*Processor) GetMimeTypeProcessingSettings

func (p *Processor) GetMimeTypeProcessingSettings() []*MimeTypeProcessorSettings

GetMimeTypeProcessingSettings gets the processing settings for all MIME types.

func (*Processor) GetParallelProcessingSettings

func (p *Processor) GetParallelProcessingSettings() []*ParallelProcessingSettings

GetParallelProcessingSettings gets the parallel processing settings currently in use.

func (*Processor) GetProcessingProfileName

func (p *Processor) GetProcessingProfileName() string

GetProcessingProfileName gets the processing profile name.

func (*Processor) GetProcessingSettings

func (p *Processor) GetProcessingSettings() *ProcessorSettings

GetProcessingSettings gets the processing settings in use.

func (*Processor) NewEvidence

func (p *Processor) NewEvidence(name string) Evidence

NewEvidence creates a new evidence container within the case, for the purpose of adding evidence to be loaded.

func (*Processor) Process

func (p *Processor) Process(ctx context.Context) (*ProcessingJob, error)

Process is a non-blocking operation which performs any outstanding processing work, returning as soon as processing has started.
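
Putting the pieces together, a sketch that loads one evidence file and blocks until the processing job has finished (paths, names and the polling interval are placeholders; the import path is hypothetical):

package main

import (
	"context"
	"fmt"
	"time"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// loadEvidence creates a processor, attaches one evidence file and waits
// until the resulting processing job reports that it has finished.
func loadEvidence(ctx context.Context, c *nuix.Case) error {
	p := c.CreateProcessor()

	ev := p.NewEvidence("Initial load")
	ev.SetInitialCustodian("Jane Doe")
	ev.AddFile("/evidence/jane-doe/mailbox.pst")

	job, err := p.Process(ctx) // returns as soon as processing has started
	if err != nil {
		return err
	}
	for {
		done, err := job.HasFinished(ctx)
		if err != nil {
			return err
		}
		if done {
			return nil
		}
		if info, err := job.Status(ctx); err == nil {
			fmt.Printf("processing: %.1f%% complete\n", info.PercentComplete)
		}
		time.Sleep(10 * time.Second)
	}
}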

func (*Processor) RescanEvidenceRepositories

func (p *Processor) RescanEvidenceRepositories(rescan bool)

RescanEvidenceRepositories rescans the case's Evidence Repositories to discover:
- New Evidence Containers within the Evidence Repositories.
- New files within existing Evidence Containers, which are themselves within Evidence Repositories.

func (*Processor) SetMimeTypeProcessingSettings

func (p *Processor) SetMimeTypeProcessingSettings(settings MimeTypeProcessorSettings)

SetMimeTypeProcessingSettings sets specific MIME type processing settings to use.

func (*Processor) SetParallelProcessingSettings

func (p *Processor) SetParallelProcessingSettings(settings ParallelProcessingSettings)

SetParallelProcessingSettings sets the parallel processing settings to use for processing.

func (*Processor) SetProcessingProfile

func (p *Processor) SetProcessingProfile(processingProfileName string)

SetProcessingProfile sets the processing profile name.

func (*Processor) SetProcessingSettings

func (p *Processor) SetProcessingSettings(settings ProcessorSettings)

SetProcessingSettings sets the processing settings to use.

type ProcessorInfo

type ProcessorInfo struct {
	ConfigurationProfile       string                        `json:"configurationProfile,omitempty"`
	Containers                 []*evidence                   `json:"containers,omitempty"`
	KeystoreKeyParameters      []*KeystoreKeyParameter       `json:"keystoreKeyParameters,omitempty"`
	MimeTypeProcessorSettings  []*MimeTypeProcessorSettings  `json:"mimeTypeProcessorSettings,omitempty"`
	ParallelProcessingSettings []*ParallelProcessingSettings `json:"parallelProcessingSettings,omitempty"`
	ProcessingProfile          string                        `json:"processingProfile,omitempty"`
	ProcessorSettings          *ProcessorSettings            `json:"processorSettings,omitempty"`
	Repositories               []*Repository                 `json:"repositories,omitempty"`
	RescanEvidenceRepositories bool                          `json:"rescanEvidenceRepositories,omitempty"`
}

ProcessorInfo holds information for a processor

type ProcessorSettings

type ProcessorSettings struct {
	// If true, adds the Bcc field when computing email digests.
	//
	// Note: Using the Bcc field in email digests may prevent the sender's and recipients' digests from matching.
	// This is because only the sender will have the Bcc field if it is present.
	AddBccToEmailDigests bool `json:"addBccToEmailDigests,omitempty"`

	// If true, adds the communication date when computing email digests.
	//
	// Note: Using the communication date in the email digests may prevent
	// the sender's and recipients' digests from matching.
	// This is because the sender's and recipients' communication dates/times can be slightly different for the same email.
	AddCommunicationDateToEmailDigests bool `json:"addCommunicationDateToEmailDigests,omitempty"`

	// Specifies the language to use for text analysis when indexing:
	//
	// "en" = English, "ja" = Japanese
	AnalysisLanguage string `json:"analysisLanguage,omitempty"`

	// If true, calculate audited size.
	CalculateAuditedSize bool `json:"calculateAuditedSize,omitempty"`

	// If true, calculate SSDeep fuzzy hash values for items.
	CalculateSSDeepFuzzyHash bool `json:"calculateSSDeepFuzzyHash,omitempty"`

	// If true, carve data out of file system unallocated space for disk images.
	CarveFileSystemUnallocatedSpace bool `json:"carveFileSystemUnallocatedSpace,omitempty"`

	// If true, carve data out of unidentified data items.
	CarveUnidentifiedData bool `json:"carveUnidentifiedData,omitempty"`

	// If nil the block size of the file system is used.
	// Otherwise the given block size is used.
	// File identification is attempted at the start of each block,
	// so the smaller the value the longer processing will take.
	// Avoid values smaller than 512 bytes except in specific cases.
	CarvingBlockSize int `json:"carvingBlockSize,omitempty"`

	// If true, create and store thumbnails of image data items.
	CreateThumbnails bool `json:"createThumbnails,omitempty"`

	// If true, detect faces in photographic items.
	DetectFaces bool `json:"detectFaces,omitempty"`

	// A slice of digests to calculate.
	Digests []string `json:"digests,omitempty"`

	// If true, enables search using "exact" queries.
	EnableExactQueries bool `json:"enableExactQueries,omitempty"`

	// If true, extract the slack space from the end of file records in disk images.
	ExtractEndOfFileSlackSpace bool `json:"extractEndOfFileSlackSpace,omitempty"`

	// If true, extract deleted data from mailbox file formats and
	// slack space from the end of file records in file system disk images.
	ExtractFromSlackSpace bool `json:"extractFromSlackSpace,omitempty"`

	// If true, extract named entities from the text of a document. Deprecated in Nuix 6.0.
	ExtractNamedEntities bool `json:"extractNamedEntities,omitempty"`

	// If true, extract named entities from the properties of a document.
	ExtractNamedEntitiesFromProperties bool `json:"extractNamedEntitiesFromProperties,omitempty"`

	// If true, extract named entities from the text of a document.
	//
	// Note: 'processText' must also be true.
	ExtractNamedEntitiesFromText bool `json:"extractNamedEntitiesFromText,omitempty"`

	// If true, extract named entities from the text of text-stripped items,
	// if and only if extractNamedEntitiesFromText is set to true.
	// extractNamedEntitiesFromProperties is independent of this property.
	ExtractNamedEntitiesFromTextStripped bool `json:"extractNamedEntitiesFromTextStripped,omitempty"`

	// If true, extract shingles from item text. Enabling this setting enables near deduplication.
	ExtractShingles bool `json:"extractShingles,omitempty"`

	// If true, hides embedded immaterial data items such as embedded images in documents.
	HideEmbeddedImmaterialData bool `json:"hideEmbeddedImmaterialData,omitempty"`

	// If false, only file system metadata is extracted for physical files on disk.
	IdentifyPhysicalFiles bool `json:"identifyPhysicalFiles,omitempty"`

	// Specifies the maximum size of binary which will be digested, in bytes.
	//
	// Default: 250000000 (250 MB)
	MaxDigestSize int `json:"maxDigestSize,omitempty"`

	// Specifies the maximum size of binary which will be stored into the binary store, in bytes.
	//
	// Default: 250000000 (250 MB)
	MaxStoredBinarySize int `json:"maxStoredBinarySize,omitempty"`

	// If true, top-level items will contain search fields containing text from their family.
	ProcessFamilyFields bool `json:"processFamilyFields,omitempty"`

	// If true, the contents of forensic images will be exposed.
	// If false metadata about forensic images will be extracted but
	// their contents will not be processed.
	// This setting can be used in combination with processLooseFileContents
	// to explode forensic images but not process their contents.
	//
	// Deprecated in Nuix 7.0, replaced with traversalScope.
	ProcessForensicImages bool `json:"processForensicImages,omitempty"`

	// If true, the contents of loose files will be extracted and processed.
	// If false metadata about loose files will be extracted but their contents will not be processed.
	//
	// Deprecated in Nuix 7.0, replaced with traversalScope.
	ProcessLooseFileContents bool `json:"processLooseFileContents,omitempty"`

	// If true, store and index the text of data items.
	ProcessText bool `json:"processText,omitempty"`

	// If true, process item text and summarise.
	ProcessTextSummaries bool `json:"processTextSummaries,omitempty"`

	// If true, recover deleted file records from disk images.
	RecoverDeletedFiles bool `json:"recoverDeletedFiles,omitempty"`

	// If true, existing evidence stores are used to add any additional data into.
	ReuseEvidenceStores bool `json:"reuseEvidenceStores,omitempty"`

	// If true, perform analysis on images to detect skintones.
	SkinToneAnalysis bool `json:"skinToneAnalysis,omitempty"`

	// If true, only process sections of the Registry that have decoders or have been explicitly selected.
	SmartProcessRegistry bool `json:"smartProcessRegistry,omitempty"`

	// If true, stems words using English rules before indexing (e.g. "fishing" → "fish".)
	// If false, no stemming is performed.
	//
	// Legacy value "en" was deprecated in Nuix 5.2.
	// New code should use true instead.
	Stemming bool `json:"stemming,omitempty"`

	// If true, removes English stop words ("a", "and", "the", etc.) from the text index.
	// If false, no stop words are removed.
	//
	// Legacy value "en" was deprecated in Nuix 5.2.
	// New code should use true instead.
	StopWords bool `json:"stopWords,omitempty"`

	// If true, store the binary of data items.
	StoreBinary bool `json:"storeBinary,omitempty"`
}

ProcessorSettings correspond to the same settings in the desktop application; however, the user's preferences are not used to derive the defaults.
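
A sketch of a settings struct applied before Process (the fields and digest names shown are illustrative only; the import path is hypothetical):

package main

import (
	nuix "example.com/go-nuix-api" // hypothetical import path
)

// configureProcessing enables text indexing, thumbnails, stemming, stop words
// and MD5/SHA-1 digests for the processor.
func configureProcessing(p *nuix.Processor) {
	p.SetProcessingSettings(nuix.ProcessorSettings{
		ProcessText:      true,
		CreateThumbnails: true,
		Stemming:         true,
		StopWords:        true,
		Digests:          []string{"MD5", "SHA-1"}, // digest names are illustrative
	})
}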

type Repository

type Repository struct {
	CustodianLevel int    `json:"custodianLevel"`
	Path           string `json:"path"`
}

type S3Bucket

type S3Bucket struct {
	Access   string `json:"access"`
	Bucket   string `json:"bucket"`
	Endpoint string `json:"endpoint"`
	Secret   string `json:"secret"`
}

type SQLServer

type SQLServer struct {
	Computer string `json:"computer"`
	Domain   string `json:"domain"`
	Instance string `json:"instance"`
	Password string `json:"password"`
	Query    string `json:"query"`
	Username string `json:"username"`
}

type SSHServer

type SSHServer struct {
	AccessingRemoteDisks bool   `json:"accessingRemoteDisks"`
	Computer             string `json:"computer"`
	Domain               string `json:"domain"`
	HostFingerprint      string `json:"hostFingerprint"`
	KeyFolder            string `json:"keyFolder"`
	Password             string `json:"password"`
	PortNumber           int    `json:"portNumber"`
	RemoteFolder         string `json:"remoteFolder"`
	SudoPassword         string `json:"sudoPassword"`
	Username             string `json:"username"`
}

type SearchRequest

type SearchRequest struct {
	Deduplicate              string   `json:"deduplicate,omitempty"`
	Entities                 []string `json:"entities,omitempty"`
	FieldList                []string `json:"FieldList,omitempty"`
	ForceCacheDelete         bool     `json:"forceCacheDelete,omitempty"`
	MetadataProfile          string   `json:"metadataProfile,omitempty"`
	NumberOfRecordsRequested int      `json:"numberOfRecordsRequested,omitempty"`
	PropertyList             []string `json:"propertyList,omitempty"`
	Query                    string   `json:"query,omitempty"`
	RelationType             string   `json:"relationType,omitempty"`
	SortField                string   `json:"sortField,omitempty"`
	SortOrder                string   `json:"sortOrder,omitempty"`
	StartIndex               int      `json:"startIndex,omitempty"`
	UseCache                 bool     `json:"useCache,omitempty"`
}

type SearchResponse

type SearchResponse struct {
	CompletedOn          int64         `json:"completedOn,omitempty"`
	Count                int           `json:"count,omitempty"`
	DeduplicatedCount    int           `json:"deduplicatedCount,omitempty"`
	ElapsedTimeForSearch int           `json:"elapsedTimeForSearch,omitempty"`
	Request              SearchRequest `json:"request,omitempty"`
	ResultList           []Item        `json:"resultList,omitempty"`
	StartedOn            int64         `json:"startedOn,omitempty"`
}
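
A sketch of running a search against an open case and reading the results (the query, page size and sort values are placeholders; the import path is hypothetical):

package main

import (
	"context"
	"fmt"

	nuix "example.com/go-nuix-api" // hypothetical import path
)

// searchCase runs a query and prints the GUID and name of each returned item.
func searchCase(ctx context.Context, c *nuix.Case) error {
	resp, err := c.Search(ctx, nuix.SearchRequest{
		Query:                    "kind:email",
		NumberOfRecordsRequested: 100,
		SortField:                "name",
		SortOrder:                "asc",
	})
	if err != nil {
		return err
	}
	fmt.Printf("matched %d items\n", resp.Count)
	for _, item := range resp.ResultList {
		fmt.Printf("%s\t%s\n", item.GUID, item.Properties.Name)
	}
	return nil
}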

type SharepointSite

type SharepointSite struct {
	Domain   string `json:"domain"`
	Password string `json:"password"`
	URI      string `json:"uri"`
	Username string `json:"username"`
}

type SplitFile

type SplitFile struct {
	Files []string
}

type WorkerAgent

type WorkerAgent struct {
	AvailableWorkers    int    `json:"availableWorkers"`
	GUID                string `json:"guid"`
	Host                string `json:"host"`
	LicenceShortName    string `json:"licenceShortName"`
	WorkerBrokerAddress string `json:"workerBrokerAddress"`
}

type WorkerBroker

type WorkerBroker struct {
	Address  string `json:"address"`
	ServerID string `json:"serverId"`
}
