kineticaexporter

package module
v0.99.0 (not the latest version of this module)
Published: Apr 22, 2024 License: Apache-2.0 Imports: 22 Imported by: 0

README

Status
Stability: development (metrics, traces, logs)
Distributions: []
Code Owners: @am-kinetica, @TylerHelmuth

Kinetica Logo

Website | Docs | Community Slack

Kinetica OpenTelemetry Collector Exporter Plug-In

Overview

Installation

Creating the tables

The schema name otel is an example; a different name may be used, but the same name must then be set in the exporter configuration. The SQL scripts follow.
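
The schema itself must exist before the table DDL below is run. A minimal statement for this, matching the CreateSchema constant listed in the package documentation further down (and assuming the example schema name otel), is:

CREATE SCHEMA IF NOT EXISTS otel;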


----- Logs

CREATE TABLE otel.log
(
        log_id                   VARCHAR (uuid),            -- generated
        trace_id                 VARCHAR(32),
        span_id                  VARCHAR(16),
        time_unix_nano           TIMESTAMP,
        observed_time_unix_nano  TIMESTAMP,
        severity_id              TINYINT,
        severity_text            VARCHAR(8),
        body                     VARCHAR,
        flags                    INT,
        PRIMARY KEY (log_id)
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.log_attribute
(
        log_id       VARCHAR (uuid),
        key          VARCHAR(256, dict),
        string_value VARCHAR(256),
        bool_value   BOOLEAN,
        int_value    INT,
        double_value DOUBLE,
        bytes_value  BYTES,
        PRIMARY KEY (log_id, key),
        SHARD KEY (log_id),
        FOREIGN KEY (log_id) REFERENCES otel.log(log_id) AS fk_log
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.log_resource_attribute
(
        log_id  VARCHAR (uuid),              -- generated
        key          VARCHAR(256, dict),
        string_value VARCHAR,
        bool_value   BOOLEAN,
        int_value    INT,
        double_value DOUBLE,
        bytes_value  BYTES,
        PRIMARY KEY (log_id, key),
        SHARD KEY (log_id),
        FOREIGN KEY (log_id) REFERENCES otel.log(log_id) AS fk_log_resource
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.log_scope_attribute
(
        log_id     VARCHAR (uuid),              -- generated
        scope_name   VARCHAR(64, dict),
        scope_ver    VARCHAR(16, dict),
        key          VARCHAR(256, dict),
        string_value VARCHAR,
        bool_value   BOOLEAN,
        int_value    INT,
        double_value DOUBLE,
        bytes_value  BYTES,
        PRIMARY KEY (log_id, key),
        SHARD KEY (log_id),
        FOREIGN KEY (log_id) REFERENCES otel.log(log_id) AS fk_log_scope
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
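
As an illustration of how these tables relate (not part of the exporter; the 100-row limit is arbitrary), a log record can be reassembled with its flattened attributes by joining on log_id:

-- Illustrative query: log records with their attributes
SELECT l.time_unix_nano, l.severity_text, l.body, la."key", la.string_value
FROM otel.log l
LEFT JOIN otel.log_attribute la ON la.log_id = l.log_id
ORDER BY l.time_unix_nano DESC
LIMIT 100;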

----- Traces

CREATE TABLE "otel"."trace_span"
(
    "id" UUID (primary_key) NOT NULL,
    "trace_id" VARCHAR (32) NOT NULL,
    "span_id" VARCHAR (16) NOT NULL,
    "parent_span_id" VARCHAR (16),
    "trace_state" VARCHAR (256),
    "name" VARCHAR (256, dict) NOT NULL,
    "span_kind" TINYINT (dict),
    "start_time_unix_nano" TIMESTAMP NOT NULL,
    "end_time_unix_nano" TIMESTAMP NOT NULL,
    "dropped_attributes_count" INTEGER,
    "dropped_events_count" INTEGER,
    "dropped_links_count" INTEGER,
    "message" VARCHAR(256),
    "status_code" TINYINT (dict)
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."trace_span_attribute"
(
    "span_id" UUID (primary_key, shard_key) NOT NULL,
    "key" VARCHAR (primary_key, 256, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (span_id) references otel.trace_span(id) as fk_span
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."trace_resource_attribute"
(
    span_id VARCHAR (UUID) NOT NULL,
    "key" VARCHAR (256, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    PRIMARY KEY (span_id, key),
    SHARD KEY (span_id),
    FOREIGN KEY (span_id) references otel.trace_span(id) as fk_span_resource
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."trace_scope_attribute"
(
    "span_id" UUID (primary_key) NOT NULL,
    "name" VARCHAR (256, dict),
    "version" VARCHAR (256, dict),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (span_id),
    FOREIGN KEY (span_id) references otel.trace_span(id) as fk_span_scope

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."trace_event_attribute"
(
    "span_id" UUID (primary_key) NOT NULL,
    "event_name" VARCHAR (128, dict) NOT NULL,
    "time_unix_nano" TIMESTAMP,
    "key" VARCHAR (primary_key, 128) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (span_id),
    FOREIGN KEY (span_id) references otel.trace_span(id) as fk_span_event

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."trace_link_attribute"
(
    "link_span_id" UUID (primary_key) NOT NULL,
    "trace_id" VARCHAR (32),
    "span_id" VARCHAR (16),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" TINYINT,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (link_span_id),
    FOREIGN KEY (link_span_id) references otel.trace_span(id) as fk_span_link

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
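
For illustration only, the spans of a single trace can be pulled together with their span attributes via the generated id column (the trace_id value below is a placeholder):

-- Illustrative query: spans and span attributes for one trace
SELECT s.name, s.span_id, s.parent_span_id,
       s.start_time_unix_nano, s.end_time_unix_nano,
       a."key", a.string_value
FROM otel.trace_span s
LEFT JOIN otel.trace_span_attribute a ON a.span_id = s.id
WHERE s.trace_id = '0123456789abcdef0123456789abcdef'
ORDER BY s.start_time_unix_nano;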

------ METRICS

------ GAUGE

CREATE TABLE otel.metric_gauge
(
    gauge_id UUID (primary_key, shard_key) not null,
    metric_name varchar(256) not null,
    metric_description varchar (256),
    metric_unit varchar (256)

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_gauge_datapoint
(
    gauge_id UUID (primary_key, shard_key) not null,
    id UUID (primary_key) not null,
    start_time_unix TIMESTAMP NOT NULL,
    time_unix TIMESTAMP NOT NULL,
    gauge_value DOUBLE,
    flags INT,
    FOREIGN KEY (gauge_id) references otel.metric_gauge(gauge_id) as fk_gauge_datapoint

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_gauge_datapoint_attribute"
(
    "gauge_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (gauge_id) references otel.metric_gauge(gauge_id) as fk_gauge_datapoint_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_gauge_datapoint_exemplar
(
    "gauge_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    time_unix TIMESTAMP NOT NULL,
    gauge_value DOUBLE,
    "trace_id" VARCHAR (32),
    "span_id" VARCHAR (16),
    FOREIGN KEY (gauge_id) references otel.metric_gauge(gauge_id) as fk_gauge_datapoint_exemplar
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_gauge_datapoint_exemplar_attribute
(
    "gauge_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (gauge_id) references otel.metric_gauge(gauge_id) as fk_gauge_datapoint_exemplar_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_gauge_resource_attribute"
(
    "gauge_id" UUID (primary_key) NOT NULL,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (gauge_id),
    FOREIGN KEY (gauge_id) references otel.metric_gauge(gauge_id) as fk_gauge_resource_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_gauge_scope_attribute"
(
    "gauge_id" UUID (primary_key) NOT NULL,
    "name" VARCHAR (256),
    "version" VARCHAR (256),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (gauge_id),
    FOREIGN KEY (gauge_id) references otel.metric_gauge(gauge_id) as fk_gauge_scope_attribute

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
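
A hypothetical query over these tables (the metric name is a placeholder) joins a gauge to its data points:

-- Illustrative query: latest data points for a named gauge metric
SELECT g.metric_name, d.time_unix, d.gauge_value, d.flags
FROM otel.metric_gauge g
JOIN otel.metric_gauge_datapoint d ON d.gauge_id = g.gauge_id
WHERE g.metric_name = 'system.cpu.utilization'
ORDER BY d.time_unix DESC
LIMIT 100;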

------- SUM

CREATE TABLE otel.metric_sum
(
    sum_id UUID (primary_key, shard_key) not null,
    metric_name varchar (256) not null,
    metric_description varchar (256),
    metric_unit varchar (256),
    aggregation_temporality INTEGER,
    is_monotonic BOOLEAN

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_sum_datapoint
(
    sum_id UUID (primary_key, shard_key) not null,
    id UUID (primary_key) not null,
    start_time_unix TIMESTAMP NOT NULL,
    time_unix TIMESTAMP NOT NULL,
    sum_value DOUBLE,
    flags INT,
    FOREIGN KEY (sum_id) references otel.metric_sum(sum_id) as fk_sum_datapoint

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_sum_datapoint_attribute"
(
    "sum_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (sum_id) references otel.metric_sum(sum_id) as fk_sum_datapoint_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_sum_datapoint_exemplar
(
    "sum_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    time_unix TIMESTAMP NOT NULL,
    sum_value DOUBLE,
    "trace_id" VARCHAR (32),
    "span_id" VARCHAR (16),
    FOREIGN KEY (sum_id) references otel.metric_sum(sum_id) as fk_sum_datapoint_exemplar
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_sum_datapoint_exemplar_attribute
(
    "sum_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (sum_id) references otel.metric_sum(sum_id) as fk_sum_datapoint_exemplar_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_sum_resource_attribute"
(
    "sum_id" UUID (primary_key) NOT NULL,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (sum_id),
    FOREIGN KEY (sum_id) references otel.metric_sum(sum_id) as fk_sum_resource_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_sum_scope_attribute"
(
    "sum_id" UUID (primary_key) NOT NULL,
    "name" VARCHAR (256),
    "version" VARCHAR (256),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (sum_id),
    FOREIGN KEY (sum_id) references otel.metric_sum(sum_id) as fk_sum_scope_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
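
Similarly, as a sketch only, sum data points can be read back together with the resource attributes recorded for the metric:

-- Illustrative query: sum data points with their resource attributes
SELECT s.metric_name, s.is_monotonic, d.time_unix, d.sum_value,
       r."key", r.string_value
FROM otel.metric_sum s
JOIN otel.metric_sum_datapoint d ON d.sum_id = s.sum_id
LEFT JOIN otel.metric_sum_resource_attribute r ON r.sum_id = s.sum_id
ORDER BY d.time_unix DESC
LIMIT 100;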

------ HISTOGRAM

CREATE TABLE otel.metric_histogram
(
    histogram_id UUID (primary_key, shard_key) not null,
    metric_name varchar (256) not null,
    metric_description varchar (256),
    metric_unit varchar (256),
    aggregation_temporality int8

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_histogram_datapoint
(
    histogram_id UUID (primary_key, shard_key) not null,
    id UUID (primary_key) not null,
    start_time_unix TIMESTAMP,
    time_unix TIMESTAMP NOT NULL,
    count LONG,
    data_sum DOUBLE,
    data_min DOUBLE,
    data_max DOUBLE,
    flags INT,
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_datapoint

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_histogram_datapoint_bucket_count
(
    histogram_id UUID (primary_key, shard_key) not null,
    datapoint_id UUID (primary_key) not null,
    count_id UUID (primary_key) not null,
    count LONG,
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_datapoint_bucket_count

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_histogram_datapoint_explicit_bound
(
    histogram_id UUID (primary_key, shard_key) not null,
    datapoint_id UUID (primary_key) not null,
    bound_id UUID (primary_key) not null,
    explicit_bound DOUBLE,
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_datapoint_explicit_bound

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_histogram_datapoint_attribute"
(
    "histogram_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_datapoint_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_histogram_datapoint_exemplar
(
    "histogram_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    time_unix TIMESTAMP NOT NULL,
    histogram_value DOUBLE,
    "trace_id" VARCHAR (32),
    "span_id" VARCHAR (16),
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_datapoint_exemplar
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_histogram_datapoint_exemplar_attribute
(
    "histogram_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_datapoint_exemplar_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_histogram_resource_attribute"
(
    "histogram_id" UUID (primary_key) NOT NULL,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (histogram_id),
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_resource_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_histogram_scope_attribute"
(
    "histogram_id" UUID (primary_key) NOT NULL,
    "name" VARCHAR (256),
    "version" VARCHAR (256),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (histogram_id),
    FOREIGN KEY (histogram_id) references otel.metric_histogram(histogram_id) as fk_histogram_scope_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
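
For illustration, a histogram data point's summary statistics and its bucket counts can be combined by joining on the datapoint id:

-- Illustrative query: histogram data points with bucket counts
SELECT h.metric_name, d.time_unix, d."count", d.data_sum, d.data_min, d.data_max,
       b."count" AS bucket_count
FROM otel.metric_histogram h
JOIN otel.metric_histogram_datapoint d ON d.histogram_id = h.histogram_id
LEFT JOIN otel.metric_histogram_datapoint_bucket_count b
       ON b.histogram_id = d.histogram_id AND b.datapoint_id = d.id
ORDER BY d.time_unix DESC;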


------- EXPONENTIAL HISTOGRAM

CREATE TABLE otel.metric_exp_histogram
(
    histogram_id UUID (primary_key, shard_key) not null,
    metric_name varchar (256) not null,
    metric_description varchar (256),
    metric_unit varchar (256),
    aggregation_temporality int8

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_exp_histogram_datapoint
(
    histogram_id UUID (primary_key, shard_key) not null,
    id UUID (primary_key) not null,
    start_time_unix TIMESTAMP,
    time_unix TIMESTAMP NOT NULL,
    count LONG,
    data_sum DOUBLE,
    scale INTEGER,
    zero_count LONG,
    buckets_positive_offset INTEGER,
    buckets_negative_offset INTEGER,
    data_min DOUBLE,
    data_max DOUBLE,
    flags INT,
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_datapoint

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_exp_histogram_datapoint_bucket_positive_count
(
    histogram_id UUID (primary_key, shard_key) not null,
    datapoint_id UUID (primary_key) not null,
    count_id UUID (primary_key) not null,
    count LONG,
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_datapoint_bucket_count

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_exp_histogram_datapoint_bucket_negative_count
(
    histogram_id UUID (primary_key, shard_key) not null,
    datapoint_id UUID (primary_key) not null,
    count_id UUID (primary_key) not null,
    count LONG,
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_datapoint_bucket_count

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_exp_histogram_datapoint_attribute"
(
    "histogram_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_datapoint_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_exp_histogram_datapoint_exemplar
(
    "histogram_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    time_unix TIMESTAMP NOT NULL,
    sum_value DOUBLE,
    "trace_id" VARCHAR (32),
    "span_id" VARCHAR (16),
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_datapoint_exemplar
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_exp_histogram_datapoint_exemplar_attribute
(
    "histogram_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    exemplar_id UUID (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_datapoint_exemplar_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_exp_histogram_resource_attribute"
(
    "histogram_id" UUID (primary_key) NOT NULL,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (histogram_id),
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_resource_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_exp_histogram_scope_attribute"
(
    "histogram_id" UUID (primary_key) NOT NULL,
    "name" VARCHAR (256),
    "version" VARCHAR (256),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (histogram_id),
    FOREIGN KEY (histogram_id) references otel.metric_exp_histogram(histogram_id) as fk_exp_histogram_scope_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
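
As with the plain histogram tables, an illustrative query can pull an exponential histogram data point together with its positive bucket counts:

-- Illustrative query: exponential histogram data points with positive bucket counts
SELECT h.metric_name, d.time_unix, d.scale, d.zero_count, p."count" AS positive_bucket_count
FROM otel.metric_exp_histogram h
JOIN otel.metric_exp_histogram_datapoint d ON d.histogram_id = h.histogram_id
LEFT JOIN otel.metric_exp_histogram_datapoint_bucket_positive_count p
       ON p.histogram_id = d.histogram_id AND p.datapoint_id = d.id
ORDER BY d.time_unix DESC;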

------- SUMMARY

CREATE TABLE otel.metric_summary
(
    summary_id UUID (primary_key, shard_key) not null,
    metric_name varchar (256) not null,
    metric_description varchar (256),
    metric_unit varchar (256)
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_summary_datapoint
(
    summary_id UUID (primary_key, shard_key) not null,
    id UUID (primary_key) not null,
    start_time_unix TIMESTAMP,
    time_unix TIMESTAMP NOT NULL,
    count LONG,
    data_sum DOUBLE,
    flags INT,
    FOREIGN KEY (summary_id) references otel.metric_summary(summary_id) as fk_summary_datapoint

) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_summary_datapoint_attribute"
(
    "summary_id" UUID (primary_key, shard_key) NOT NULL,
    datapoint_id uuid (primary_key) not null,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    FOREIGN KEY (summary_id) references otel.metric_summary(summary_id) as fk_summary_datapoint_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE otel.metric_summary_datapoint_quantile_values
(
    summary_id UUID (primary_key, shard_key) not null,
    datapoint_id UUID (primary_key) not null,
    quantile_id UUID (primary_key) not null,
    quantile DOUBLE,
    value DOUBLE,
    FOREIGN KEY (summary_id) references otel.metric_summary(summary_id) as fk_summary_datapoint_quantile
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_summary_resource_attribute"
(
    "summary_id" UUID (primary_key) NOT NULL,
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (summary_id),
    FOREIGN KEY (summary_id) references otel.metric_summary(summary_id) as fk_summary_resource_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);

CREATE TABLE "otel"."metric_summary_scope_attribute"
(
    "summary_id" UUID (primary_key) NOT NULL,
    "name" VARCHAR (256),
    "version" VARCHAR (256),
    "key" VARCHAR (primary_key, 128, dict) NOT NULL,
    "string_value" VARCHAR (256),
    "bool_value" BOOLEAN,
    "int_value" INTEGER,
    "double_value" DOUBLE,
    "bytes_value" BLOB (store_only),
    SHARD KEY (summary_id),
    FOREIGN KEY (summary_id) references otel.metric_summary(summary_id) as fk_summary_scope_attribute
) USING TABLE PROPERTIES (NO_ERROR_IF_EXISTS = TRUE);
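
Finally, an illustrative query joins a summary data point with its quantile values:

-- Illustrative query: summary data points with quantile values
SELECT m.metric_name, d.time_unix, d."count", d.data_sum,
       q.quantile, q."value" AS quantile_value
FROM otel.metric_summary m
JOIN otel.metric_summary_datapoint d ON d.summary_id = m.summary_id
LEFT JOIN otel.metric_summary_datapoint_quantile_values q
       ON q.summary_id = d.summary_id AND q.datapoint_id = d.id
ORDER BY d.time_unix DESC;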

Kinetica OpenTelemetry Exporter

This exporter can be used as part of an OpenTelemetry Collector to persist log, trace, and metric data to the Kinetica database.

This component is under development.

Steps to build the collector binary

  • Please refer to https://github.com/open-telemetry/opentelemetry-collector/tree/main/cmd/builder for instructions on downloading the latest OpenTelemetry Collector Builder and installing it locally. As that page notes, the binary is named ocb when installed from an official release, and builder when installed with go install.
  • An example config file for building the collector binary is as follows:
dist:
  name: otelcol-kinetica
  description: Otel collector with Kinetica exporter
  output_path: /home/kinetica/otelexporter_utils/collector-binary
  otelcol_version: 0.78.2

exporters:
  - gomod:
      github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.78.0

processors:
  - gomod:
      go.opentelemetry.io/collector/processor/batchprocessor v0.78.2

receivers:
  - gomod:
      go.opentelemetry.io/collector/receiver/otlpreceiver v0.78.2
  - gomod:
      github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.78.0
  • Save the builder config file, e.g. as config_file.yaml.
  • Once the builder config file has been created, the binary can be built with the command ocb --config config_file.yaml.

Configuring the collector

The OpenTelemetry Collector pipeline is made up of several components:

  1. The Receiver - receives telemetry data from sources that have been instrumented to produce logs, traces, and metrics.
  2. The Processor - telemetry data received from the receivers is passed on to this component for processing (for example, batching).
  3. The Exporter - fed by the processor; persists the telemetry data to a data store (like Kinetica) or hands it off to another component for further processing.

A sample configuration for the collector binary is as follows:

receivers:
  otlp:
    protocols:
      grpc:
        endpoint: localhost:4317

  prometheus:
      config:
        scrape_configs:
          - job_name: 'ki_stats'
            honor_labels: true
            static_configs:
              - targets: ['172.31.32.21:9010', '172.31.32.15:9010', '172.31.32.16:9010', '172.31.32.18:9010', '172.31.33.29:9010', '172.31.32.19:9010', '172.31.32.26:9010', '172.31.32.20:9010', '172.31.32.17:9010']

processors:
  batch:

exporters:
  kinetica:
    host: http://localhost:9191/
    schema: otel
    username: admin
    password: password
    bypasssslcertcheck: true
    logconfigfile: log_config.yaml


service:
  pipelines:
    traces:
      receivers:
      - otlp
      processors:
      - batch
      exporters:
      - kinetica
    metrics:
      receivers:
      - otlp
      - prometheus
      processors:
      - batch
      exporters:
      - kinetica
    logs:
      receivers:
      - otlp
      processors:
      - batch
      exporters:
      - kinetica

The first section of the config file declares the components (receivers, processors, exporters, etc.) along with their configurations. The second section defines the service, which wires those components into pipelines for logs, traces, and metrics.

Save the config file as collector-config.yaml.

The command to run the collector binary produced by the steps described above is:

collector-binary --config collector-config.yaml

Configuring the Kinetica Exporter

The following parameters define the connection for the Kinetica OpenTelemetry Collector Exporter.

Configuration parameters for the Kinetica Exporter:

Name                 Description
host                 Kinetica host URL
schema               Kinetica OpenTelemetry schema; if omitted, the default schema is used
username             Kinetica user
password             Kinetica password
bypasssslcertcheck   Whether to skip the SSL certificate check (true or false)
logconfigfile        Name of the log config file
Configuring Logging

Logging is done using the Uber zap package, with lumberjack for size-based log rotation. Time-based rotation is not supported yet. The configuration options for lumberjack can be found at https://pkg.go.dev/gopkg.in/natefinch/lumberjack.v2

Default Log Config file

A default config file named config_log_zap.yaml is included and will be used if a user-defined config file is not found.

level: 'info'
development: true
disableCaller: false
disableStacktrace: false
encoding: 'console'
encoderConfig:
  messageKey: 'msg'
  levelKey: 'level'
  timeKey: 'ts'
  nameKey: 'logger'
  callerKey: 'caller'
  functionKey: 'function'
  stacktraceKey: 'stacktrace'
  skipLineEnding: false
  lineEnding: "\n"
  levelEncoder: 'capital'
  timeEncoder: 'iso8601'
  durationEncoder: 'string'
  callerEncoder: 'full'
  nameEncoder: 'full'
  consoleSeparator: ' | '
outputPaths:
  # Implements logging to the console
  - 'stdout'
  # Implements rolling logs using lumberjack logger; config parameters are supplied as
  # query params. Here maxSize is 10MB after which the logger rolls over; maximum
  # number of backups (maxBackups) kept is 5 and maxAge is 10 days.
  # The name of the log file in this case is "logs/kinetica-exporter.log" where the
  # "logs" directory is under the current directory on the local machine.
  - 'lumberjack://localhost/logs/kinetica-exporter.log?maxSize=10&maxBackups=5&maxAge=10'
errorOutputPaths:
  - 'stderr'
  - './logs/error_logs'
initialFields:
  app: 'kinetica-exporter'

Documentation

Support

For support, you can post on Stack Overflow under the kinetica tag or ask in the Community Slack.

Contact Us

Documentation

Overview

Package kineticaexporter exports log/metrics data to Kinetica.

Index

Constants

const (
	MeasurementSpans     = "spans"
	MeasurementSpanLinks = "span-links"
	MeasurementLogs      = "logs"

	// These attribute key names are influenced by the proto message keys.
	AttributeTime                   = "time"
	AttributeStartTimeUnixNano      = "start_time_unix_nano"
	AttributeTraceID                = "trace_id"
	AttributeSpanID                 = "span_id"
	AttributeTraceState             = "trace_state"
	AttributeParentSpanID           = "parent_span_id"
	AttributeParentServiceName      = "parent_service_name"
	AttributeChildServiceName       = "child_service_name"
	AttributeCallCount              = "call_count"
	AttributeSpansQueueDepth        = "spans_queue_depth"
	AttributeSpansDropped           = "spans_dropped"
	AttributeName                   = "name"
	AttributeSpanKind               = "kind"
	AttributeEndTimeUnixNano        = "end_time_unix_nano"
	AttributeDurationNano           = "duration_nano"
	AttributeDroppedAttributesCount = "dropped_attributes_count"
	AttributeDroppedEventsCount     = "dropped_events_count"
	AttributeDroppedLinksCount      = "dropped_links_count"
	AttributeAttributes             = "attributes"
	AttributeLinkedTraceID          = "linked_trace_id"
	AttributeLinkedSpanID           = "linked_span_id"
	AttributeSeverityNumber         = "severity_number"
	AttributeSeverityText           = "severity_text"
	AttributeBody                   = "body"

	LogTable                  = "log"
	LogAttributeTable         = "log_attribute"
	LogResourceAttributeTable = "log_resource_attribute"
	LogScopeAttributeTable    = "log_scope_attribute"

	TraceSpanTable              = "trace_span"
	TraceSpanAttributeTable     = "trace_span_attribute"
	TraceResourceAttributeTable = "trace_resource_attribute"
	TraceScopeAttributeTable    = "trace_scope_attribute"
	TraceEventAttributeTable    = "trace_event_attribute"
	TraceLinkAttributeTable     = "trace_link_attribute"

	GaugeTable                           = "metric_gauge"
	GaugeDatapointTable                  = "metric_gauge_datapoint"
	GaugeDatapointAttributeTable         = "metric_gauge_datapoint_attribute"
	GaugeDatapointExemplarTable          = "metric_gauge_datapoint_exemplar"
	GaugeDatapointExemplarAttributeTable = "metric_gauge_datapoint_exemplar_attribute"
	GaugeResourceAttributeTable          = "metric_gauge_resource_attribute"
	GaugeScopeAttributeTable             = "metric_gauge_scope_attribute"

	SumTable                           = "metric_sum"
	SumResourceAttributeTable          = "metric_sum_resource_attribute"
	SumScopeAttributeTable             = "metric_sum_scope_attribute"
	SumDatapointTable                  = "metric_sum_datapoint"
	SumDatapointAttributeTable         = "metric_sum_datapoint_attribute"
	SumDatapointExemplarTable          = "metric_sum_datapoint_exemplar"
	SumDataPointExemplarAttributeTable = "metric_sum_datapoint_exemplar_attribute"

	HistogramTable                           = "metric_histogram"
	HistogramResourceAttributeTable          = "metric_histogram_resource_attribute"
	HistogramScopeAttributeTable             = "metric_histogram_scope_attribute"
	HistogramDatapointTable                  = "metric_histogram_datapoint"
	HistogramDatapointAttributeTable         = "metric_histogram_datapoint_attribute"
	HistogramBucketCountsTable               = "metric_histogram_datapoint_bucket_count"
	HistogramExplicitBoundsTable             = "metric_histogram_datapoint_explicit_bound"
	HistogramDatapointExemplarTable          = "metric_histogram_datapoint_exemplar"
	HistogramDataPointExemplarAttributeTable = "metric_histogram_datapoint_exemplar_attribute"

	ExpHistogramTable                           = "metric_exp_histogram"
	ExpHistogramResourceAttributeTable          = "metric_exp_histogram_resource_attribute"
	ExpHistogramScopeAttributeTable             = "metric_exp_histogram_scope_attribute"
	ExpHistogramDatapointTable                  = "metric_exp_histogram_datapoint"
	ExpHistogramDatapointAttributeTable         = "metric_exp_histogram_datapoint_attribute"
	ExpHistogramPositiveBucketCountsTable       = "metric_exp_histogram_datapoint_bucket_positive_count"
	ExpHistogramNegativeBucketCountsTable       = "metric_exp_histogram_datapoint_bucket_negative_count"
	ExpHistogramDatapointExemplarTable          = "metric_exp_histogram_datapoint_exemplar"
	ExpHistogramDataPointExemplarAttributeTable = "metric_exp_histogram_datapoint_exemplar_attribute"

	SummaryTable                       = "metric_summary"
	SummaryResourceAttributeTable      = "metric_summary_resource_attribute"
	SummaryScopeAttributeTable         = "metric_summary_scope_attribute"
	SummaryDatapointTable              = "metric_summary_datapoint"
	SummaryDatapointAttributeTable     = "metric_summary_datapoint_attribute"
	SummaryDatapointQuantileValueTable = "metric_summary_datapoint_quantile_values"

	ChunkSize = 10000
)
const (
	CreateSchema string = "create schema if not exists %s;"

	HasTable string = "execute endpoint '/has/table' JSON '{\"table_name\":\"%s\"}'"

	// Metrics - DDLs
	// Gauge
	CreateGauge string = `` /* 239-byte string literal not displayed */

	CreateGaugeDatapoint string = `` /* 370-byte string literal not displayed */

	CreateGaugeDatapointAttribute string = `` /* 487-byte string literal not displayed */

	CreateGaugeDatapointExemplar string = `` /* 445-byte string literal not displayed */

	CreateGaugeDatapointExemplarAttribute string = `` /* 548-byte string literal not displayed */

	CreateGaugeResourceAttribute string = `` /* 454-byte string literal not displayed */

	CreateGaugeScopeAttribute string = `` /* 499-byte string literal not displayed */

	CreateSum string = `` /* 295-byte string literal not displayed */

	CreateSumDatapoint string = `` /* 356-byte string literal not displayed */

	CreateSumDatapointAttribute string = `` /* 475-byte string literal not displayed */

	CreateSumDatapointExemplar string = `` /* 431-byte string literal not displayed */

	CreateSumDatapointExemplarAttribute string = `` /* 536-byte string literal not displayed */

	CreateSumResourceAttribute string = `` /* 440-byte string literal not displayed */

	CreateSumScopeAttribute string = `` /* 485-byte string literal not displayed */

	CreateHistogram string = `` /* 280-byte string literal not displayed */

	CreateHistogramDatapoint string = `` /* 434-byte string literal not displayed */

	CreateHistogramDatapointBucketCount string = `` /* 379-byte string literal not displayed */

	CreateHistogramDatapointExplicitBound string = `` /* 394-byte string literal not displayed */

	CreateHistogramDatapointAttribute string = `` /* 511-byte string literal not displayed */

	CreateHistogramDatapointExemplar string = `` /* 473-byte string literal not displayed */

	CreateHistogramDatapointExemplarAttribute string = `` /* 572-byte string literal not displayed */

	CreateHistogramResourceAttribute string = `` /* 482-byte string literal not displayed */

	CreateHistogramScopeAttribute string = `` /* 527-byte string literal not displayed */

	// exponential Histogram
	CreateExpHistogram string = `` /* 284-byte string literal not displayed */

	CreateExpHistogramDatapoint string = `` /* 552-byte string literal not displayed */

	CreateExpHistogramDatapointBucketPositiveCount string = `` /* 400-byte string literal not displayed */

	CreateExpHistogramDatapointBucketNegativeCount string = `` /* 400-byte string literal not displayed */

	CreateExpHistogramDatapointAttribute string = `` /* 523-byte string literal not displayed */

	CreateExpHistogramDatapointExemplar string = `` /* 479-byte string literal not displayed */

	CreateExpHistogramDatapointExemplarAttribute string = `` /* 584-byte string literal not displayed */

	CreateExpHistogramResourceAttribute string = `` /* 486-byte string literal not displayed */

	CreateExpHistogramScopeAttribute string = `` /* 531-byte string literal not displayed */

	// Summary
	CreateSummary string = `` /* 244-byte string literal not displayed */

	CreateSummaryDatapoint string = `` /* 384-byte string literal not displayed */

	CreateSummaryDatapointAttribute string = `` /* 499-byte string literal not displayed */

	CreateSummaryDatapointQuantileValues string = `` /* 390-byte string literal not displayed */

	CreateSummaryResourceAttribute string = `` /* 468-byte string literal not displayed */

	CreateSummaryScopeAttribute string = `` /* 513-byte string literal not displayed */

)

Variables

This section is empty.

Functions

func NewFactory

func NewFactory() exporter.Factory

NewFactory creates a factory for Kinetica exporter.

@return exporter.Factory

Types

type AttributeValue added in v0.92.0

type AttributeValue struct {
	IntValue    int     `avro:"int_value"`
	StringValue string  `avro:"string_value"`
	BoolValue   int8    `avro:"bool_value"`
	DoubleValue float64 `avro:"double_value"`
	BytesValue  []byte  `avro:"bytes_value"`
}

AttributeValue - struct to contain attribute values of different types Used by other metric structs

type Config

type Config struct {
	Host               string              `mapstructure:"host"`
	Schema             string              `mapstructure:"schema"`
	Username           string              `mapstructure:"username"`
	Password           configopaque.String `mapstructure:"password"`
	BypassSslCertCheck bool                `mapstructure:"bypasssslcertcheck"`
}

Config defines configuration for the Kinetica exporter.

func (*Config) Validate

func (cfg *Config) Validate() error

Validate the config

@receiver cfg
@return error

type ExponentialHistogram added in v0.92.0

type ExponentialHistogram struct {
	HistogramID                    string `avro:"histogram_id"`
	MetricName                     string `avro:"metric_name"`
	Description                    string `avro:"metric_description"`
	Unit                           string `avro:"metric_unit"`
	pmetric.AggregationTemporality `avro:"aggregation_temporality"`
}

ExponentialHistogram - struct modeling an Exponential Histogram

type ExponentialHistogramBucketNegativeCount added in v0.92.0

type ExponentialHistogramBucketNegativeCount struct {
	HistogramID string `avro:"histogram_id"`
	DatapointID string `avro:"datapoint_id"`
	CountID     string `avro:"count_id"`
	Count       uint64 `avro:"count"`
}

ExponentialHistogramBucketNegativeCount - struct modeling an Exponential Histogram Bucket Negative Count

type ExponentialHistogramBucketPositiveCount added in v0.92.0

type ExponentialHistogramBucketPositiveCount struct {
	HistogramID string `avro:"histogram_id"`
	DatapointID string `avro:"datapoint_id"`
	CountID     string `avro:"count_id"`
	Count       int64  `avro:"count"`
}

ExponentialHistogramBucketPositiveCount - struct modeling an Exponential Histogram Bucket Positive Count

type ExponentialHistogramDataPointAttribute added in v0.92.0

type ExponentialHistogramDataPointAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	DatapointID    string `avro:"datapoint_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

ExponentialHistogramDataPointAttribute - struct modeling an Exponential Histogram Datapoint attribute

type ExponentialHistogramDataPointExemplarAttribute added in v0.92.0

type ExponentialHistogramDataPointExemplarAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	DatapointID    string `avro:"datapoint_id"`
	ExemplarID     string `avro:"exemplar_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

ExponentialHistogramDataPointExemplarAttribute - struct modeling an Exponential Histogram Datapoint Exemplar attribute

type ExponentialHistogramDatapoint added in v0.92.0

type ExponentialHistogramDatapoint struct {
	HistogramID           string  `avro:"histogram_id"`
	ID                    string  `avro:"id"`
	StartTimeUnix         int64   `avro:"start_time_unix"`
	TimeUnix              int64   `avro:"time_unix"`
	Count                 int64   `avro:"count"`
	Sum                   float64 `avro:"data_sum"`
	Min                   float64 `avro:"data_min"`
	Max                   float64 `avro:"data_max"`
	Flags                 int     `avro:"flags"`
	Scale                 int     `avro:"scale"`
	ZeroCount             int64   `avro:"zero_count"`
	BucketsPositiveOffset int     `avro:"buckets_positive_offset"`
	BucketsNegativeOffset int     `avro:"buckets_negative_offset"`
}

ExponentialHistogramDatapoint - struct modeling an Exponential Histogram Datapoint

type ExponentialHistogramDatapointExemplar added in v0.92.0

type ExponentialHistogramDatapointExemplar struct {
	HistogramID    string  `avro:"histogram_id"`
	DatapointID    string  `avro:"datapoint_id"`
	ExemplarID     string  `avro:"exemplar_id"`
	TimeUnix       int64   `avro:"time_unix"`
	HistogramValue float64 `avro:"histogram_value"`
	TraceID        string  `mapstructure:"trace_id" avro:"trace_id"`
	SpanID         string  `mapstructure:"span_id" avro:"span_id"`
}

ExponentialHistogramDatapointExemplar - struct modeling an Exponential Histogram Datapoint Exemplar

type ExponentialHistogramResourceAttribute added in v0.92.0

type ExponentialHistogramResourceAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

ExponentialHistogramResourceAttribute - struct modeling an Exponential Histogram Resource attribute

type ExponentialHistogramScopeAttribute added in v0.92.0

type ExponentialHistogramScopeAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	ScopeName      string `avro:"name"`
	ScopeVersion   string `avro:"version"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

ExponentialHistogramScopeAttribute - struct modeling an Exponential Histogram Scope attribute

type Gauge added in v0.92.0

type Gauge struct {
	GaugeID     string `avro:"gauge_id"`
	MetricName  string `avro:"metric_name"`
	Description string `avro:"metric_description"`
	Unit        string `avro:"metric_unit"`
}

Gauge - struct modeling the Gauge data

type GaugeDataPointExemplarAttribute added in v0.92.0

type GaugeDataPointExemplarAttribute struct {
	GaugeID        string `avro:"gauge_id"`
	DatapointID    string `avro:"datapoint_id"`
	ExemplarID     string `avro:"exemplar_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

GaugeDataPointExemplarAttribute - struct modeling a Gauge Datapoint Exemplar attribute

type GaugeDatapoint added in v0.92.0

type GaugeDatapoint struct {
	GaugeID       string  `avro:"gauge_id"`
	ID            string  `avro:"id"`
	StartTimeUnix int64   `mapstructure:"start_time_unix" avro:"start_time_unix"`
	TimeUnix      int64   `mapstructure:"time_unix" avro:"time_unix"`
	GaugeValue    float64 `mapstructure:"gauge_value" avro:"gauge_value"`
	Flags         int     `mapstructure:"flags" avro:"flags"`
}

GaugeDatapoint - struct modeling the Gauge Datapoint

type GaugeDatapointAttribute added in v0.92.0

type GaugeDatapointAttribute struct {
	GaugeID        string `avro:"gauge_id"`
	DatapointID    string `avro:"datapoint_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

GaugeDatapointAttribute - struct modeling the Gauge Datapoint attributes

type GaugeDatapointExemplar added in v0.92.0

type GaugeDatapointExemplar struct {
	GaugeID     string  `avro:"gauge_id"`
	DatapointID string  `avro:"datapoint_id"`
	ExemplarID  string  `avro:"exemplar_id"`
	TimeUnix    int64   `mapstructure:"time_unix" avro:"time_unix"`
	GaugeValue  float64 `mapstructure:"gauge_value" avro:"gauge_value"`
	TraceID     string  `mapstructure:"trace_id" avro:"trace_id"`
	SpanID      string  `mapstructure:"span_id" avro:"span_id"`
}

GaugeDatapointExemplar - struct modeling a Gauge Datapoint Exemplar

type GaugeResourceAttribute added in v0.92.0

type GaugeResourceAttribute struct {
	GaugeID        string `avro:"gauge_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

GaugeResourceAttribute - struct modeling a Gauge resource attribute

type GaugeScopeAttribute added in v0.92.0

type GaugeScopeAttribute struct {
	GaugeID        string `avro:"gauge_id"`
	ScopeName      string `avro:"name"`
	ScopeVersion   string `avro:"version"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

GaugeScopeAttribute - struct modeling a Gauge Scope attribute

type Histogram added in v0.92.0

type Histogram struct {
	HistogramID                    string `avro:"histogram_id"`
	MetricName                     string `avro:"metric_name"`
	Description                    string `avro:"metric_description"`
	Unit                           string `avro:"metric_unit"`
	pmetric.AggregationTemporality `avro:"aggregation_temporality"`
}

Histogram - struct modeling a Histogram metric type

type HistogramDataPointAttribute added in v0.92.0

type HistogramDataPointAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	DatapointID    string `avro:"datapoint_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

HistogramDataPointAttribute - struct modeling a Histogram Datapoint attribute

type HistogramDataPointExemplarAttribute added in v0.92.0

type HistogramDataPointExemplarAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	DatapointID    string `avro:"datapoint_id"`
	ExemplarID     string `avro:"exemplar_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

HistogramDataPointExemplarAttribute - struct modeling a Histogram Datapoint Exemplar attribute

type HistogramDatapoint added in v0.92.0

type HistogramDatapoint struct {
	HistogramID   string  `avro:"histogram_id"`
	ID            string  `avro:"id"`
	StartTimeUnix int64   `avro:"start_time_unix"`
	TimeUnix      int64   `avro:"time_unix"`
	Count         int64   `avro:"count"`
	Sum           float64 `avro:"data_sum"`
	Min           float64 `avro:"data_min"`
	Max           float64 `avro:"data_max"`
	Flags         int     `avro:"flags"`
}

HistogramDatapoint - struct modeling a Histogram Datapoint

type HistogramDatapointBucketCount added in v0.92.0

type HistogramDatapointBucketCount struct {
	HistogramID string `avro:"histogram_id"`
	DatapointID string `avro:"datapoint_id"`
	CountID     string `avro:"count_id"`
	Count       int64  `avro:"count"`
}

HistogramDatapointBucketCount - struct modeling a Histogram Datapoint Bucket Count

type HistogramDatapointExemplar added in v0.92.0

type HistogramDatapointExemplar struct {
	HistogramID    string  `avro:"histogram_id"`
	DatapointID    string  `avro:"datapoint_id"`
	ExemplarID     string  `avro:"exemplar_id"`
	TimeUnix       int64   `avro:"time_unix"`
	HistogramValue float64 `avro:"histogram_value"`
	TraceID        string  `mapstructure:"trace_id" avro:"trace_id"`
	SpanID         string  `mapstructure:"span_id" avro:"span_id"`
}

HistogramDatapointExemplar - struct modeling a Histogram Datapoint Exemplar

type HistogramDatapointExplicitBound added in v0.92.0

type HistogramDatapointExplicitBound struct {
	HistogramID   string  `avro:"histogram_id"`
	DatapointID   string  `avro:"datapoint_id"`
	BoundID       string  `avro:"bound_id"`
	ExplicitBound float64 `avro:"explicit_bound"`
}

HistogramDatapointExplicitBound - struct modeling a Histogram Datapoint Explicit Bound

type HistogramResourceAttribute added in v0.92.0

type HistogramResourceAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

HistogramResourceAttribute - struct modeling a Histogram Resource Attribute

type HistogramScopeAttribute added in v0.92.0

type HistogramScopeAttribute struct {
	HistogramID    string `avro:"histogram_id"`
	ScopeName      string `avro:"name"`
	ScopeVersion   string `avro:"version"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

HistogramScopeAttribute - struct modeling a Histogram Scope Attribute

type KiWriter added in v0.92.0

type KiWriter struct {
	Db      kinetica.Kinetica
	Options kinetica.KineticaOptions
	// contains filtered or unexported fields
}

KiWriter - struct modeling the Kinetica connection, contains the Kinetica connection kinetica.Kinetica, the Kinetica Options kinetica.KineticaOptions, the config Config and the logger zap.Logger

var Writer *KiWriter

Writer - global pointer to kiwriter struct initialized in the init func

func (*KiWriter) GetCfg added in v0.92.0

func (kiwriter *KiWriter) GetCfg() Config

GetCfg - Getter for the Config value

@receiver kiwriter
@return Config

func (*KiWriter) GetDb added in v0.92.0

func (kiwriter *KiWriter) GetDb() kinetica.Kinetica

GetDb - Getter for the Kinetica instance

@receiver kiwriter
@return kinetica.Kinetica

func (*KiWriter) GetOptions added in v0.92.0

func (kiwriter *KiWriter) GetOptions() kinetica.KineticaOptions

GetOptions - Getter for the Kinetica options.

@receiver kiwriter
@return kinetica.KineticaOptions

type Sum added in v0.92.0

type Sum struct {
	SumID                          string `avro:"sum_id"`
	MetricName                     string `avro:"metric_name"`
	Description                    string `avro:"metric_description"`
	Unit                           string `avro:"metric_unit"`
	pmetric.AggregationTemporality `avro:"aggregation_temporality"`
	IsMonotonic                    int8 `avro:"is_monotonic"`
}

Sum - struct modeling a Sum metric

type SumDataPointAttribute added in v0.92.0

type SumDataPointAttribute struct {
	SumID          string `avro:"sum_id"`
	DatapointID    string `avro:"datapoint_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SumDataPointAttribute - struct modeling a Sum Datapoint attribute

type SumDataPointExemplarAttribute added in v0.92.0

type SumDataPointExemplarAttribute struct {
	SumID          string `avro:"sum_id"`
	DatapointID    string `avro:"datapoint_id"`
	ExemplarID     string `avro:"exemplar_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SumDataPointExemplarAttribute - struct modeling a Sum Datapoint Exemplar attribute

type SumDatapoint added in v0.92.0

type SumDatapoint struct {
	SumID         string  `avro:"sum_id"`
	ID            string  `avro:"id"`
	StartTimeUnix int64   `mapstructure:"start_time_unix" avro:"start_time_unix"`
	TimeUnix      int64   `mapstructure:"time_unix" avro:"time_unix"`
	SumValue      float64 `mapstructure:"sum_value" avro:"sum_value"`
	Flags         int     `mapstructure:"flags" avro:"flags"`
}

SumDatapoint - struct modeling a Sum Datapoint

type SumDatapointExemplar added in v0.92.0

type SumDatapointExemplar struct {
	SumID       string  `avro:"sum_id"`
	DatapointID string  `avro:"datapoint_id"`
	ExemplarID  string  `avro:"exemplar_id"`
	TimeUnix    int64   `mapstructure:"time_unix" avro:"time_unix"`
	SumValue    float64 `mapstructure:"sum_value" avro:"sum_value"`
	TraceID     string  `mapstructure:"trace_id" avro:"trace_id"`
	SpanID      string  `mapstructure:"span_id" avro:"span_id"`
}

SumDatapointExemplar - struct modeling a Sum Datapoint Exemplar

type SumResourceAttribute added in v0.92.0

type SumResourceAttribute struct {
	SumID          string `avro:"sum_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SumResourceAttribute - struct modeling a Sum Resource attribute

type SumScopeAttribute added in v0.92.0

type SumScopeAttribute struct {
	SumID          string `avro:"sum_id"`
	ScopeName      string `avro:"name"`
	ScopeVersion   string `avro:"version"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SumScopeAttribute - struct modeling a Sum Scope attribute

type Summary added in v0.92.0

type Summary struct {
	SummaryID   string `avro:"summary_id"`
	MetricName  string `avro:"metric_name"`
	Description string `avro:"metric_description"`
	Unit        string `avro:"metric_unit"`
}

Summary - struct modeling a Summary type metric

type SummaryDataPointAttribute added in v0.92.0

type SummaryDataPointAttribute struct {
	SummaryID      string `avro:"summary_id"`
	DatapointID    string `avro:"datapoint_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SummaryDataPointAttribute - struct modeling a Summary Datapoint attribute

type SummaryDatapoint added in v0.92.0

type SummaryDatapoint struct {
	SummaryID     string  `avro:"summary_id"`
	ID            string  `avro:"id"`
	StartTimeUnix int64   `avro:"start_time_unix"`
	TimeUnix      int64   `avro:"time_unix"`
	Count         int64   `avro:"count"`
	Sum           float64 `avro:"data_sum"`
	Flags         int     `avro:"flags"`
}

SummaryDatapoint - struct modeling a Summary Datapoint

type SummaryDatapointQuantileValues added in v0.92.0

type SummaryDatapointQuantileValues struct {
	SummaryID   string  `avro:"summary_id"`
	DatapointID string  `avro:"datapoint_id"`
	QuantileID  string  `avro:"quantile_id"`
	Quantile    float64 `avro:"quantile"`
	Value       float64 `avro:"value"`
}

SummaryDatapointQuantileValues - struct modeling a Summary Datapoint Quantile value

type SummaryResourceAttribute added in v0.92.0

type SummaryResourceAttribute struct {
	SummaryID      string `avro:"summary_id"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SummaryResourceAttribute - struct modeling a Summary Resource attribute

type SummaryScopeAttribute added in v0.92.0

type SummaryScopeAttribute struct {
	SummaryID      string `avro:"summary_id"`
	ScopeName      string `avro:"name"`
	ScopeVersion   string `avro:"version"`
	Key            string `avro:"key"`
	AttributeValue `mapstructure:",squash"`
}

SummaryScopeAttribute - struct modeling a Summary Scope attribute

type ValueTypePair added in v0.92.0

type ValueTypePair struct {
	// contains filtered or unexported fields
}

ValueTypePair - struct to wrap a value as [any] and its type pcommon.ValueType

Directories

Path Synopsis
internal
