diff --git a/e2e_tests/api_server.go b/e2e_tests/api_server.go
new file mode 100644
index 0000000000..7f08f58393
--- /dev/null
+++ b/e2e_tests/api_server.go
@@ -0,0 +1,39 @@
+package e2e_tests
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"net/http"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+// createApiServer starts a stub HTTP server on the given port that answers
+// 200 OK to every request; the chart values substitute its address for
+// $API_URL_ENDPOINT.
+func createApiServer(t *testing.T, port int) {
+	mux := http.NewServeMux()
+	mux.HandleFunc("/", func(writer http.ResponseWriter, request *http.Request) {
+		writer.WriteHeader(http.StatusOK)
+	})
+
+	s := &http.Server{
+		Addr:    fmt.Sprintf("0.0.0.0:%d", port),
+		Handler: mux,
+	}
+
+	// stop the server when the test that created it finishes
+	t.Cleanup(func() {
+		assert.NoError(t, s.Shutdown(context.Background()))
+	})
+
+	go func() {
+		// ErrServerClosed is the expected result of Shutdown; anything else is a real failure.
+		// Use assert rather than require: require calls t.FailNow, which must not run outside the test goroutine.
+		if err := s.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) {
+			assert.NoError(t, err)
+		}
+	}()
+}
diff --git a/e2e_tests/e2e_test.go b/e2e_tests/e2e_test.go
index dd82040bb3..5901bc5607 100644
--- a/e2e_tests/e2e_test.go
+++ b/e2e_tests/e2e_test.go
@@ -50,18 +50,25 @@ import (
 // make cert-manager
 // kubectl get csr -o=jsonpath='{range.items[?(@.spec.signerName=="kubernetes.io/kubelet-serving")]}{.metadata.name}{" "}{end}' | xargs kubectl certificate approve
 
+// When running tests, you can use the following env vars to help with local development:
+// SKIP_SETUP: skip setting up the chart and apps. Useful if they are already deployed.
+// SKIP_TEARDOWN: skip deleting the chart and apps during cleanup. Useful to keep them around for local development.
+
 const (
 	testKubeConfig                         = "/tmp/kube-config-splunk-otel-collector-chart-e2e-testing"
 	hecReceiverPort                        = 8090
+	hecMetricsReceiverPort                 = 8091
 	signalFxReceiverPort                   = 9443
 	signalFxReceiverK8sClusterReceiverPort = 19443
 	otlpReceiverPort                       = 4317
+	apiPort                                = 8881
 )
 
 var setupRun = sync.Once{}
 
 type sinks struct {
 	logsConsumer                      *consumertest.LogsSink
+	hecMetricsConsumer                *consumertest.MetricsSink
 	agentMetricsConsumer              *consumertest.MetricsSink
 	k8sclusterReceiverMetricsConsumer *consumertest.MetricsSink
 	tracesConsumer                    *consumertest.TracesSink
@@ -71,11 +78,15 @@ var globalSinks *sinks
 
 func setupOnce(t *testing.T) *sinks {
 	setupRun.Do(func() {
+		// create a stub API server
+		createApiServer(t, apiPort)
+		// set up the ingest pipelines
+		logs, metrics := setupHEC(t)
 		globalSinks = &sinks{
-			logsConsumer:                      setupLogs(t),
-			agentMetricsConsumer:              setupMetrics(t, signalFxReceiverPort),
-			k8sclusterReceiverMetricsConsumer: setupMetrics(t, signalFxReceiverK8sClusterReceiverPort),
+			logsConsumer:                      logs,
+			hecMetricsConsumer:                metrics,
+			agentMetricsConsumer:              setupSignalfxReceiver(t, signalFxReceiverPort),
+			k8sclusterReceiverMetricsConsumer: setupSignalfxReceiver(t, signalFxReceiverK8sClusterReceiverPort),
 			tracesConsumer:                    setupTraces(t),
 		}
 		// deploy the chart and applications.
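[Reviewer note] createApiServer keeps a fixed port because the chart values need a concrete $API_URL_ENDPOINT. Where a fixed port is not required, Go's net/http/httptest can own the listener lifecycle instead; a minimal sketch under that assumption (createApiServerEphemeral is a hypothetical name, not part of this change):

	package e2e_tests

	import (
		"net/http"
		"net/http/httptest"
		"testing"
	)

	// createApiServerEphemeral is a hypothetical httptest-based variant of
	// createApiServer: httptest picks a free port, and Close tears the server
	// down, so no explicit Shutdown wiring is needed. It returns the base URL.
	func createApiServerEphemeral(t *testing.T) string {
		srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			w.WriteHeader(http.StatusOK)
		}))
		t.Cleanup(srv.Close)
		return srv.URL
	}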
@@ -83,12 +94,12 @@ func setupOnce(t *testing.T) *sinks {
 			t.Log("Skipping setup as SKIP_SETUP is set to true")
 			return
 		}
-		setup(t)
+		deployChartsAndApps(t)
 	})
 	return globalSinks
 }
 
-func setup(t *testing.T) {
+func deployChartsAndApps(t *testing.T) {
 	kubeConfig, err := clientcmd.BuildConfigFromFlags("", testKubeConfig)
 	require.NoError(t, err)
 	clientset, err := kubernetes.NewForConfig(kubeConfig)
 	require.NoError(t, err)
@@ -102,7 +113,9 @@ func setup(t *testing.T) {
 	valuesStr := strings.ReplaceAll(string(valuesBytes), "$K8S_CLUSTER_ENDPOINT", fmt.Sprintf("http://%s:%d", hostEndpoint(t), signalFxReceiverK8sClusterReceiverPort))
 	valuesStr = strings.ReplaceAll(valuesStr, "$AGENT_ENDPOINT", fmt.Sprintf("http://%s:%d", hostEndpoint(t), signalFxReceiverPort))
 	valuesStr = strings.ReplaceAll(valuesStr, "$LOG_HEC_ENDPOINT", fmt.Sprintf("http://%s:%d", hostEndpoint(t), hecReceiverPort))
+	valuesStr = strings.ReplaceAll(valuesStr, "$METRIC_HEC_ENDPOINT", fmt.Sprintf("http://%s:%d/services/collector", hostEndpoint(t), hecMetricsReceiverPort))
 	valuesStr = strings.ReplaceAll(valuesStr, "$OTLP_ENDPOINT", fmt.Sprintf("%s:%d", hostEndpoint(t), otlpReceiverPort))
+	valuesStr = strings.ReplaceAll(valuesStr, "$API_URL_ENDPOINT", fmt.Sprintf("http://%s:%d", hostEndpoint(t), apiPort))
 	var values map[string]interface{}
 	err = yaml.Unmarshal([]byte(valuesStr), &values)
 	require.NoError(t, err)
@@ -145,6 +158,21 @@ func setup(t *testing.T) {
 	}
 
 	waitForAllDeploymentsToStart(t, clientset)
+
+	t.Cleanup(func() {
+		if os.Getenv("SKIP_TEARDOWN") == "true" {
+			t.Log("Skipping teardown as SKIP_TEARDOWN is set to true")
+			return
+		}
+		waitTime := int64(0)
+		_ = deployments.Delete(context.Background(), "nodejs-test", metav1.DeleteOptions{
+			GracePeriodSeconds: &waitTime,
+		})
+		uninstall := action.NewUninstall(actionConfig)
+		uninstall.IgnoreNotFound = true
+		uninstall.Wait = true
+		_, _ = uninstall.Run("sock")
+	})
 }
 
 func Test_NodeJSTraces(t *testing.T) {
@@ -178,14 +206,34 @@ func Test_NodeJSTraces(t *testing.T) {
 
 func Test_KubernetesClusterReceiverMetrics(t *testing.T) {
 	metricsConsumer := setupOnce(t).k8sclusterReceiverMetricsConsumer
-	waitForMetrics(t, 3, metricsConsumer)
+	waitForMetrics(t, 10, metricsConsumer)
 
 	expectedMetricsFile := filepath.Join("testdata", "expected_cluster_receiver.yaml")
 	expectedMetrics, err := readMetrics(expectedMetricsFile)
 	require.NoError(t, err)
 
+	// start from pmetric.NewMetrics() rather than the zero value so MetricCount is safe to call when nothing matches
+	metricToCompare := pmetric.NewMetrics()
+OUTER:
+	for _, m := range metricsConsumer.AllMetrics() {
+		for i := 0; i < m.ResourceMetrics().Len(); i++ {
+			rm := m.ResourceMetrics().At(i)
+			for j := 0; j < rm.ScopeMetrics().Len(); j++ {
+				sm := rm.ScopeMetrics().At(j)
+				for k := 0; k < sm.Metrics().Len(); k++ {
+					metric := sm.Metrics().At(k)
+					if metric.Name() == "k8s.deployment.desired" {
+						metricToCompare = m
+						break OUTER
+					}
+				}
+			}
+		}
+	}
+	require.True(t, metricToCompare.MetricCount() > 0, "no payload containing k8s.deployment.desired was received")
+
 	require.NoError(t,
-		pmetrictest.CompareMetrics(expectedMetrics, metricsConsumer.AllMetrics()[len(metricsConsumer.AllMetrics())-1],
+		pmetrictest.CompareMetrics(expectedMetrics, metricToCompare,
 			pmetrictest.IgnoreTimestamp(),
 			pmetrictest.IgnoreStartTimestamp(),
 			pmetrictest.IgnoreMetricValues("k8s.deployment.desired", "k8s.deployment.available", "k8s.container.restarts", "k8s.container.cpu_request", "k8s.container.memory_request", "k8s.container.memory_limit"),
@@ -244,7 +291,12 @@ func Test_AgentMetrics(t *testing.T) {
 	agentMetricsConsumer := setupOnce(t).agentMetricsConsumer
 
 	metricNames := []string{
-		"otelcol_exporter_send_failed_log_records",
+		"container.filesystem.available",
+		"container.filesystem.capacity",
+		"container.filesystem.usage",
+		"container.memory.usage",
+		"k8s.pod.network.errors",
+		"k8s.pod.network.io",
 		"otelcol_exporter_sent_log_records",
 		"otelcol_otelsvc_k8s_ip_lookup_miss",
 		"otelcol_processor_refused_log_records",
@@ -253,7 +305,43 @@ func Test_AgentMetrics(t *testing.T) {
 		"otelcol_processor_batch_batch_send_size_sum",
 		"otelcol_processor_batch_batch_send_size_count",
 		"otelcol_processor_batch_batch_send_size_bucket",
-		"otelcol_otelsvc_k8s_namespace_updated",
+		"otelcol_exporter_queue_size",
+		"otelcol_exporter_sent_metric_points",
+		"otelcol_otelsvc_k8s_namespace_added",
+		"otelcol_otelsvc_k8s_pod_added",
+		"otelcol_otelsvc_k8s_pod_table_size",
+		"otelcol_otelsvc_k8s_pod_updated",
+		"otelcol_process_cpu_seconds",
+		"otelcol_process_memory_rss",
+		"otelcol_process_runtime_heap_alloc_bytes",
+		"otelcol_process_runtime_total_alloc_bytes",
+		"otelcol_process_runtime_total_sys_memory_bytes",
+		"otelcol_process_uptime",
+		"otelcol_processor_accepted_metric_points",
+		"otelcol_processor_batch_timeout_trigger_send",
+		"otelcol_processor_dropped_metric_points",
+		"otelcol_processor_refused_metric_points",
+		"otelcol_receiver_accepted_metric_points",
+		"otelcol_receiver_refused_metric_points",
+		"otelcol_scraper_errored_metric_points",
+		"otelcol_scraper_scraped_metric_points",
+		"system.cpu.load_average.15m",
+		"system.cpu.load_average.1m",
+		"system.cpu.load_average.5m",
+		"system.disk.operations",
+		"system.filesystem.usage",
+		"system.memory.usage",
+		"system.network.errors",
+		"system.network.io",
+		"system.paging.operations",
+	}
+	checkMetricsAreEmitted(t, agentMetricsConsumer, metricNames)
+}
+
+func Test_HECMetrics(t *testing.T) {
+	hecMetricsConsumer := setupOnce(t).hecMetricsConsumer
+
+	metricNames := []string{
 		"container.cpu.time",
 		"container.cpu.utilization",
 		"container.filesystem.available",
@@ -281,8 +369,15 @@ func Test_AgentMetrics(t *testing.T) {
 		"k8s.pod.network.errors",
 		"k8s.pod.network.io",
 		"otelcol_exporter_queue_size",
-		"otelcol_exporter_send_failed_metric_points",
 		"otelcol_exporter_sent_metric_points",
+		"otelcol_exporter_sent_log_records",
+		"otelcol_otelsvc_k8s_ip_lookup_miss",
+		"otelcol_processor_refused_log_records",
+		"otelcol_processor_dropped_log_records",
+		"otelcol_processor_accepted_log_records",
+		"otelcol_processor_batch_batch_send_size_sum",
+		"otelcol_processor_batch_batch_send_size_count",
+		"otelcol_processor_batch_batch_send_size_bucket",
 		"otelcol_otelsvc_k8s_namespace_added",
 		"otelcol_otelsvc_k8s_pod_added",
 		"otelcol_otelsvc_k8s_pod_table_size",
@@ -326,7 +421,7 @@ func Test_AgentMetrics(t *testing.T) {
 		"system.processes.count",
 		"system.processes.created",
 	}
-	checkMetricsAreEmitted(t, agentMetricsConsumer, metricNames)
+	checkMetricsAreEmitted(t, hecMetricsConsumer, metricNames)
 }
 
 func waitForAllDeploymentsToStart(t *testing.T, clientset *kubernetes.Clientset) {
@@ -360,7 +455,7 @@ func setupTraces(t *testing.T) *consumertest.TracesSink {
 	return tc
 }
 
-func setupMetrics(t *testing.T, port int) *consumertest.MetricsSink {
+func setupSignalfxReceiver(t *testing.T, port int) *consumertest.MetricsSink {
 	mc := new(consumertest.MetricsSink)
 	f := signalfxreceiver.NewFactory()
 	cfg := f.CreateDefaultConfig().(*signalfxreceiver.Config)
@@ -378,23 +473,34 @@ func setupMetrics(t *testing.T, port int) *consumertest.MetricsSink {
 	return mc
 }
 
-func setupLogs(t *testing.T) *consumertest.LogsSink {
+func setupHEC(t *testing.T) (*consumertest.LogsSink, *consumertest.MetricsSink) {
+	// the splunkhecreceiver handles logs and metrics poorly on a single endpoint, so run one receiver per signal on separate ports for now
 	f := splunkhecreceiver.NewFactory()
 	cfg := f.CreateDefaultConfig().(*splunkhecreceiver.Config)
 	cfg.Endpoint = fmt.Sprintf("0.0.0.0:%d", hecReceiverPort)
+	mCfg := f.CreateDefaultConfig().(*splunkhecreceiver.Config)
+	mCfg.Endpoint = fmt.Sprintf("0.0.0.0:%d", hecMetricsReceiverPort)
+
+	lc := new(consumertest.LogsSink)
+	mc := new(consumertest.MetricsSink)
 	rcvr, err := f.CreateLogsReceiver(context.Background(), receivertest.NewNopCreateSettings(), cfg, lc)
-	require.NoError(t, err)
+	// check err before the next create call reuses it; the previous ordering let the logs receiver error be overwritten unchecked
+	require.NoError(t, err, "failed creating logs receiver")
+	mrcvr, err := f.CreateMetricsReceiver(context.Background(), receivertest.NewNopCreateSettings(), mCfg, mc)
+	require.NoError(t, err, "failed creating metrics receiver")
 	require.NoError(t, rcvr.Start(context.Background(), componenttest.NewNopHost()))
-	require.NoError(t, err, "failed creating logs receiver")
 	t.Cleanup(func() {
 		assert.NoError(t, rcvr.Shutdown(context.Background()))
 	})
+	require.NoError(t, mrcvr.Start(context.Background(), componenttest.NewNopHost()))
+	t.Cleanup(func() {
+		assert.NoError(t, mrcvr.Shutdown(context.Background()))
+	})
-	return lc
+	return lc, mc
 }
 
 func checkMetricsAreEmitted(t *testing.T, mc *consumertest.MetricsSink, metricNames []string) {
@@ -402,25 +508,23 @@ func checkMetricsAreEmitted(t *testing.T, mc *consumertest.MetricsSink, metricNa
 	for _, name := range metricNames {
 		metricsToFind[name] = false
 	}
-	var stillMissing []string
 	timeoutMinutes := 3
 	require.Eventuallyf(t, func() bool {
-		if len(mc.AllMetrics()) == 0 {
-			return false
-		}
-		m := mc.AllMetrics()[len(mc.AllMetrics())-1]
-		for i := 0; i < m.ResourceMetrics().Len(); i++ {
-			rm := m.ResourceMetrics().At(i)
-			for j := 0; j < rm.ScopeMetrics().Len(); j++ {
-				sm := rm.ScopeMetrics().At(j)
-				for k := 0; k < sm.Metrics().Len(); k++ {
-					metric := sm.Metrics().At(k)
-					metricsToFind[metric.Name()] = true
+		for _, m := range mc.AllMetrics() {
+			for i := 0; i < m.ResourceMetrics().Len(); i++ {
+				rm := m.ResourceMetrics().At(i)
+				for j := 0; j < rm.ScopeMetrics().Len(); j++ {
+					sm := rm.ScopeMetrics().At(j)
+					for k := 0; k < sm.Metrics().Len(); k++ {
+						metric := sm.Metrics().At(k)
+						metricsToFind[metric.Name()] = true
+					}
 				}
 			}
 		}
-		stillMissing = []string{}
+		var stillMissing []string
+		var found []string
 		missingCount := 0
 		foundCount := 0
 		for _, name := range metricNames {
@@ -428,13 +532,15 @@ func checkMetricsAreEmitted(t *testing.T, mc *consumertest.MetricsSink, metricNa
 				stillMissing = append(stillMissing, name)
 				missingCount++
 			} else {
+				found = append(found, name)
 				foundCount++
 			}
 		}
+		t.Logf("Found: %s", strings.Join(found, ","))
 		t.Logf("Metrics found: %d, metrics still missing: %d\n%s\n", foundCount, missingCount, strings.Join(stillMissing, ","))
 		return missingCount == 0
-	}, time.Duration(timeoutMinutes)*time.Minute, 1*time.Second,
-		"failed to receive all metrics %d minutes, missing metrics: %s", timeoutMinutes, strings.Join(stillMissing, ","))
+	}, time.Duration(timeoutMinutes)*time.Minute, 10*time.Second,
+		"failed to receive all metrics in %d minutes", timeoutMinutes)
 }
 
 func hostEndpoint(t *testing.T) string {
diff --git a/e2e_tests/testdata/expected_cluster_receiver.yaml b/e2e_tests/testdata/expected_cluster_receiver.yaml
index 031e940313..21840efbde 100644
--- a/e2e_tests/testdata/expected_cluster_receiver.yaml
+++ b/e2e_tests/testdata/expected_cluster_receiver.yaml
@@ -1,2872 +1,3872 @@ resourceMetrics: - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key:
k8s.namespace.uid - value: - stringValue: 24d28cd3-f8b4-4bc2-8a0f-6a44fbe517fc - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 + - resource: {} scopeMetrics: - metrics: - - description: The current phase of namespaces (1 for active and 0 for terminating) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.namespace.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.namespace.uid - value: - stringValue: 38a398cc-a21c-421f-b54a-a96ffed5e935 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: The current phase of namespaces (1 for active and 0 for terminating) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.namespace.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: kube-node-lease - - key: k8s.namespace.uid - value: - stringValue: c520e69d-c96a-4495-be17-921da50c42f7 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: The current phase of namespaces (1 for active and 0 for terminating) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.namespace.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: kube-public - - key: k8s.namespace.uid - value: - stringValue: 7194cd08-debe-45c7-bfed-570751c8c588 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: The current phase of namespaces (1 for active and 0 for terminating) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.namespace.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: k8s.namespace.uid - value: - stringValue: fee93690-401f-483a-a4dd-e67a0c5f8fd0 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: The current phase of namespaces (1 for active and 0 for terminating) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.namespace.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: 
k8s.namespace.name - value: - stringValue: local-path-storage - - key: k8s.namespace.uid - value: - stringValue: 3c177e55-2201-4119-9733-7604bbbf8fda - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: The current phase of namespaces (1 for active and 0 for terminating) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.namespace.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.node.uid - value: - stringValue: 018bff23-5f17-4094-86e1-d994b6362a16 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Ready condition status of the node (true=1, false=0, unknown=-1) - gauge: - dataPoints: - - asInt: "1" - timeUnixNano: "1000000" - name: k8s.node.condition_ready - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.daemonset.name - value: - stringValue: kindnet - - key: k8s.daemonset.uid - value: - stringValue: 37dc933d-ee20-447e-8973-1bbe532bdf75 - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Number of nodes that are running at least 1 daemon pod and are supposed to run the daemon pod - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.current_scheduled_nodes - unit: "1" - - description: Number of nodes that should be running the daemon pod (including nodes currently running the daemon pod) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.desired_scheduled_nodes - unit: "1" - - description: Number of nodes that are running the daemon pod, but are not supposed to run the daemon pod - gauge: - dataPoints: - - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.misscheduled_nodes - unit: "1" - - description: Number of nodes that should be running the daemon pod and have one or more of the daemon pod running and ready - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.ready_nodes - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.daemonset.name - value: - stringValue: kube-proxy - - key: k8s.daemonset.uid - value: - stringValue: ee864647-f429-429c-9578-7fc8c09c795e - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Number of nodes that are running at least 1 daemon pod and are supposed to run the daemon pod - 
gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.current_scheduled_nodes - unit: "1" - - description: Number of nodes that should be running the daemon pod (including nodes currently running the daemon pod) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.desired_scheduled_nodes - unit: "1" - - description: Number of nodes that are running the daemon pod, but are not supposed to run the daemon pod - gauge: - dataPoints: - - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.misscheduled_nodes - unit: "1" - - description: Number of nodes that should be running the daemon pod and have one or more of the daemon pod running and ready - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.ready_nodes - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.daemonset.name - value: - stringValue: sock-splunk-otel-collector-agent - - key: k8s.daemonset.uid - value: - stringValue: ea89d344-50de-48f5-98b0-a555fe36e34e - - key: k8s.namespace.name - value: - stringValue: default - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Number of nodes that are running at least 1 daemon pod and are supposed to run the daemon pod - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.current_scheduled_nodes - unit: "1" - - description: Number of nodes that should be running the daemon pod (including nodes currently running the daemon pod) - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.desired_scheduled_nodes - unit: "1" - - description: Number of nodes that are running the daemon pod, but are not supposed to run the daemon pod - gauge: - dataPoints: - - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.misscheduled_nodes - unit: "1" - - description: Number of nodes that should be running the daemon pod and have one or more of the daemon pod running and ready - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.daemonset.ready_nodes - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: cert-manager - - key: k8s.deployment.uid - value: - stringValue: a57ef6ee-1b2b-4558-bcf5-a042653c52b4 - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - 
timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: cert-manager-cainjector - - key: k8s.deployment.uid - value: - stringValue: 7923ba15-5fe7-49fe-acce-ab6ea1c400af - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: cert-manager-webhook - - key: k8s.deployment.uid - value: - stringValue: a007a65e-a345-4f59-91f5-a1a372e9fd53 - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: coredns - - key: k8s.deployment.uid - value: - stringValue: 5390f5dd-9d46-4b59-ae1f-90b1572db0a5 - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: + - gauge: dataPoints: - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: local-path-provisioner - - key: k8s.deployment.uid - value: - stringValue: 24060f59-fab6-4403-bd1b-d6aa50d9f516 - - key: k8s.namespace.name - value: - stringValue: local-path-storage - - key: metric_source - 
value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: nodejs-test - - key: k8s.deployment.uid - value: - stringValue: c55a99d3-e7cf-42e8-aa10-c0aa5c749b76 - - key: k8s.namespace.name - value: - stringValue: default - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: sock-operator - - key: k8s.deployment.uid - value: - stringValue: fa6e0d2e-8fd2-4978-9385-24578c661447 - - key: k8s.namespace.name - value: - stringValue: default - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.deployment.name - value: - stringValue: sock-splunk-otel-collector-k8s-cluster-receiver - - key: k8s.deployment.uid - value: - stringValue: b37d8bd6-ac5b-426e-8103-07876b1949d1 - - key: k8s.namespace.name - value: - stringValue: default - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.available - unit: "1" - - description: Number of desired pods in this deployment - 
gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.deployment.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.replicaset.name - value: - stringValue: cert-manager-7f6665fd8c - - key: k8s.replicaset.uid - value: - stringValue: 6f3a88d5-08fa-47d7-b5d5-22349eec9852 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.replicaset.name - value: - stringValue: cert-manager-cainjector-666564dc88 - - key: k8s.replicaset.uid - value: - stringValue: 47b7d220-b5ea-4a97-9188-413f82d61c83 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.replicaset.name - value: - stringValue: cert-manager-webhook-fd94896cd - - key: k8s.replicaset.uid - value: - stringValue: 45d66cb0-32bc-4b10-8de2-354f5a88b6d1 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.replicaset.name - value: - stringValue: nodejs-test-56cdcf7c - - key: k8s.replicaset.uid 
- value: - stringValue: 0b01c9f3-6f57-4bf9-8461-86cfdbcec358 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.replicaset.name - value: - stringValue: sock-operator-56566595b - - key: k8s.replicaset.uid - value: - stringValue: 603f1329-325f-4a81-803b-654147bee88c - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.replicaset.name - value: - stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-79b6f8bbcf - - key: k8s.replicaset.uid - value: - stringValue: aeba1ae6-6ae0-4784-bfc4-04b81e98998c - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: k8s.replicaset.name - value: - stringValue: coredns-5d78c9869d - - key: k8s.replicaset.uid - value: - stringValue: 55a09986-f646-49d7-9aef-ec70d3272c49 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: 
"2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: local-path-storage - - key: k8s.replicaset.name - value: - stringValue: local-path-provisioner-6bc4bddd6b - - key: k8s.replicaset.uid - value: - stringValue: 5736cb78-db19-4e63-b231-f6ffe183e0d7 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Total number of available pods (ready for at least minReadySeconds) targeted by this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.available - unit: "1" - - description: Number of desired pods in this replicaset - gauge: - dataPoints: - - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.replicaset.desired - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: cert-manager-7f6665fd8c-wkcm6 - - key: k8s.pod.uid - value: - stringValue: 5286b581-2c17-4110-b074-18e477e407ec - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: - - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: cert-manager-cainjector-666564dc88-d78nh - - key: k8s.pod.uid - value: - stringValue: 59324e58-5da3-4499-a70c-b94c3f0036fa - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: cert-manager-5698c4d465-vm5nb + - key: k8s.pod.uid + value: + stringValue: 74540585-433a-4aff-9b0b-2ca0c111633a + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: 
"1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: cert-manager-webhook-fd94896cd-hcv6s - - key: k8s.pod.uid - value: - stringValue: b5cfd5d0-59d3-4ac5-8b4c-b8d5e07abec4 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: cert-manager-cainjector-d4748596-4ggln + - key: k8s.pod.uid + value: + stringValue: e7fe7c54-a4b8-4559-a197-d1edf77dfc7b + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: nodejs-test-56cdcf7c-bqrx6 - - key: k8s.pod.uid - value: - stringValue: 7c6cf270-98d0-4a6a-b215-2461d252a610 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: cert-manager-webhook-65d78d5c4b-wd2xl + - key: k8s.pod.uid + value: + stringValue: 360c6364-87bb-43a4-819c-0bec4d6daf79 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: sock-operator-56566595b-9s2hn - - key: k8s.pod.uid - value: - stringValue: 1aa50cee-ff7b-4c49-b0b4-6c3e7a166410 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - 
key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: nodejs-test-57564b7dc9-5m55j + - key: k8s.pod.uid + value: + stringValue: 2f2443be-3e3a-424b-bab5-30be32b30aed + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: sock-splunk-otel-collector-agent-wnhhz - - key: k8s.pod.uid - value: - stringValue: f2607c0e-1ccc-440f-b9ba-db9b220ba76e - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-79b6f8bbcfkzqs8 - - key: k8s.pod.uid - value: - stringValue: 28548a19-31ac-4113-aeef-6a84a9006e09 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-agent-dts69 + - key: k8s.pod.uid + value: + stringValue: 7171d2df-fba3-48a1-ad50-62cd3ddba6e0 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - 
value: - stringValue: kube-system - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: coredns-5d78c9869d-d6jcd - - key: k8s.pod.uid - value: - stringValue: 4aaa15dc-642e-4f23-9bba-36d575bd493c - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8-ltjlc + - key: k8s.pod.uid + value: + stringValue: 62916eaa-170a-459f-9471-56c212a9c0f8 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: coredns-5d78c9869d-d74kc - - key: k8s.pod.uid - value: - stringValue: d7997008-4987-4cd2-9b42-97189c1f19b2 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-gvjxr + - key: k8s.pod.uid + value: + stringValue: 81598039-aec3-498c-8669-7a105bf3f1d1 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "2" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.pod.phase - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: etcd-kind-control-plane - - key: k8s.pod.uid - value: - stringValue: 2e3eea91-b1bc-4339-bd65-55bc996d1acb - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: 
k8s.pod.name
+              value:
+                stringValue: coredns-5dd5756b68-xndsg
+            - key: k8s.pod.uid
+              value:
+                stringValue: 53d939b8-8ee2-4bc5-b1f0-2f4fd26e359a
+            - key: metric_source
+              value:
+                stringValue: kubernetes
+            - key: receiver
+              value:
+                stringValue: k8scluster
+            timeUnixNano: "1000000"
-          - asInt: "2"
-            startTimeUnixNano: "1000000"
-            timeUnixNano: "2000000"
-          name: k8s.pod.phase
-          unit: "1"
-        scope:
-          name: otelcol/k8sclusterreceiver
-          version: v0.85.0
-  - resource:
-      attributes:
-        - key: k8s.cluster.name
-          value:
-            stringValue: dev-operator
-        - key: k8s.namespace.name
-          value:
-            stringValue: kube-system
-        - key: k8s.node.name
-          value:
-            stringValue: kind-control-plane
-        - key: k8s.pod.name
-          value:
-            stringValue: kindnet-nc2q6
-        - key: k8s.pod.uid
-          value:
-            stringValue: 2e4716c8-b5fc-414e-8e2c-f209d6e98b3a
-        - key: metric_source
-          value:
-            stringValue: kubernetes
-        - key: receiver
-          value:
-            stringValue: k8scluster
-      schemaUrl: https://opentelemetry.io/schemas/1.18.0
-    scopeMetrics:
-      - metrics:
-          - description: Current phase of the pod (1 - Pending, 2 - Running, 3 - Succeeded, 4 - Failed, 5 - Unknown)
-            gauge:
-              dataPoints:
+          - asInt: "2"
+            attributes:
+              - key: k8s.cluster.name
+                value:
+                  stringValue: dev-operator
+              - key: k8s.namespace.name
+                value:
+                  stringValue: kube-system
+              - key: k8s.node.name
+                value:
+                  stringValue: kind-control-plane
+              - key: k8s.pod.name
+                value:
+                  stringValue: etcd-kind-control-plane
+              - key: k8s.pod.uid
+                value:
+                  stringValue: 57dce01c-4f67-43f6-970a-5089093ca378
+              - key: metric_source
+                value:
+                  stringValue: kubernetes
+              - key: receiver
+                value:
+                  stringValue: k8scluster
+            timeUnixNano: "1000000"
[… the same remove/add pattern repeats for the remaining pods: the old per-resource k8s.pod.phase entries for kube-apiserver-kind-control-plane, kube-controller-manager-kind-control-plane, kube-proxy-wf4ls, kube-scheduler-kind-control-plane and local-path-provisioner-6bc4bddd6b-rpwhk are removed, and per-data-point entries with refreshed pod UIDs are added for kindnet-tkg4v, kube-apiserver-kind-control-plane, kube-controller-manager-kind-control-plane, kube-proxy-zsnhf, kube-scheduler-kind-control-plane and local-path-provisioner-6f8956fb48-q6kkt …]
         name: k8s.pod.phase
-          unit: "1"
-        scope:
-          name: otelcol/k8sclusterreceiver
-          version: v0.85.0
-  - resource:
-      attributes:
-        - key: container.id
-          value:
-            stringValue: 0aab92f5f0a0d9849b04b2bfa6f0f08b3047cd2bc0dbc85da37d0204bd5a2e8e
-        - key: container.image.name
-          value:
-            stringValue: docker.io/kindest/local-path-provisioner
-        - key: container.image.tag
-          value:
-            stringValue: v20230511-dc714da8
-        - key: k8s.cluster.name
-          value:
-            stringValue: dev-operator
-        - key: k8s.container.name
-          value:
-            stringValue: local-path-provisioner
-        - key: k8s.namespace.name
-          value:
-            stringValue: local-path-storage
-        - key: k8s.node.name
-          value:
-            stringValue: kind-control-plane
-        - key: k8s.pod.name
-          value:
-            stringValue: local-path-provisioner-6bc4bddd6b-rpwhk
-        - key: k8s.pod.uid
-          value:
-            stringValue: aba05221-ad2b-4cbe-8d53-72852f2e3899
-        - key: metric_source
-          value:
-            stringValue: kubernetes
-        - key: receiver
-          value:
-            stringValue: k8scluster
-      schemaUrl: https://opentelemetry.io/schemas/1.18.0
-    scopeMetrics:
-      - metrics:
-          - description: Whether a container has passed its readiness probe (0 for no, 1 for yes)
-            gauge:
-              dataPoints:
-                - asInt: "1"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.ready
-            unit: "1"
-          - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that.
-            gauge:
-              dataPoints:
-                - asInt: "0"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.restarts
-            unit: "1"
-        scope:
-          name: otelcol/k8sclusterreceiver
-          version: v0.85.0
[… equivalent per-resource blocks are removed for every other container: kube-scheduler and kube-controller-manager (v1.27.3), kube-rbac-proxy (v0.14.2) and manager (opentelemetry-operator v0.83.0) in sock-operator-56566595b-9s2hn, kindnet-cni, cert-manager-webhook and cert-manager-controller (v1.10.0), both coredns-5d78c9869d replicas (v1.10.1), and the otel-collector containers of sock-splunk-otel-collector-agent-wnhhz and sock-splunk-otel-collector-k8s-cluster-receiver-79b6f8bbcfkzqs8 (0.85.0); those blocks also carried per-container k8s.container.cpu_request/cpu_limit ('{cpu}') and k8s.container.memory_request/memory_limit (By) gauges, each described with a link to https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core …]
+      - gauge:
           dataPoints:
+          - asInt: "1"
+            attributes:
+              - key: container.id
+                value:
+                  stringValue: 1d8d872582305d6f52ef1a9c2af9502991aac13bf66db581578fa057ae034d65
+              - key: container.image.name
+                value:
+                  stringValue: docker.io/kindest/kindnetd
+              - key: container.image.tag
+                value:
+                  stringValue: v20230511-dc714da8
+              - key: k8s.cluster.name
+                value:
+                  stringValue: dev-operator
+              - key: k8s.container.name
+                value:
+                  stringValue: kindnet-cni
+              - key: k8s.namespace.name
+                value:
+                  stringValue: kube-system
+              - key: k8s.node.name
+                value:
+                  stringValue: kind-control-plane
+              - key: k8s.pod.name
+                value:
+                  stringValue: kindnet-tkg4v
+              - key: k8s.pod.uid
+                value:
+                  stringValue: bff7fb95-6b7b-4589-8c89-cf423a184512
+              - key: metric_source
+                value:
+                  stringValue: kubernetes
+              - key: receiver
+                value:
+                  stringValue: k8scluster
+            timeUnixNano: "1000000"
[… matching asInt: "1" readiness data points are added for every current container: kube-apiserver, kube-controller-manager, kube-scheduler, kube-proxy-zsnhf and etcd (v1.28.0 / 3.5.9-0), local-path-provisioner-6f8956fb48-q6kkt, cert-manager-webhook-65d78d5c4b-wd2xl, cert-manager-5698c4d465-vm5nb and cert-manager-cainjector-d4748596-4ggln (all v1.13.0), the otel-collector containers of sock-splunk-otel-collector-agent-dts69 and sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8-ltjlc (0.85.0), both coredns-5dd5756b68 replicas (v1.10.1), nodejs-test-57564b7dc9-5m55j (docker.io/library/nodejs_test:latest), and manager plus kube-rbac-proxy in sock-operator-949dd8564-hgqp6 …]
         name: k8s.container.ready
-            unit: "1"
-          - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that.
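The removed description above effectively defines a boolean contract for k8s.container.restarts: compare against zero, don't reason about the exact count. If a test wanted to act on that contract, a tiny illustrative helper (hypothetical, not part of this change) might look like this:

```go
package e2e_tests

import "go.opentelemetry.io/collector/pdata/pmetric"

// restartedRecently applies the interpretation the metric description
// recommends: treat k8s.container.restarts as a boolean signal (== 0 means
// no recent restarts, > 0 means some) rather than trusting the exact count,
// which the kubelet may grow or reset at any time.
func restartedRecently(dp pmetric.NumberDataPoint) bool {
	return dp.IntValue() > 0
}
```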
-            gauge:
+      - gauge:
           dataPoints:
-              - asInt: "0"
-                startTimeUnixNano: "1000000"
-                timeUnixNano: "2000000"
+          - asInt: "0"
+            attributes:
+              - key: container.id
+                value:
+                  stringValue: 1d8d872582305d6f52ef1a9c2af9502991aac13bf66db581578fa057ae034d65
+              - key: container.image.name
+                value:
+                  stringValue: docker.io/kindest/kindnetd
+              - key: container.image.tag
+                value:
+                  stringValue: v20230511-dc714da8
+              - key: k8s.cluster.name
+                value:
+                  stringValue: dev-operator
+              - key: k8s.container.name
+                value:
+                  stringValue: kindnet-cni
+              - key: k8s.namespace.name
+                value:
+                  stringValue: kube-system
+              - key: k8s.node.name
+                value:
+                  stringValue: kind-control-plane
+              - key: k8s.pod.name
+                value:
+                  stringValue: kindnet-tkg4v
+              - key: k8s.pod.uid
+                value:
+                  stringValue: bff7fb95-6b7b-4589-8c89-cf423a184512
+              - key: metric_source
+                value:
+                  stringValue: kubernetes
+              - key: receiver
+                value:
+                  stringValue: k8scluster
+            timeUnixNano: "1000000"
[… an asInt: "0" restarts data point with the same per-container attribute set is added for every container listed under k8s.container.ready above …]
         name: k8s.container.restarts
-            unit: "1"
-        scope:
-          name: otelcol/k8sclusterreceiver
-          version: v0.85.0
-  - resource:
-      attributes:
-        - key: container.id
-          value:
-            stringValue: 7b2b6c1d8a95ca46baa108a2e3e495cd29584a68f8656b72c7c8728698da3d33
-        - key: container.image.name
-          value:
-            stringValue: quay.io/signalfx/splunk-otel-collector
-        - key: container.image.tag
-          value:
-            stringValue: 0.85.0
-        - key: k8s.cluster.name
-          value:
-            stringValue: dev-operator
-        - key: k8s.container.name
-          value:
-            stringValue: otel-collector
-        - key: k8s.namespace.name
-          value:
-            stringValue: default
-        - key: k8s.node.name
-          value:
-            stringValue: kind-control-plane
-        - key: k8s.pod.name
-          value:
-            stringValue: sock-splunk-otel-collector-agent-wnhhz
-        - key: k8s.pod.uid
-          value:
-            stringValue: f2607c0e-1ccc-440f-b9ba-db9b220ba76e
-        - key: metric_source
-          value:
-            stringValue: kubernetes
-        - key: receiver
-          value:
-            stringValue: k8scluster
-      schemaUrl: https://opentelemetry.io/schemas/1.18.0
-    scopeMetrics:
-      - metrics:
-          - description: Maximum resource limit set for the container. See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details
-            gauge:
+      - gauge:
           dataPoints:
+          - asDouble: 0.1
+            attributes:
+              - key: container.id
+                value:
+                  stringValue: 1d8d872582305d6f52ef1a9c2af9502991aac13bf66db581578fa057ae034d65
+              - key: container.image.name
+                value:
+                  stringValue: docker.io/kindest/kindnetd
+              - key: container.image.tag
+                value:
+                  stringValue: v20230511-dc714da8
+              - key: k8s.cluster.name
+                value:
+                  stringValue: dev-operator
+              - key: k8s.container.name
+                value:
+                  stringValue: kindnet-cni
+              - key: k8s.namespace.name
+                value:
+                  stringValue: kube-system
+              - key: k8s.node.name
+                value:
+                  stringValue: kind-control-plane
+              - key: k8s.pod.name
+                value:
+                  stringValue: kindnet-tkg4v
+              - key: k8s.pod.uid
+                value:
+                  stringValue: bff7fb95-6b7b-4589-8c89-cf423a184512
+              - key: metric_source
+                value:
+                  stringValue: kubernetes
+              - key: receiver
+                value:
+                  stringValue: k8scluster
+            timeUnixNano: "1000000"
+          - asDouble: 0.2
+            attributes:
+              - key: container.id
+                value:
+                  stringValue: 49a13379ed58c0343071e6ac8143712d5d6087e22abf0385372671ebcf911b4d
+              - key: container.image.name
+                value:
+                  stringValue: quay.io/signalfx/splunk-otel-collector
+              - key: container.image.tag
+                value:
+                  stringValue: 0.85.0
+              - key: k8s.cluster.name
+                value:
+                  stringValue: dev-operator
+              - key: k8s.container.name
+                value:
+                  stringValue: otel-collector
+              - key: k8s.namespace.name
+                value:
+                  stringValue: default
+              - key: k8s.node.name
+                value:
+                  stringValue: kind-control-plane
+              - key: k8s.pod.name
+                value:
+                  stringValue: sock-splunk-otel-collector-agent-dts69
+              - key: k8s.pod.uid
+                value:
+                  stringValue: 7171d2df-fba3-48a1-ad50-62cd3ddba6e0
+              - key: metric_source
+                value:
+                  stringValue: kubernetes
+              - key: receiver
+                value:
+                  stringValue: k8scluster
+            timeUnixNano: "1000000"
+          - asDouble: 0.1
+
attributes: + - key: container.id + value: + stringValue: 8cfb7effb556aff47f4e32f7a68181d891c93342f14cf147c73482a5653558da + - key: container.image.name + value: + stringValue: ghcr.io/open-telemetry/opentelemetry-operator/opentelemetry-operator + - key: container.image.tag + value: + stringValue: v0.83.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: manager + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.5 + attributes: + - key: container.id + value: + stringValue: 932009af1d5d3391d21d62771b550c90094cc19bad776afc6554547b2429c5f0 + - key: container.image.name + value: + stringValue: quay.io/brancz/kube-rbac-proxy + - key: container.image.tag + value: + stringValue: v0.14.2 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-rbac-proxy + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asDouble: 0.2 - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" + attributes: + - key: container.id + value: + stringValue: a0d6fb7796dcb8652af0b56f78b203f26891a3035883ff12ee65bee7b7d3ce17 + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8-ltjlc + - key: k8s.pod.uid + value: + stringValue: 62916eaa-170a-459f-9471-56c212a9c0f8 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" name: k8s.container.cpu_limit - unit: '{cpu}' - - description: Resource requested for the container. 
See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details - gauge: + - gauge: dataPoints: + - asDouble: 0.1 + attributes: + - key: container.id + value: + stringValue: 1d8d872582305d6f52ef1a9c2af9502991aac13bf66db581578fa057ae034d65 + - key: container.image.name + value: + stringValue: docker.io/kindest/kindnetd + - key: container.image.tag + value: + stringValue: v20230511-dc714da8 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kindnet-cni + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: kindnet-tkg4v + - key: k8s.pod.uid + value: + stringValue: bff7fb95-6b7b-4589-8c89-cf423a184512 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.25 + attributes: + - key: container.id + value: + stringValue: 1dea2480208524e3cf9eff58d595a10d5f7a81ed70fdb2a1b51b1866ed0a1cee + - key: container.image.name + value: + stringValue: registry.k8s.io/kube-apiserver + - key: container.image.tag + value: + stringValue: v1.28.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-apiserver + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: kube-apiserver-kind-control-plane + - key: k8s.pod.uid + value: + stringValue: 6c7bc53b-49de-4362-978e-bb33c7c49bcb + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.2 + attributes: + - key: container.id + value: + stringValue: 49a13379ed58c0343071e6ac8143712d5d6087e22abf0385372671ebcf911b4d + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-agent-dts69 + - key: k8s.pod.uid + value: + stringValue: 7171d2df-fba3-48a1-ad50-62cd3ddba6e0 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.2 + attributes: + - key: container.id + value: + stringValue: 69903fd12bfaf7351195b5161a9b8eda2ea8feb8a23874595938932151258bff + - key: container.image.name + value: + stringValue: registry.k8s.io/kube-controller-manager + - key: container.image.tag + value: + stringValue: v1.28.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-controller-manager + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: kube-controller-manager-kind-control-plane + - key: k8s.pod.uid + value: + stringValue: 1156765f-4aa3-48ce-b72b-40dbe0791f79 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: 
"1000000" + - asDouble: 0.1 + attributes: + - key: container.id + value: + stringValue: 7f4f87451f89e22cc93868ed17d7d38d9e32072e3d67f0ad005605bf46f8f7fe + - key: container.image.name + value: + stringValue: registry.k8s.io/coredns/coredns + - key: container.image.tag + value: + stringValue: v1.10.1 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: coredns + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-gvjxr + - key: k8s.pod.uid + value: + stringValue: 81598039-aec3-498c-8669-7a105bf3f1d1 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.1 + attributes: + - key: container.id + value: + stringValue: 8cfb7effb556aff47f4e32f7a68181d891c93342f14cf147c73482a5653558da + - key: container.image.name + value: + stringValue: ghcr.io/open-telemetry/opentelemetry-operator/opentelemetry-operator + - key: container.image.tag + value: + stringValue: v0.83.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: manager + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.005 + attributes: + - key: container.id + value: + stringValue: 932009af1d5d3391d21d62771b550c90094cc19bad776afc6554547b2429c5f0 + - key: container.image.name + value: + stringValue: quay.io/brancz/kube-rbac-proxy + - key: container.image.tag + value: + stringValue: v0.14.2 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-rbac-proxy + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.1 + attributes: + - key: container.id + value: + stringValue: 96b9557968027a101dab32565494c69369db2a30290a02c44cb95c7f874781b2 + - key: container.image.name + value: + stringValue: registry.k8s.io/kube-scheduler + - key: container.image.tag + value: + stringValue: v1.28.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-scheduler + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: kube-scheduler-kind-control-plane + - key: k8s.pod.uid + value: + stringValue: 146a1b22-6228-4180-8da0-ac586ff53364 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asDouble: 0.2 - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" + attributes: + - key: container.id + value: + stringValue: 
a0d6fb7796dcb8652af0b56f78b203f26891a3035883ff12ee65bee7b7d3ce17 + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8-ltjlc + - key: k8s.pod.uid + value: + stringValue: 62916eaa-170a-459f-9471-56c212a9c0f8 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.1 + attributes: + - key: container.id + value: + stringValue: c746cba4f4507792f79f48bce09ee45108966760ace5299cbc3fe2d7cad22e80 + - key: container.image.name + value: + stringValue: registry.k8s.io/etcd + - key: container.image.tag + value: + stringValue: 3.5.9-0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: etcd + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: etcd-kind-control-plane + - key: k8s.pod.uid + value: + stringValue: 57dce01c-4f67-43f6-970a-5089093ca378 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asDouble: 0.1 + attributes: + - key: container.id + value: + stringValue: d691b9091990122be2e84f7559d8193db2cd1da7b6c220c82400d3e2be44b742 + - key: container.image.name + value: + stringValue: registry.k8s.io/coredns/coredns + - key: container.image.tag + value: + stringValue: v1.10.1 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: coredns + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-xndsg + - key: k8s.pod.uid + value: + stringValue: 53d939b8-8ee2-4bc5-b1f0-2f4fd26e359a + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" name: k8s.container.cpu_request - unit: '{cpu}' - - description: Maximum resource limit set for the container. 
See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details - gauge: + - gauge: dataPoints: + - asInt: "52428800" + attributes: + - key: container.id + value: + stringValue: 1d8d872582305d6f52ef1a9c2af9502991aac13bf66db581578fa057ae034d65 + - key: container.image.name + value: + stringValue: docker.io/kindest/kindnetd + - key: container.image.tag + value: + stringValue: v20230511-dc714da8 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kindnet-cni + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: kindnet-tkg4v + - key: k8s.pod.uid + value: + stringValue: bff7fb95-6b7b-4589-8c89-cf423a184512 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "524288000" + attributes: + - key: container.id + value: + stringValue: 49a13379ed58c0343071e6ac8143712d5d6087e22abf0385372671ebcf911b4d + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-agent-dts69 + - key: k8s.pod.uid + value: + stringValue: 7171d2df-fba3-48a1-ad50-62cd3ddba6e0 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "178257920" + attributes: + - key: container.id + value: + stringValue: 7f4f87451f89e22cc93868ed17d7d38d9e32072e3d67f0ad005605bf46f8f7fe + - key: container.image.name + value: + stringValue: registry.k8s.io/coredns/coredns + - key: container.image.tag + value: + stringValue: v1.10.1 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: coredns + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-gvjxr + - key: k8s.pod.uid + value: + stringValue: 81598039-aec3-498c-8669-7a105bf3f1d1 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "134217728" + attributes: + - key: container.id + value: + stringValue: 8cfb7effb556aff47f4e32f7a68181d891c93342f14cf147c73482a5653558da + - key: container.image.name + value: + stringValue: ghcr.io/open-telemetry/opentelemetry-operator/opentelemetry-operator + - key: container.image.tag + value: + stringValue: v0.83.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: manager + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: 
"1000000" + - asInt: "134217728" + attributes: + - key: container.id + value: + stringValue: 932009af1d5d3391d21d62771b550c90094cc19bad776afc6554547b2429c5f0 + - key: container.image.name + value: + stringValue: quay.io/brancz/kube-rbac-proxy + - key: container.image.tag + value: + stringValue: v0.14.2 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-rbac-proxy + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "524288000" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" + attributes: + - key: container.id + value: + stringValue: a0d6fb7796dcb8652af0b56f78b203f26891a3035883ff12ee65bee7b7d3ce17 + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8-ltjlc + - key: k8s.pod.uid + value: + stringValue: 62916eaa-170a-459f-9471-56c212a9c0f8 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "178257920" + attributes: + - key: container.id + value: + stringValue: d691b9091990122be2e84f7559d8193db2cd1da7b6c220c82400d3e2be44b742 + - key: container.image.name + value: + stringValue: registry.k8s.io/coredns/coredns + - key: container.image.tag + value: + stringValue: v1.10.1 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: coredns + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-xndsg + - key: k8s.pod.uid + value: + stringValue: 53d939b8-8ee2-4bc5-b1f0-2f4fd26e359a + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" name: k8s.container.memory_limit - unit: By - - description: Resource requested for the container. 
See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details - gauge: + - gauge: dataPoints: + - asInt: "52428800" + attributes: + - key: container.id + value: + stringValue: 1d8d872582305d6f52ef1a9c2af9502991aac13bf66db581578fa057ae034d65 + - key: container.image.name + value: + stringValue: docker.io/kindest/kindnetd + - key: container.image.tag + value: + stringValue: v20230511-dc714da8 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kindnet-cni + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: kindnet-tkg4v + - key: k8s.pod.uid + value: + stringValue: bff7fb95-6b7b-4589-8c89-cf423a184512 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "524288000" + attributes: + - key: container.id + value: + stringValue: 49a13379ed58c0343071e6ac8143712d5d6087e22abf0385372671ebcf911b4d + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-agent-dts69 + - key: k8s.pod.uid + value: + stringValue: 7171d2df-fba3-48a1-ad50-62cd3ddba6e0 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "73400320" + attributes: + - key: container.id + value: + stringValue: 7f4f87451f89e22cc93868ed17d7d38d9e32072e3d67f0ad005605bf46f8f7fe + - key: container.image.name + value: + stringValue: registry.k8s.io/coredns/coredns + - key: container.image.tag + value: + stringValue: v1.10.1 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: coredns + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-gvjxr + - key: k8s.pod.uid + value: + stringValue: 81598039-aec3-498c-8669-7a105bf3f1d1 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "67108864" + attributes: + - key: container.id + value: + stringValue: 8cfb7effb556aff47f4e32f7a68181d891c93342f14cf147c73482a5653558da + - key: container.image.name + value: + stringValue: ghcr.io/open-telemetry/opentelemetry-operator/opentelemetry-operator + - key: container.image.tag + value: + stringValue: v0.83.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: manager + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" 
+ - asInt: "67108864" + attributes: + - key: container.id + value: + stringValue: 932009af1d5d3391d21d62771b550c90094cc19bad776afc6554547b2429c5f0 + - key: container.image.name + value: + stringValue: quay.io/brancz/kube-rbac-proxy + - key: container.image.tag + value: + stringValue: v0.14.2 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: kube-rbac-proxy + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-operator-949dd8564-hgqp6 + - key: k8s.pod.uid + value: + stringValue: 9ff5c317-8225-429d-bb61-9787789ef05f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "524288000" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" + attributes: + - key: container.id + value: + stringValue: a0d6fb7796dcb8652af0b56f78b203f26891a3035883ff12ee65bee7b7d3ce17 + - key: container.image.name + value: + stringValue: quay.io/signalfx/splunk-otel-collector + - key: container.image.tag + value: + stringValue: 0.85.0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: otel-collector + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8-ltjlc + - key: k8s.pod.uid + value: + stringValue: 62916eaa-170a-459f-9471-56c212a9c0f8 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "104857600" + attributes: + - key: container.id + value: + stringValue: c746cba4f4507792f79f48bce09ee45108966760ace5299cbc3fe2d7cad22e80 + - key: container.image.name + value: + stringValue: registry.k8s.io/etcd + - key: container.image.tag + value: + stringValue: 3.5.9-0 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: etcd + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: etcd-kind-control-plane + - key: k8s.pod.uid + value: + stringValue: 57dce01c-4f67-43f6-970a-5089093ca378 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "73400320" + attributes: + - key: container.id + value: + stringValue: d691b9091990122be2e84f7559d8193db2cd1da7b6c220c82400d3e2be44b742 + - key: container.image.name + value: + stringValue: registry.k8s.io/coredns/coredns + - key: container.image.tag + value: + stringValue: v1.10.1 + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.container.name + value: + stringValue: coredns + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.node.name + value: + stringValue: kind-control-plane + - key: k8s.pod.name + value: + stringValue: coredns-5dd5756b68-xndsg + - key: k8s.pod.uid + value: + stringValue: 53d939b8-8ee2-4bc5-b1f0-2f4fd26e359a + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" name: k8s.container.memory_request - unit: By - - description: Whether a container has passed its readiness 
probe (0 for no, 1 for yes) - gauge: + - gauge: dataPoints: - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.ready - unit: "1" - - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that. - gauge: - dataPoints: - - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.restarts - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: container.id - value: - stringValue: 83b3dc6a49f6841dfa4c961e02ce3e8d348012d7826161064f207bd2e439bedf - - key: container.image.name - value: - stringValue: quay.io/jetstack/cert-manager-cainjector - - key: container.image.tag - value: - stringValue: v1.10.0 - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.container.name - value: - stringValue: cert-manager-cainjector - - key: k8s.namespace.name - value: - stringValue: cert-manager - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: cert-manager-cainjector-666564dc88-d78nh - - key: k8s.pod.uid - value: - stringValue: 59324e58-5da3-4499-a70c-b94c3f0036fa - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Whether a container has passed its readiness probe (0 for no, 1 for yes) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.namespace.uid + value: + stringValue: e818156f-37b1-4b8b-9a6b-51dec2e39460 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.ready - unit: "1" - - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that. 
- gauge: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.namespace.uid + value: + stringValue: 80eedd34-118a-4009-ba57-0487c785cd21 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-node-lease + - key: k8s.namespace.uid + value: + stringValue: 44d9e9ba-53b1-4de2-98e8-0ac86190129f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-public + - key: k8s.namespace.uid + value: + stringValue: f9763004-f7b2-47eb-8f59-2191b1b7ea20 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.namespace.uid + value: + stringValue: 0dd4b998-54a9-4efc-8069-a1d74f9ef1eb + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: local-path-storage + - key: k8s.namespace.uid + value: + stringValue: d44bc2e0-89d5-40cc-95a8-9e9762103d28 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + name: k8s.namespace.phase + - gauge: dataPoints: - - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.restarts - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: container.id - value: - stringValue: caabbd1d52a4eef537a6eb280682fe086c40f1acfb94870adde35ed5aba1e906 - - key: container.image.name - value: - stringValue: docker.io/library/nodejs_test - - key: container.image.tag - value: - stringValue: latest - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.container.name - value: - stringValue: nodejs-test - - key: k8s.namespace.name - value: - stringValue: default - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: nodejs-test-56cdcf7c-bqrx6 - - key: k8s.pod.uid - value: - stringValue: 7c6cf270-98d0-4a6a-b215-2461d252a610 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Whether a container has passed its readiness probe (0 for no, 1 for yes) - gauge: + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: cert-manager + - key: k8s.deployment.uid + value: + stringValue: de6f1d36-509e-41d9-9c89-a39b8a604eef + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + 
timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: cert-manager-cainjector + - key: k8s.deployment.uid + value: + stringValue: 404c3fba-3ed8-4402-8275-e2fa572f9209 + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: cert-manager-webhook + - key: k8s.deployment.uid + value: + stringValue: 9fa78641-9542-4b52-a1bc-d48dbda6be04 + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "2" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: coredns + - key: k8s.deployment.uid + value: + stringValue: 951ecd5a-9cfe-42c4-be0f-f7eb0b137c4a + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: local-path-provisioner + - key: k8s.deployment.uid + value: + stringValue: 8dcea2a3-58f6-423d-b61b-f732bfe9393b + - key: k8s.namespace.name + value: + stringValue: local-path-storage + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: nodejs-test + - key: k8s.deployment.uid + value: + stringValue: b2e27afc-a829-4bd0-87b3-c49cdc077bc7 + - key: k8s.namespace.name + value: + stringValue: default + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: sock-operator + - key: k8s.deployment.uid + value: + stringValue: e313f535-50f8-46ca-9300-65b44e22f9dc + - key: k8s.namespace.name + value: + stringValue: default + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver + - key: k8s.deployment.uid + value: + stringValue: f9bfdd05-8948-4410-b50f-b60f7f490693 + - key: k8s.namespace.name + value: + stringValue: default + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + name: k8s.deployment.available + - gauge: dataPoints: - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.ready - unit: "1" - - description: How many times the container has restarted in the recent past. 
This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that. - gauge: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: cert-manager + - key: k8s.deployment.uid + value: + stringValue: de6f1d36-509e-41d9-9c89-a39b8a604eef + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: cert-manager-cainjector + - key: k8s.deployment.uid + value: + stringValue: 404c3fba-3ed8-4402-8275-e2fa572f9209 + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: cert-manager-webhook + - key: k8s.deployment.uid + value: + stringValue: 9fa78641-9542-4b52-a1bc-d48dbda6be04 + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "2" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: coredns + - key: k8s.deployment.uid + value: + stringValue: 951ecd5a-9cfe-42c4-be0f-f7eb0b137c4a + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: local-path-provisioner + - key: k8s.deployment.uid + value: + stringValue: 8dcea2a3-58f6-423d-b61b-f732bfe9393b + - key: k8s.namespace.name + value: + stringValue: local-path-storage + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: nodejs-test + - key: k8s.deployment.uid + value: + stringValue: b2e27afc-a829-4bd0-87b3-c49cdc077bc7 + - key: k8s.namespace.name + value: + stringValue: default + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: sock-operator + - key: k8s.deployment.uid + value: + stringValue: e313f535-50f8-46ca-9300-65b44e22f9dc + - key: k8s.namespace.name + value: + stringValue: default + - key: metric_source + value: + stringValue: kubernetes 
+ - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.deployment.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver + - key: k8s.deployment.uid + value: + stringValue: f9bfdd05-8948-4410-b50f-b60f7f490693 + - key: k8s.namespace.name + value: + stringValue: default + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + name: k8s.deployment.desired + - gauge: dataPoints: + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.replicaset.name + value: + stringValue: cert-manager-5698c4d465 + - key: k8s.replicaset.uid + value: + stringValue: f5830850-00f6-4dc5-a80c-d6aaa7c124b7 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.replicaset.name + value: + stringValue: cert-manager-cainjector-d4748596 + - key: k8s.replicaset.uid + value: + stringValue: a98cbe7e-bbd6-4dc9-90d9-7db9354740c1 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.replicaset.name + value: + stringValue: cert-manager-webhook-65d78d5c4b + - key: k8s.replicaset.uid + value: + stringValue: 92248868-4ca5-4644-9e98-1da4b0c314e6 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: nodejs-test-57564b7dc9 + - key: k8s.replicaset.uid + value: + stringValue: fed76747-f866-4841-9f8d-d1fef5085219 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: sock-operator-949dd8564 + - key: k8s.replicaset.uid + value: + stringValue: d49ea36a-9b66-448c-bdf2-9de54bff31ad + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8 + - key: k8s.replicaset.uid + value: + stringValue: df1e63c8-09cb-4578-b362-1fd344c56321 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.restarts - unit: "1" - scope: - name: 
otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: container.id - value: - stringValue: cb010798e4cc40ee62e5516a49db8284004429dfffdd0a4475872c798e69efa8 - - key: container.image.name - value: - stringValue: registry.k8s.io/kube-proxy - - key: container.image.tag - value: - stringValue: v1.27.3 - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.container.name - value: - stringValue: kube-proxy - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: kube-proxy-wf4ls - - key: k8s.pod.uid - value: - stringValue: 8202e24b-78a2-4a6a-a39e-28ba6964ac5a - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Whether a container has passed its readiness probe (0 for no, 1 for yes) - gauge: - dataPoints: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7d64687c9c + - key: k8s.replicaset.uid + value: + stringValue: 98487648-1c7e-4578-bdda-2dd5f4f9600f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "2" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.replicaset.name + value: + stringValue: coredns-5dd5756b68 + - key: k8s.replicaset.uid + value: + stringValue: 1bbc2f83-6464-46a4-b754-e4deb444d098 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "1" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.ready - unit: "1" - - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that. 
- gauge: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: local-path-storage + - key: k8s.replicaset.name + value: + stringValue: local-path-provisioner-6f8956fb48 + - key: k8s.replicaset.uid + value: + stringValue: fbaa33fe-44d2-4707-a1e1-98c0a2b00e13 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + name: k8s.replicaset.available + - gauge: dataPoints: + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.replicaset.name + value: + stringValue: cert-manager-5698c4d465 + - key: k8s.replicaset.uid + value: + stringValue: f5830850-00f6-4dc5-a80c-d6aaa7c124b7 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.replicaset.name + value: + stringValue: cert-manager-cainjector-d4748596 + - key: k8s.replicaset.uid + value: + stringValue: a98cbe7e-bbd6-4dc9-90d9-7db9354740c1 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: cert-manager + - key: k8s.replicaset.name + value: + stringValue: cert-manager-webhook-65d78d5c4b + - key: k8s.replicaset.uid + value: + stringValue: 92248868-4ca5-4644-9e98-1da4b0c314e6 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: nodejs-test-57564b7dc9 + - key: k8s.replicaset.uid + value: + stringValue: fed76747-f866-4841-9f8d-d1fef5085219 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: sock-operator-949dd8564 + - key: k8s.replicaset.uid + value: + stringValue: d49ea36a-9b66-448c-bdf2-9de54bff31ad + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7444c4bc8 + - key: k8s.replicaset.uid + value: + stringValue: df1e63c8-09cb-4578-b362-1fd344c56321 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" - asInt: "0" - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.restarts - unit: "1" - scope: - name: otelcol/k8sclusterreceiver - version: v0.85.0 - - resource: - attributes: - - key: container.id - 
value: - stringValue: e30f701398b7e05fde5e47c86efcdbbcd6ce68feb87ffa8f7654edc3e63a01bd - - key: container.image.name - value: - stringValue: registry.k8s.io/kube-apiserver - - key: container.image.tag - value: - stringValue: v1.27.3 - - key: k8s.cluster.name - value: - stringValue: dev-operator - - key: k8s.container.name - value: - stringValue: kube-apiserver - - key: k8s.namespace.name - value: - stringValue: kube-system - - key: k8s.node.name - value: - stringValue: kind-control-plane - - key: k8s.pod.name - value: - stringValue: kube-apiserver-kind-control-plane - - key: k8s.pod.uid - value: - stringValue: d3bef5c4-8194-46fa-a02f-dc5db95a5255 - - key: metric_source - value: - stringValue: kubernetes - - key: receiver - value: - stringValue: k8scluster - schemaUrl: https://opentelemetry.io/schemas/1.18.0 - scopeMetrics: - - metrics: - - description: Resource requested for the container. See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details - gauge: + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: default + - key: k8s.replicaset.name + value: + stringValue: sock-splunk-otel-collector-k8s-cluster-receiver-7d64687c9c + - key: k8s.replicaset.uid + value: + stringValue: 98487648-1c7e-4578-bdda-2dd5f4f9600f + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "2" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: k8s.replicaset.name + value: + stringValue: coredns-5dd5756b68 + - key: k8s.replicaset.uid + value: + stringValue: 1bbc2f83-6464-46a4-b754-e4deb444d098 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.namespace.name + value: + stringValue: local-path-storage + - key: k8s.replicaset.name + value: + stringValue: local-path-provisioner-6f8956fb48 + - key: k8s.replicaset.uid + value: + stringValue: fbaa33fe-44d2-4707-a1e1-98c0a2b00e13 + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + name: k8s.replicaset.desired + - gauge: dataPoints: - - asDouble: 0.25 - startTimeUnixNano: "1000000" - timeUnixNano: "2000000" - name: k8s.container.cpu_request - unit: '{cpu}' - - description: Whether a container has passed its readiness probe (0 for no, 1 for yes) - gauge: + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.daemonset.name + value: + stringValue: kindnet + - key: k8s.daemonset.uid + value: + stringValue: 6640ecde-fa37-4a26-ba23-6b6a71bc3921 + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + stringValue: k8scluster + timeUnixNano: "1000000" + - asInt: "1" + attributes: + - key: k8s.cluster.name + value: + stringValue: dev-operator + - key: k8s.daemonset.name + value: + stringValue: kube-proxy + - key: k8s.daemonset.uid + value: + stringValue: ba313745-1d40-4f03-b8ec-8574e97c205b + - key: k8s.namespace.name + value: + stringValue: kube-system + - key: metric_source + value: + stringValue: kubernetes + - key: receiver + value: + 
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "1"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: sock-splunk-otel-collector-agent
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 3ada8789-be03-4c0a-876c-a30e81d6d374
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: default
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+            name: k8s.daemonset.current_scheduled_nodes
+          - gauge:
               dataPoints:
                 - asInt: "1"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.ready
-            unit: "1"
-          - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that.
-            gauge:
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: kindnet
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 6640ecde-fa37-4a26-ba23-6b6a71bc3921
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: kube-system
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "1"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: kube-proxy
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: ba313745-1d40-4f03-b8ec-8574e97c205b
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: kube-system
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "1"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: sock-splunk-otel-collector-agent
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 3ada8789-be03-4c0a-876c-a30e81d6d374
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: default
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+            name: k8s.daemonset.desired_scheduled_nodes
+          - gauge:
               dataPoints:
                 - asInt: "0"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.restarts
-            unit: "1"
-        scope:
-          name: otelcol/k8sclusterreceiver
-          version: v0.85.0
-  - resource:
-      attributes:
-        - key: container.id
-          value:
-            stringValue: f7981669609b22994bdf59b3bbfac55cdd66ae633473ea865e21a94431d3e940
-        - key: container.image.name
-          value:
-            stringValue: registry.k8s.io/etcd
-        - key: container.image.tag
-          value:
-            stringValue: 3.5.7-0
-        - key: k8s.cluster.name
-          value:
-            stringValue: dev-operator
-        - key: k8s.container.name
-          value:
-            stringValue: etcd
-        - key: k8s.namespace.name
-          value:
-            stringValue: kube-system
-        - key: k8s.node.name
-          value:
-            stringValue: kind-control-plane
-        - key: k8s.pod.name
-          value:
-            stringValue: etcd-kind-control-plane
-        - key: k8s.pod.uid
-          value:
-            stringValue: 2e3eea91-b1bc-4339-bd65-55bc996d1acb
-        - key: metric_source
-          value:
-            stringValue: kubernetes
-        - key: receiver
-          value:
-            stringValue: k8scluster
-    schemaUrl: https://opentelemetry.io/schemas/1.18.0
-    scopeMetrics:
-      - metrics:
-          - description: Resource requested for the container. See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details
-            gauge:
-              dataPoints:
-                - asDouble: 0.1
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.cpu_request
-            unit: '{cpu}'
-          - description: Resource requested for the container. See https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core for details
-            gauge:
-              dataPoints:
-                - asInt: "104857600"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.memory_request
-            unit: By
-          - description: Whether a container has passed its readiness probe (0 for no, 1 for yes)
-            gauge:
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: kindnet
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 6640ecde-fa37-4a26-ba23-6b6a71bc3921
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: kube-system
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "0"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: kube-proxy
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: ba313745-1d40-4f03-b8ec-8574e97c205b
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: kube-system
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "0"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: sock-splunk-otel-collector-agent
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 3ada8789-be03-4c0a-876c-a30e81d6d374
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: default
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+            name: k8s.daemonset.misscheduled_nodes
+          - gauge:
               dataPoints:
                 - asInt: "1"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.ready
-            unit: "1"
-          - description: How many times the container has restarted in the recent past. This value is pulled directly from the K8s API and the value can go indefinitely high and be reset to 0 at any time depending on how your kubelet is configured to prune dead containers. It is best to not depend too much on the exact value but rather look at it as either == 0, in which case you can conclude there were no restarts in the recent past, or > 0, in which case you can conclude there were restarts in the recent past, and not try and analyze the value beyond that.
-            gauge:
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: kindnet
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 6640ecde-fa37-4a26-ba23-6b6a71bc3921
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: kube-system
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "1"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: kube-proxy
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: ba313745-1d40-4f03-b8ec-8574e97c205b
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: kube-system
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+                - asInt: "1"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.daemonset.name
+                      value:
+                        stringValue: sock-splunk-otel-collector-agent
+                    - key: k8s.daemonset.uid
+                      value:
+                        stringValue: 3ada8789-be03-4c0a-876c-a30e81d6d374
+                    - key: k8s.namespace.name
+                      value:
+                        stringValue: default
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+            name: k8s.daemonset.ready_nodes
+          - gauge:
               dataPoints:
-                - asInt: "0"
-                  startTimeUnixNano: "1000000"
-                  timeUnixNano: "2000000"
-            name: k8s.container.restarts
-            unit: "1"
-        scope:
-          name: otelcol/k8sclusterreceiver
-          version: v0.85.0
+                - asInt: "1"
+                  attributes:
+                    - key: k8s.cluster.name
+                      value:
+                        stringValue: dev-operator
+                    - key: k8s.node.name
+                      value:
+                        stringValue: kind-control-plane
+                    - key: k8s.node.uid
+                      value:
+                        stringValue: 8983a7cc-b9af-4678-b0d5-ed5bb5f8b29f
+                    - key: metric_source
+                      value:
+                        stringValue: kubernetes
+                    - key: receiver
+                      value:
+                        stringValue: k8scluster
+                  timeUnixNano: "1000000"
+            name: k8s.node.condition_ready
+        scope: {}
diff --git a/e2e_tests/testdata/operator_values.yaml b/e2e_tests/testdata/operator_values.yaml
index e2122484fb..4b8d4a6df9 100644
--- a/e2e_tests/testdata/operator_values.yaml
+++ b/e2e_tests/testdata/operator_values.yaml
@@ -6,11 +6,13 @@ splunkObservability:
   realm: CHANGEME
   accessToken: CHANGEME
   ingestUrl: $AGENT_ENDPOINT
-  apiUrl: $AGENT_ENDPOINT
+  apiUrl: $API_URL_ENDPOINT
   metricsEnabled: true
 splunkPlatform:
   token: foobar
   endpoint: $LOG_HEC_ENDPOINT
+  metricsEnabled: true
+  metricsIndex: myMetricsIndex
 agent:
   config:
     exporters:
@@ -18,6 +20,8 @@ agent:
         endpoint: $OTLP_ENDPOINT
         tls:
           insecure: true
+      splunk_hec/platform_metrics:
+        endpoint: $METRIC_HEC_ENDPOINT
     service:
       pipelines:
         traces:
@@ -28,7 +32,7 @@ clusterReceiver:
   config:
     exporters:
       signalfx:
-        endpoint: $K8S_CLUSTER_ENDPOINT
+        ingest_url: $K8S_CLUSTER_ENDPOINT
         tls:
           insecure: true