diff --git a/.github/component_owners.yml b/.github/component_owners.yml
index 1c50cb156..9c7e26c0e 100644
--- a/.github/component_owners.yml
+++ b/.github/component_owners.yml
@@ -22,6 +22,9 @@ components:
   consistent-sampling:
     - oertl
     - PeterF778
+  disk-buffering:
+    - LikeTheSalad
+    - zeitlinger
   samplers:
     - iNikem
     - trask
diff --git a/disk-buffering/CONTRIBUTING.md b/disk-buffering/CONTRIBUTING.md
new file mode 100644
index 000000000..a53f488b6
--- /dev/null
+++ b/disk-buffering/CONTRIBUTING.md
@@ -0,0 +1,56 @@
+# Contributor Guide
+
+Each of the three exporters provided by this
+tool ([LogRecordDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporter.java), [MetricDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporter.java)
+and [SpanDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporter.java))
+is responsible for performing two actions, `write` and `read/delegate`. The `write` action happens
+automatically as sets of signals are provided by the processor, while the `read/delegate` action has
+to be triggered manually by the consumer of this library, as explained in the [README](README.md).
+
+## Writing overview
+
+![Writing flow](assets/writing-flow.png)
+
+* The writing process happens automatically within the exporter's `export(Collection signals)`
+  method, which is called by the configured signal processor.
+* When a set of signals is received, it is delegated to
+  the [DiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporter.java)
+  class, which serializes it using an implementation
+  of [SignalSerializer](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SignalSerializer.java);
+  the serialized data is then appended to a file using an instance of
+  the [Storage](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java)
+  class.
+* The data is written to the file directly, without an intermediate buffer, to make sure no data
+  gets lost in case the application ends unexpectedly.
+* Each disk exporter stores its signals in its own folder, which is expected to contain files
+  that belong to that type of signal only.
+* Each file may contain more than one batch of signals if the configured size limits allow it.
+* If the configured folder size limit has been reached and a new file needs to be created to keep
+  storing new data, the oldest available file will be removed to make space for the new one, as
+  sketched below.
+* The [Storage](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java),
+  [FolderManager](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java)
+  and [WritableFile](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFile.java)
+  files contain more information on the details of the writing process into a file.
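+
+The rotation rule above can be illustrated with a short sketch. This is not the actual
+`FolderManager` implementation; the helper below is made up for illustration only:
+
+```java
+// Illustration only: file names are creation timestamps in milliseconds, so the
+// lexicographically smallest name belongs to the oldest file.
+static void makeRoomForNewFile(File signalFolder, long maxFolderSize) {
+  File[] files = signalFolder.listFiles();
+  if (files == null || files.length == 0) {
+    return;
+  }
+  long usedBytes = 0;
+  for (File file : files) {
+    usedBytes += file.length();
+  }
+  if (usedBytes >= maxFolderSize) {
+    // Find and delete the oldest file to make room for a new one.
+    File oldest = files[0];
+    for (File file : files) {
+      if (file.getName().compareTo(oldest.getName()) < 0) {
+        oldest = file;
+      }
+    }
+    oldest.delete();
+  }
+}
+```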
+
+## Reading overview
+
+![Reading flow](assets/reading-flow.png)
+
+* The reading process has to be triggered manually by the library consumer, as explained in
+  the [README](README.md).
+* A single file is read at a time; the data gathered from it is removed once it has been
+  successfully exported, until the file is emptied. Each file created during the writing process
+  is named with a timestamp in milliseconds, which is used to determine which file to start
+  reading from - the oldest available one.
+* If the oldest available file is stale, which is determined based on the configuration provided
+  at the time of creating the disk exporter, it will be ignored, and the next oldest (and
+  unexpired) one will be used instead.
+* All stale and empty files are removed as new files are created.
+* The [Storage](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java),
+  [FolderManager](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java)
+  and [ReadableFile](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFile.java)
+  files contain more information on the details of the file reading process.
+* Note that the reader delegates the data to the exporter exactly as it was received - it does
+  not try to re-batch the data (though this could be an optimization in the future).
diff --git a/disk-buffering/README.md b/disk-buffering/README.md
new file mode 100644
index 000000000..9684faa1a
--- /dev/null
+++ b/disk-buffering/README.md
@@ -0,0 +1,113 @@
+# Disk buffering
+
+This module provides signal exporter wrappers that intercept and store signals in files so they
+can be sent later on demand. At a high level, there are two separate processes in place: one for
+writing data to disk, and one for reading/exporting the previously stored data.
+
+* Each exporter automatically stores the data on disk right after it's received from its
+  processor.
+* Reading the data back from disk and exporting it has to be done manually. At the moment there's
+  no automatic mechanism to do so. There's more information on how this can be achieved
+  under [Reading data](#reading-data).
+
+> For more detailed information on how the whole process works, take a look at
+> the [CONTRIBUTING](CONTRIBUTING.md) file.
+
+## Configuration
+
+The configurable parameters are provided **per exporter**. The available ones are listed below,
+followed by an example:
+
+* Max file size, defaults to 1MB.
+* Max folder size, defaults to 10MB. All files are stored in a single folder per signal, so if
+  all 3 types of signals are stored, the total amount of disk space taken by default would
+  be 30MB.
+* Max age for file writing, defaults to 30 seconds.
+* Min age for file reading, defaults to 33 seconds. It must be greater than the max age for file
+  writing.
+* Max age for file reading, defaults to 18 hours. After that time passes, the file will be
+  considered stale and will be removed when new files are created. No more data will be read from
+  a file past this time.
+* An instance
+  of [TemporaryFileProvider](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/TemporaryFileProvider.java),
+  defaults to calling `File.createTempFile`. This provider is used when reading from the disk, in
+  order to create a temporary file from which each line (batch of signals) is read and
+  sequentially removed from the original cache file right after the data has been successfully
+  exported.
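+
+A custom configuration can be created via `StorageConfiguration.builder()`. A minimal sketch; the
+values below are illustrative only:
+
+```java
+StorageConfiguration config = StorageConfiguration.builder()
+    .setMaxFileSize(512 * 1024) // 512KB per file
+    .setMaxFolderSize(5 * 1024 * 1024) // 5MB per signal folder
+    .setMaxFileAgeForWriteMillis(TimeUnit.SECONDS.toMillis(10))
+    .setMinFileAgeForReadMillis(TimeUnit.SECONDS.toMillis(12)) // must be greater than the write age
+    .setMaxFileAgeForReadMillis(TimeUnit.HOURS.toMillis(12))
+    .build();
+```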
+
+## Usage
+
+### Storing data
+
+In order to use it, you need to wrap your own exporter with a new instance of
+the ones provided in here:
+
+* For a LogRecordExporter, it must be wrapped within
+  a [LogRecordDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporter.java).
+* For a MetricExporter, it must be wrapped within
+  a [MetricDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporter.java).
+* For a SpanExporter, it must be wrapped within
+  a [SpanDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporter.java).
+
+Each wrapper needs the following when instantiated:
+
+* The exporter to be wrapped.
+* A File instance pointing to the root directory where all the data is going to be written. The
+  same root dir can be used for all the wrappers, since each one creates its own folder inside it.
+* An instance
+  of [StorageConfiguration](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/StorageConfiguration.java)
+  with the desired parameters. You can create one with default values by
+  calling `StorageConfiguration.getDefault()`.
+
+After wrapping your exporters, you must register the wrapper as the exporter you'll use. It will
+take care of always storing the data it receives.
+
+#### Set up example for spans
+
+```java
+// Creating the SpanExporter of our choice.
+SpanExporter mySpanExporter = OtlpGrpcSpanExporter.getDefault();
+
+// Wrapping our exporter with its disk exporter.
+SpanDiskExporter diskExporter = SpanDiskExporter.create(mySpanExporter, new File("/my/signals/cache/dir"), StorageConfiguration.getDefault());
+
+// Registering the disk exporter within our OpenTelemetry instance.
+SdkTracerProvider myTraceProvider = SdkTracerProvider.builder()
+    .addSpanProcessor(SimpleSpanProcessor.create(diskExporter))
+    .build();
+OpenTelemetrySdk.builder()
+    .setTracerProvider(myTraceProvider)
+    .buildAndRegisterGlobal();
+```
+
+### Reading data
+
+Each of the exporter wrappers can read from the disk and send the retrieved data over to its
+wrapped exporter by calling this method:
+
+```java
+try {
+  if (diskExporter.exportStoredBatch(1, TimeUnit.SECONDS)) {
+    // A batch was successfully exported and removed from disk. You can call this method for as long as it keeps returning true.
+  } else {
+    // Either there was no data on disk or the wrapped exporter returned CompletableResultCode.ofFailure().
+  }
+} catch (IOException e) {
+  // Something unexpected happened.
+}
+```
+
+The writing and reading processes can run in parallel without overlapping, because each is
+supposed to happen in different files. The configurable parameters ensure that the reader and the
+writer don't accidentally meet in the same file, by setting non-overlapping time frames for each
+action to be done on a single file at a time. On top of that, there's a mechanism in place to
+avoid overlapping in edge cases where the time frames have ended but the resources haven't been
+released yet. For that mechanism to work properly, this tool assumes that both the reading and the
+writing actions are executed within the same application process.
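+
+#### Reading example
+
+Since there's no automatic reading mechanism, one option is to drain the buffer on a fixed
+schedule. The following is a minimal sketch, assuming the `diskExporter` instance from the span
+setup example above:
+
+```java
+ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
+scheduler.scheduleWithFixedDelay(() -> {
+  try {
+    // Keep exporting stored batches until there's nothing left to send.
+    while (diskExporter.exportStoredBatch(1, TimeUnit.SECONDS)) {}
+  } catch (IOException e) {
+    // Something unexpected happened while reading from disk.
+  }
+}, 1, 1, TimeUnit.MINUTES);
+```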
+
+## Component owners
+
+- [Cesar Munoz](https://github.com/LikeTheSalad), Elastic
+- [Gregor Zeitlinger](https://github.com/zeitlinger), Grafana
+
+Learn more about component owners in [component_owners.yml](../.github/component_owners.yml).
diff --git a/disk-buffering/assets/reading-flow.png b/disk-buffering/assets/reading-flow.png
new file mode 100644
index 000000000..76b8de438
Binary files /dev/null and b/disk-buffering/assets/reading-flow.png differ
diff --git a/disk-buffering/assets/writing-flow.png b/disk-buffering/assets/writing-flow.png
new file mode 100644
index 000000000..c6144b301
Binary files /dev/null and b/disk-buffering/assets/writing-flow.png differ
diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts
new file mode 100644
index 000000000..abfb000eb
--- /dev/null
+++ b/disk-buffering/build.gradle.kts
@@ -0,0 +1,50 @@
+import ru.vyarus.gradle.plugin.animalsniffer.AnimalSniffer
+
+plugins {
+  id("otel.java-conventions")
+  id("otel.publish-conventions")
+  id("me.champeau.jmh") version "0.7.1"
+  id("ru.vyarus.animalsniffer") version "1.7.1"
+}
+
+description = "Exporter implementations that store signals on disk"
+otelJava.moduleName.set("io.opentelemetry.contrib.exporters.disk")
+
+java {
+  sourceCompatibility = JavaVersion.VERSION_1_8
+  targetCompatibility = JavaVersion.VERSION_1_8
+}
+
+val autovalueVersion = "1.10.1"
+dependencies {
+  api("io.opentelemetry:opentelemetry-sdk")
+  implementation("io.opentelemetry:opentelemetry-exporter-otlp-common")
+  implementation("io.opentelemetry.proto:opentelemetry-proto:0.20.0-alpha")
+  compileOnly("com.google.auto.value:auto-value-annotations:$autovalueVersion")
+  annotationProcessor("com.google.auto.value:auto-value:$autovalueVersion")
+  signature("com.toasttab.android:gummy-bears-api-24:0.5.1@signature")
+  testImplementation("org.mockito:mockito-inline:4.11.0")
+  testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
+}
+
+animalsniffer {
+  sourceSets = listOf(java.sourceSets.main.get())
+}
+
+// Always having declared output makes this task properly participate in tasks up-to-date checks
+tasks.withType<AnimalSniffer> {
+  reports.text.required.set(true)
+}
+
+// Attaching animalsniffer check to the compilation process.
+tasks.named("classes").configure { + finalizedBy("animalsnifferMain") +} + +jmh { + warmupIterations.set(0) + fork.set(2) + iterations.set(5) + timeOnIteration.set("5s") + timeUnit.set("ms") +} diff --git a/disk-buffering/src/jmh/java/io/opentelemetry/contrib/disk/buffering/internal/files/utils/FileTransferUtilBenchmark.java b/disk-buffering/src/jmh/java/io/opentelemetry/contrib/disk/buffering/internal/files/utils/FileTransferUtilBenchmark.java new file mode 100644 index 000000000..a9fb7ad5e --- /dev/null +++ b/disk-buffering/src/jmh/java/io/opentelemetry/contrib/disk/buffering/internal/files/utils/FileTransferUtilBenchmark.java @@ -0,0 +1,57 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.files.utils; + +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils.FileTransferUtil; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; + +public class FileTransferUtilBenchmark { + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + public void fileTransfer(FileTransferState state) throws IOException { + state.fileTransferUtil.transferBytes(state.offset, state.amountOfBytesToTransfer); + } + + @State(Scope.Benchmark) + public static class FileTransferState { + public FileTransferUtil fileTransferUtil; + public int offset; + public int amountOfBytesToTransfer; + private File inputFile; + private File outputFile; + + @Setup + public void setUp() throws IOException { + outputFile = File.createTempFile("output", ".txt"); + inputFile = File.createTempFile("input", ".txt"); + int totalDataSize = 1024 * 1024; // 1MB + byte[] data = new byte[totalDataSize]; + Files.write(inputFile.toPath(), data, StandardOpenOption.CREATE); + fileTransferUtil = new FileTransferUtil(new FileInputStream(inputFile), outputFile); + offset = 512; + amountOfBytesToTransfer = totalDataSize - offset; + } + + @TearDown + public void tearDown() throws IOException { + fileTransferUtil.close(); + inputFile.delete(); + outputFile.delete(); + } + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporter.java new file mode 100644 index 000000000..b5fcedd0d --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporter.java @@ -0,0 +1,95 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.exporters.DiskExporter; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.logs.LogRecordProcessor; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import 
io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.io.File; +import java.io.IOException; +import java.util.Collection; +import java.util.concurrent.TimeUnit; + +/** + * This is a {@link LogRecordExporter} wrapper that takes care of intercepting all the signals sent + * out to be exported, tries to store them in the disk in order to export them later. + * + *
<p>
In order to use it, you need to wrap your own {@link LogRecordExporter} with a new instance of + * this one, which will be the one you need to register in your {@link LogRecordProcessor}. + */ +public final class LogRecordDiskExporter implements LogRecordExporter, StoredBatchExporter { + private final LogRecordExporter wrapped; + private final DiskExporter diskExporter; + + /** + * Creates a new instance of {@link LogRecordDiskExporter}. + * + * @param wrapped - The exporter where the data retrieved from the disk will be delegated to. + * @param rootDir - The directory to create this signal's cache dir where all the data will be + * written into. + * @param configuration - How you want to manage the storage process. + * @throws IOException If no dir can be created in rootDir. + */ + public static LogRecordDiskExporter create( + LogRecordExporter wrapped, File rootDir, StorageConfiguration configuration) + throws IOException { + return create(wrapped, rootDir, configuration, StorageClock.getInstance()); + } + + // This is used for testing purposes. + static LogRecordDiskExporter create( + LogRecordExporter wrapped, + File rootDir, + StorageConfiguration configuration, + StorageClock clock) + throws IOException { + return new LogRecordDiskExporter(wrapped, rootDir, configuration, clock); + } + + private LogRecordDiskExporter( + LogRecordExporter wrapped, + File rootDir, + StorageConfiguration configuration, + StorageClock clock) + throws IOException { + this.wrapped = wrapped; + diskExporter = + new DiskExporter<>( + rootDir, configuration, "logs", SignalSerializer.ofLogs(), wrapped::export, clock); + } + + @Override + public CompletableResultCode export(Collection logs) { + return diskExporter.onExport(logs); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + try { + diskExporter.onShutDown(); + } catch (IOException e) { + return CompletableResultCode.ofFailure(); + } finally { + wrapped.shutdown(); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException { + return diskExporter.exportStoredBatch(timeout, unit); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporter.java new file mode 100644 index 000000000..face0f679 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporter.java @@ -0,0 +1,100 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.exporters.DiskExporter; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.metrics.export.MetricReader; +import java.io.File; +import java.io.IOException; +import 
java.util.Collection; +import java.util.concurrent.TimeUnit; + +/** + * This is a {@link MetricExporter} wrapper that takes care of intercepting all the signals sent out + * to be exported, tries to store them in the disk in order to export them later. + * + *
<p>
In order to use it, you need to wrap your own {@link MetricExporter} with a new instance of + * this one, which will be the one you need to register in your {@link MetricReader}. + */ +public final class MetricDiskExporter implements MetricExporter, StoredBatchExporter { + private final MetricExporter wrapped; + private final DiskExporter diskExporter; + + /** + * Creates a new instance of {@link MetricDiskExporter}. + * + * @param wrapped - The exporter where the data retrieved from the disk will be delegated to. + * @param rootDir - The directory to create this signal's cache dir where all the data will be + * written into. + * @param configuration - How you want to manage the storage process. + * @throws IOException If no dir can be created in rootDir. + */ + public static MetricDiskExporter create( + MetricExporter wrapped, File rootDir, StorageConfiguration configuration) throws IOException { + return create(wrapped, rootDir, configuration, StorageClock.getInstance()); + } + + // This is used for testing purposes. + public static MetricDiskExporter create( + MetricExporter wrapped, File rootDir, StorageConfiguration configuration, StorageClock clock) + throws IOException { + return new MetricDiskExporter(wrapped, rootDir, configuration, clock); + } + + private MetricDiskExporter( + MetricExporter wrapped, File rootDir, StorageConfiguration configuration, StorageClock clock) + throws IOException { + this.wrapped = wrapped; + diskExporter = + new DiskExporter<>( + rootDir, + configuration, + "metrics", + SignalSerializer.ofMetrics(), + wrapped::export, + clock); + } + + @Override + public CompletableResultCode export(Collection metrics) { + return diskExporter.onExport(metrics); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + try { + diskExporter.onShutDown(); + } catch (IOException e) { + return CompletableResultCode.ofFailure(); + } finally { + wrapped.shutdown(); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return wrapped.getAggregationTemporality(instrumentType); + } + + @Override + public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException { + return diskExporter.exportStoredBatch(timeout, unit); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporter.java new file mode 100644 index 000000000..abcf98d17 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporter.java @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.exporters.DiskExporter; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SpanProcessor; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.io.File; +import java.io.IOException; +import 
java.util.Collection; +import java.util.concurrent.TimeUnit; + +/** + * This is a {@link SpanExporter} wrapper that takes care of intercepting all the signals sent out + * to be exported, tries to store them in the disk in order to export them later. + * + *
<p>
In order to use it, you need to wrap your own {@link SpanExporter} with a new instance of this + * one, which will be the one you need to register in your {@link SpanProcessor}. + */ +public final class SpanDiskExporter implements SpanExporter, StoredBatchExporter { + private final SpanExporter wrapped; + private final DiskExporter diskExporter; + + /** + * Creates a new instance of {@link SpanDiskExporter}. + * + * @param wrapped - The exporter where the data retrieved from the disk will be delegated to. + * @param rootDir - The directory to create this signal's cache dir where all the data will be + * written into. + * @param configuration - How you want to manage the storage process. + * @throws IOException If no dir can be created in rootDir. + */ + public static SpanDiskExporter create( + SpanExporter wrapped, File rootDir, StorageConfiguration configuration) throws IOException { + return create(wrapped, rootDir, configuration, StorageClock.getInstance()); + } + + // This is used for testing purposes. + public static SpanDiskExporter create( + SpanExporter wrapped, File rootDir, StorageConfiguration configuration, StorageClock clock) + throws IOException { + return new SpanDiskExporter(wrapped, rootDir, configuration, clock); + } + + private SpanDiskExporter( + SpanExporter wrapped, File rootDir, StorageConfiguration configuration, StorageClock clock) + throws IOException { + this.wrapped = wrapped; + diskExporter = + new DiskExporter<>( + rootDir, configuration, "spans", SignalSerializer.ofSpans(), wrapped::export, clock); + } + + @Override + public CompletableResultCode export(Collection spans) { + return diskExporter.onExport(spans); + } + + @Override + public CompletableResultCode shutdown() { + try { + diskExporter.onShutDown(); + } catch (IOException e) { + return CompletableResultCode.ofFailure(); + } finally { + wrapped.shutdown(); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException { + return diskExporter.exportStoredBatch(timeout, unit); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/StoredBatchExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/StoredBatchExporter.java new file mode 100644 index 000000000..ccaf2ad5c --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/StoredBatchExporter.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; + +public interface StoredBatchExporter { + + /** + * Reads data from the disk and attempts to export it. + * + * @param timeout The amount of time to wait for the wrapped exporter to finish. + * @param unit The unit of the time provided. + * @return TRUE if there was data available and it was successfully exported within the timeout + * provided. FALSE if either of those conditions didn't meet. + * @throws IOException If an unexpected error happens. 
+ */ + boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException; +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/StorageConfiguration.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/StorageConfiguration.java new file mode 100644 index 000000000..3cd4a48b8 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/StorageConfiguration.java @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.contrib.disk.buffering.internal.files.DefaultTemporaryFileProvider; +import io.opentelemetry.contrib.disk.buffering.internal.files.TemporaryFileProvider; +import java.util.concurrent.TimeUnit; + +/** Defines how the storage should be managed. */ +@AutoValue +public abstract class StorageConfiguration { + /** The max amount of time a file can receive new data. */ + public abstract long getMaxFileAgeForWriteMillis(); + + /** + * The min amount of time needed to pass before reading from a file. This value MUST be greater + * than getMaxFileAgeForWriteMillis() to make sure the selected file to read is not being written + * to. + */ + public abstract long getMinFileAgeForReadMillis(); + + /** + * The max amount of time a file can be read from, which is also the amount of time a file is not + * considered to be deleted as stale. + */ + public abstract long getMaxFileAgeForReadMillis(); + + /** + * The max file size, If the getMaxFileAgeForWriteMillis() time value hasn't passed but the file + * has reached this size, it stops receiving data. + */ + public abstract int getMaxFileSize(); + + /** + * All the files are stored in a signal-specific folder. This number represents each folder's + * size, therefore the max amount of cache size for the overall telemetry data would be the sum of + * the folder sizes of all the signals being stored in disk. + */ + public abstract int getMaxFolderSize(); + + /** A creator of temporary files needed to do the disk reading process. 
*/
+  public abstract TemporaryFileProvider getTemporaryFileProvider();
+
+  public static StorageConfiguration getDefault() {
+    return builder().build();
+  }
+
+  public static Builder builder() {
+    return new AutoValue_StorageConfiguration.Builder()
+        .setMaxFileSize(1024 * 1024) // 1MB
+        .setMaxFolderSize(10 * 1024 * 1024) // 10MB
+        .setMaxFileAgeForWriteMillis(TimeUnit.SECONDS.toMillis(30))
+        .setMinFileAgeForReadMillis(TimeUnit.SECONDS.toMillis(33))
+        .setMaxFileAgeForReadMillis(TimeUnit.HOURS.toMillis(18))
+        .setTemporaryFileProvider(DefaultTemporaryFileProvider.getInstance());
+  }
+
+  @AutoValue.Builder
+  public abstract static class Builder {
+    public abstract Builder setMaxFileAgeForWriteMillis(long value);
+
+    public abstract Builder setMinFileAgeForReadMillis(long value);
+
+    public abstract Builder setMaxFileAgeForReadMillis(long value);
+
+    public abstract Builder setMaxFileSize(int value);
+
+    public abstract Builder setMaxFolderSize(int value);
+
+    public abstract Builder setTemporaryFileProvider(TemporaryFileProvider value);
+
+    public abstract StorageConfiguration build();
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporter.java
new file mode 100644
index 000000000..da7fc4a83
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporter.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.exporters;
+
+import io.opentelemetry.contrib.disk.buffering.StoredBatchExporter;
+import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.FolderManager;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.Storage;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.ReadableResult;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock;
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import java.io.File;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public final class DiskExporter<EXPORT_DATA> implements StoredBatchExporter {
+  private final Storage storage;
+  private final SignalSerializer<EXPORT_DATA> serializer;
+  private final Function<Collection<EXPORT_DATA>, CompletableResultCode> exportFunction;
+  private static final Logger logger = Logger.getLogger(DiskExporter.class.getName());
+
+  public DiskExporter(
+      File rootDir,
+      StorageConfiguration configuration,
+      String folderName,
+      SignalSerializer<EXPORT_DATA> serializer,
+      Function<Collection<EXPORT_DATA>, CompletableResultCode> exportFunction,
+      StorageClock clock)
+      throws IOException {
+    validateConfiguration(configuration);
+    this.storage =
+        new Storage(new FolderManager(getSignalFolder(rootDir, folderName), configuration, clock));
+    this.serializer = serializer;
+    this.exportFunction = exportFunction;
+  }
+
+  @Override
+  public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException {
+    logger.log(Level.INFO, "Attempting to export batch from disk.");
+    ReadableResult result =
+        storage.readAndProcess(
+            bytes -> {
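+              // Callback invoked by Storage with the serialized bytes of one stored
+              // batch; returning true signals that the batch was exported and can be
+              // removed from the storage file.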
+              logger.log(Level.INFO, "About to export stored batch.");
+              CompletableResultCode join =
+                  exportFunction.apply(serializer.deserialize(bytes)).join(timeout, unit);
+              return join.isSuccess();
+            });
+    return result == ReadableResult.SUCCEEDED;
+  }
+
+  public void onShutDown() throws IOException {
+    storage.close();
+  }
+
+  public CompletableResultCode onExport(Collection<EXPORT_DATA> data) {
+    logger.log(Level.FINER, "Intercepting exporter batch.");
+    try {
+      if (storage.write(serializer.serialize(data))) {
+        return CompletableResultCode.ofSuccess();
+      } else {
+        logger.log(Level.INFO, "Could not store batch in disk. Exporting it right away.");
+        return exportFunction.apply(data);
+      }
+    } catch (IOException e) {
+      logger.log(
+          Level.WARNING,
+          "An unexpected error happened while attempting to write the data in disk. Exporting it right away.",
+          e);
+      return exportFunction.apply(data);
+    }
+  }
+
+  private static File getSignalFolder(File rootDir, String folderName) throws IOException {
+    File folder = new File(rootDir, folderName);
+    if (!folder.exists()) {
+      if (!folder.mkdirs()) {
+        throw new IOException(
+            "Could not create the signal folder: '" + folderName + "' inside: " + rootDir);
+      }
+    }
+    return folder;
+  }
+
+  private static void validateConfiguration(StorageConfiguration configuration) {
+    if (configuration.getMinFileAgeForReadMillis() <= configuration.getMaxFileAgeForWriteMillis()) {
+      throw new IllegalArgumentException(
+          "The configured max file age for writing must be lower than the configured min file age for reading");
+    }
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/DefaultTemporaryFileProvider.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/DefaultTemporaryFileProvider.java
new file mode 100644
index 000000000..2e6e21d98
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/DefaultTemporaryFileProvider.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.files;
+
+import java.io.File;
+import java.io.IOException;
+
+public final class DefaultTemporaryFileProvider implements TemporaryFileProvider {
+  private static final TemporaryFileProvider INSTANCE = new DefaultTemporaryFileProvider();
+
+  public static TemporaryFileProvider getInstance() {
+    return INSTANCE;
+  }
+
+  private DefaultTemporaryFileProvider() {}
+
+  @Override
+  public File createTemporaryFile(String prefix) throws IOException {
+    return File.createTempFile(prefix + "_", ".tmp");
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/TemporaryFileProvider.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/TemporaryFileProvider.java
new file mode 100644
index 000000000..37f13abdd
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/files/TemporaryFileProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.files;
+
+import java.io.File;
+import java.io.IOException;
+
+/** Provides a temporary file needed to do the disk reading process. */
+public interface TemporaryFileProvider {
+
+  /**
+   * Creates a temporary file.
+   *
+   * @param prefix The prefix for the provided file name.
+ */ + File createTemporaryFile(String prefix) throws IOException; +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java new file mode 100644 index 000000000..8b4ceb05a --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java @@ -0,0 +1,233 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributeType; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.ArrayValue; +import io.opentelemetry.proto.common.v1.KeyValue; +import java.util.ArrayList; +import java.util.List; + +public final class AttributesMapper { + + private static final AttributesMapper INSTANCE = new AttributesMapper(); + + public static AttributesMapper getInstance() { + return INSTANCE; + } + + public List attributesToProto(Attributes attributes) { + List keyValues = new ArrayList<>(); + attributes.forEach((attributeKey, o) -> keyValues.add(attributeEntryToProto(attributeKey, o))); + return keyValues; + } + + public Attributes protoToAttributes(List values) { + AttributesBuilder builder = Attributes.builder(); + for (KeyValue keyValue : values) { + addValue(builder, keyValue.getKey(), keyValue.getValue()); + } + return builder.build(); + } + + private static KeyValue attributeEntryToProto(AttributeKey key, Object value) { + KeyValue.Builder builder = KeyValue.newBuilder(); + builder.setKey(key.getKey()); + builder.setValue(attributeValueToProto(key.getType(), value)); + return builder.build(); + } + + @SuppressWarnings("unchecked") + private static AnyValue attributeValueToProto(AttributeType type, Object value) { + switch (type) { + case STRING: + return stringToAnyValue((String) value); + case BOOLEAN: + return booleanToAnyValue((Boolean) value); + case LONG: + return longToAnyValue((Long) value); + case DOUBLE: + return doubleToAnyValue((Double) value); + case STRING_ARRAY: + return arrayToAnyValue(stringListToAnyValue((List) value)); + case BOOLEAN_ARRAY: + return arrayToAnyValue(booleanListToAnyValue((List) value)); + case LONG_ARRAY: + return arrayToAnyValue(longListToAnyValue((List) value)); + case DOUBLE_ARRAY: + return arrayToAnyValue(doubleListToAnyValue((List) value)); + } + throw new UnsupportedOperationException(); + } + + private static AnyValue arrayToAnyValue(List value) { + return AnyValue.newBuilder() + .setArrayValue(ArrayValue.newBuilder().addAllValues(value).build()) + .build(); + } + + private static void addValue(AttributesBuilder builder, String key, AnyValue value) { + if (value.hasStringValue()) { + builder.put(AttributeKey.stringKey(key), value.getStringValue()); + } else if (value.hasBoolValue()) { + builder.put(AttributeKey.booleanKey(key), value.getBoolValue()); + } else if (value.hasIntValue()) { + builder.put(AttributeKey.longKey(key), value.getIntValue()); + } else if (value.hasDoubleValue()) { + builder.put(AttributeKey.doubleKey(key), value.getDoubleValue()); + } else if (value.hasArrayValue()) { + addArray(builder, key, 
value.getArrayValue()); + } else { + throw new UnsupportedOperationException(); + } + } + + private static void addArray(AttributesBuilder builder, String key, ArrayValue arrayValue) { + List values = arrayValue.getValuesList(); + AnyValue anyValue = values.get(0); + if (anyValue.hasStringValue()) { + builder.put(AttributeKey.stringArrayKey(key), anyValuesToStrings(values)); + } else if (anyValue.hasBoolValue()) { + builder.put(AttributeKey.booleanArrayKey(key), anyValuesToBooleans(values)); + } else if (anyValue.hasIntValue()) { + builder.put(AttributeKey.longArrayKey(key), anyValuesToLongs(values)); + } else if (anyValue.hasDoubleValue()) { + builder.put(AttributeKey.doubleArrayKey(key), anyValuesToDoubles(values)); + } else { + throw new UnsupportedOperationException(); + } + } + + private static AnyValue stringToAnyValue(String value) { + AnyValue.Builder anyValue = AnyValue.newBuilder(); + + anyValue.setStringValue(value); + + return anyValue.build(); + } + + private static AnyValue booleanToAnyValue(Boolean value) { + AnyValue.Builder anyValue = AnyValue.newBuilder(); + + if (value != null) { + anyValue.setBoolValue(value); + } + + return anyValue.build(); + } + + private static AnyValue longToAnyValue(Long value) { + AnyValue.Builder anyValue = AnyValue.newBuilder(); + + if (value != null) { + anyValue.setIntValue(value); + } + + return anyValue.build(); + } + + private static AnyValue doubleToAnyValue(Double value) { + AnyValue.Builder anyValue = AnyValue.newBuilder(); + + if (value != null) { + anyValue.setDoubleValue(value); + } + + return anyValue.build(); + } + + private static List stringListToAnyValue(List value) { + List list = new ArrayList<>(value.size()); + for (String string : value) { + list.add(stringToAnyValue(string)); + } + + return list; + } + + private static List booleanListToAnyValue(List value) { + List list = new ArrayList<>(value.size()); + for (Boolean boolean1 : value) { + list.add(booleanToAnyValue(boolean1)); + } + + return list; + } + + private static List longListToAnyValue(List value) { + List list = new ArrayList<>(value.size()); + for (Long long1 : value) { + list.add(longToAnyValue(long1)); + } + + return list; + } + + private static List doubleListToAnyValue(List value) { + List list = new ArrayList<>(value.size()); + for (Double double1 : value) { + list.add(doubleToAnyValue(double1)); + } + + return list; + } + + private static List anyValuesToStrings(List values) { + List list = new ArrayList<>(values.size()); + for (AnyValue anyValue : values) { + list.add(anyValueToString(anyValue)); + } + + return list; + } + + private static List anyValuesToBooleans(List values) { + List list = new ArrayList<>(values.size()); + for (AnyValue anyValue : values) { + list.add(anyValueToBoolean(anyValue)); + } + + return list; + } + + private static List anyValuesToLongs(List values) { + List list = new ArrayList<>(values.size()); + for (AnyValue anyValue : values) { + list.add(anyValueToLong(anyValue)); + } + + return list; + } + + private static List anyValuesToDoubles(List values) { + List list = new ArrayList<>(values.size()); + for (AnyValue anyValue : values) { + list.add(anyValueToDouble(anyValue)); + } + + return list; + } + + private static String anyValueToString(AnyValue value) { + return value.getStringValue(); + } + + private static Boolean anyValueToBoolean(AnyValue value) { + return value.getBoolValue(); + } + + private static Long anyValueToLong(AnyValue value) { + return value.getIntValue(); + } + + private static Double 
anyValueToDouble(AnyValue value) { + return value.getDoubleValue(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java new file mode 100644 index 000000000..ef263c53d --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java @@ -0,0 +1,130 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.proto.common.v1.InstrumentationScope; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.InstrumentationScopeInfoBuilder; +import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; + +public abstract class BaseProtoSignalsDataMapper< + SIGNAL_ITEM, PROTO_SIGNAL_ITEM, PROTO_DATA, PROTO_RESOURCE_ITEM, PROTO_SCOPE_ITEM> { + + public PROTO_DATA toProto(Collection sourceItems) { + Map>> itemsByResourceAndScope = + new HashMap<>(); + sourceItems.forEach( + sourceData -> { + Resource resource = getResourceFromSignal(sourceData); + InstrumentationScopeInfo instrumentationScopeInfo = + getInstrumentationScopeInfo(sourceData); + + Map> itemsByResource = + itemsByResourceAndScope.get(resource); + if (itemsByResource == null) { + itemsByResource = new HashMap<>(); + itemsByResourceAndScope.put(resource, itemsByResource); + } + + List scopeSignals = itemsByResource.get(instrumentationScopeInfo); + if (scopeSignals == null) { + scopeSignals = new ArrayList<>(); + itemsByResource.put(instrumentationScopeInfo, scopeSignals); + } + + scopeSignals.add(signalItemToProto(sourceData)); + }); + + return createProtoData(itemsByResourceAndScope); + } + + public List fromProto(PROTO_DATA protoData) { + List result = new ArrayList<>(); + for (PROTO_RESOURCE_ITEM resourceSignal : getProtoResources(protoData)) { + Resource resource = getResourceFromProto(resourceSignal); + for (PROTO_SCOPE_ITEM scopeSignals : getScopes(resourceSignal)) { + InstrumentationScopeInfo scopeInfo = getInstrumentationScopeFromProto(scopeSignals); + for (PROTO_SIGNAL_ITEM item : getSignalsFromProto(scopeSignals)) { + result.add(protoToSignalItem(item, resource, scopeInfo)); + } + } + } + + return result; + } + + protected io.opentelemetry.proto.resource.v1.Resource resourceToProto(Resource resource) { + return ResourceMapper.getInstance().mapToProto(resource); + } + + protected Resource protoToResource( + io.opentelemetry.proto.resource.v1.Resource protoResource, String schemaUrl) { + return ResourceMapper.getInstance() + .mapToSdk(protoResource, schemaUrl.isEmpty() ? 
null : schemaUrl); + } + + protected InstrumentationScopeInfo protoToInstrumentationScopeInfo( + InstrumentationScope scope, @Nullable String schemaUrl) { + InstrumentationScopeInfoBuilder builder = InstrumentationScopeInfo.builder(scope.getName()); + builder.setAttributes(protoToAttributes(scope.getAttributesList())); + if (!scope.getVersion().isEmpty()) { + builder.setVersion(scope.getVersion()); + } + if (schemaUrl != null) { + builder.setSchemaUrl(schemaUrl); + } + return builder.build(); + } + + protected InstrumentationScope instrumentationScopeToProto(InstrumentationScopeInfo source) { + InstrumentationScope.Builder builder = + InstrumentationScope.newBuilder().setName(source.getName()); + if (source.getVersion() != null) { + builder.setVersion(source.getVersion()); + } + builder.addAllAttributes(attributesToProto(source.getAttributes())); + return builder.build(); + } + + protected abstract PROTO_SIGNAL_ITEM signalItemToProto(SIGNAL_ITEM sourceData); + + protected abstract SIGNAL_ITEM protoToSignalItem( + PROTO_SIGNAL_ITEM protoSignalItem, Resource resource, InstrumentationScopeInfo scopeInfo); + + protected abstract List getProtoResources(PROTO_DATA protoData); + + protected abstract PROTO_DATA createProtoData( + Map>> itemsByResource); + + protected abstract List getSignalsFromProto(PROTO_SCOPE_ITEM scopeSignals); + + protected abstract InstrumentationScopeInfo getInstrumentationScopeFromProto( + PROTO_SCOPE_ITEM scopeSignals); + + protected abstract List getScopes(PROTO_RESOURCE_ITEM resourceSignal); + + protected abstract Resource getResourceFromProto(PROTO_RESOURCE_ITEM resourceSignal); + + protected abstract Resource getResourceFromSignal(SIGNAL_ITEM source); + + protected abstract InstrumentationScopeInfo getInstrumentationScopeInfo(SIGNAL_ITEM source); + + private static List attributesToProto(Attributes source) { + return AttributesMapper.getInstance().attributesToProto(source); + } + + private static Attributes protoToAttributes(List source) { + return AttributesMapper.getInstance().protoToAttributes(source); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java new file mode 100644 index 000000000..a04fbe0b7 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java @@ -0,0 +1,25 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common; + +import com.google.protobuf.ByteString; + +public final class ByteStringMapper { + + private static final ByteStringMapper INSTANCE = new ByteStringMapper(); + + public static ByteStringMapper getInstance() { + return INSTANCE; + } + + public ByteString stringToProto(String source) { + return ByteString.copyFromUtf8(source); + } + + public String protoToString(ByteString source) { + return source.toStringUtf8(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java new file mode 100644 index 000000000..c93894cb6 --- /dev/null +++ 
b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common; + +import io.opentelemetry.proto.resource.v1.Resource; +import io.opentelemetry.sdk.resources.ResourceBuilder; +import javax.annotation.Nullable; + +public final class ResourceMapper { + + private static final ResourceMapper INSTANCE = new ResourceMapper(); + + public static ResourceMapper getInstance() { + return INSTANCE; + } + + public Resource mapToProto(io.opentelemetry.sdk.resources.Resource sdkResource) { + return Resource.newBuilder() + .addAllAttributes( + AttributesMapper.getInstance().attributesToProto(sdkResource.getAttributes())) + .build(); + } + + public io.opentelemetry.sdk.resources.Resource mapToSdk( + Resource protoResource, @Nullable String schemaUrl) { + ResourceBuilder resource = io.opentelemetry.sdk.resources.Resource.builder(); + + if (schemaUrl != null) { + resource.setSchemaUrl(schemaUrl); + } + resource.putAll( + AttributesMapper.getInstance().protoToAttributes(protoResource.getAttributesList())); + return resource.build(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java new file mode 100644 index 000000000..8cedb2d4e --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java @@ -0,0 +1,124 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.AttributesMapper; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.ByteStringMapper; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.models.LogRecordDataImpl; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.logs.v1.LogRecord; +import io.opentelemetry.proto.logs.v1.SeverityNumber; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.Body; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.resources.Resource; + +public final class LogRecordDataMapper { + + private static final LogRecordDataMapper INSTANCE = new LogRecordDataMapper(); + + public static LogRecordDataMapper getInstance() { + return INSTANCE; + } + + public LogRecord mapToProto(LogRecordData source) { + LogRecord.Builder logRecord = LogRecord.newBuilder(); + + logRecord.setTimeUnixNano(source.getTimestampEpochNanos()); + logRecord.setObservedTimeUnixNano(source.getObservedTimestampEpochNanos()); + if (source.getSeverity() != null) { + logRecord.setSeverityNumber(severityToProto(source.getSeverity())); + } + if (source.getSeverityText() != null) { + logRecord.setSeverityText(source.getSeverityText()); + 
} + if (source.getBody() != null) { + logRecord.setBody(bodyToAnyValue(source.getBody())); + } + + logRecord.setFlags(source.getSpanContext().getTraceFlags().asByte()); + + addExtrasToProtoBuilder(source, logRecord); + + return logRecord.build(); + } + + private static void addExtrasToProtoBuilder(LogRecordData source, LogRecord.Builder target) { + target.addAllAttributes( + AttributesMapper.getInstance().attributesToProto(source.getAttributes())); + SpanContext spanContext = source.getSpanContext(); + target.setSpanId(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); + target.setTraceId(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); + target.setDroppedAttributesCount( + source.getTotalAttributeCount() - source.getAttributes().size()); + } + + public LogRecordData mapToSdk( + LogRecord source, Resource resource, InstrumentationScopeInfo scopeInfo) { + LogRecordDataImpl.Builder logRecordData = LogRecordDataImpl.builder(); + + logRecordData.setTimestampEpochNanos(source.getTimeUnixNano()); + logRecordData.setObservedTimestampEpochNanos(source.getObservedTimeUnixNano()); + logRecordData.setSeverity(severityNumberToSdk(source.getSeverityNumber())); + logRecordData.setSeverityText(source.getSeverityText()); + if (source.hasBody()) { + logRecordData.setBody(anyValueToBody(source.getBody())); + } + + addExtrasToSdkItemBuilder(source, logRecordData, resource, scopeInfo); + + return logRecordData.build(); + } + + private static void addExtrasToSdkItemBuilder( + LogRecord source, + LogRecordDataImpl.Builder target, + Resource resource, + InstrumentationScopeInfo scopeInfo) { + Attributes attributes = + AttributesMapper.getInstance().protoToAttributes(source.getAttributesList()); + target.setAttributes(attributes); + target.setSpanContext( + SpanContext.create( + ByteStringMapper.getInstance().protoToString(source.getTraceId()), + ByteStringMapper.getInstance().protoToString(source.getSpanId()), + TraceFlags.getSampled(), + TraceState.getDefault())); + target.setTotalAttributeCount(source.getDroppedAttributesCount() + attributes.size()); + target.setResource(resource); + target.setInstrumentationScopeInfo(scopeInfo); + } + + private static AnyValue bodyToAnyValue(Body body) { + return AnyValue.newBuilder().setStringValue(body.asString()).build(); + } + + private static SeverityNumber severityToProto(Severity severity) { + return SeverityNumber.forNumber(severity.getSeverityNumber()); + } + + private static Body anyValueToBody(AnyValue source) { + if (source.hasStringValue()) { + return Body.string(source.getStringValue()); + } else { + return Body.empty(); + } + } + + private static Severity severityNumberToSdk(SeverityNumber source) { + for (Severity value : Severity.values()) { + if (value.getSeverityNumber() == source.getNumber()) { + return value; + } + } + throw new IllegalArgumentException(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java new file mode 100644 index 000000000..92ee11ab2 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java @@ -0,0 +1,110 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs; + +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.BaseProtoSignalsDataMapper; +import io.opentelemetry.proto.logs.v1.LogRecord; +import io.opentelemetry.proto.logs.v1.LogsData; +import io.opentelemetry.proto.logs.v1.ResourceLogs; +import io.opentelemetry.proto.logs.v1.ScopeLogs; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class ProtoLogsDataMapper + extends BaseProtoSignalsDataMapper< + LogRecordData, LogRecord, LogsData, ResourceLogs, ScopeLogs> { + + private static final ProtoLogsDataMapper INSTANCE = new ProtoLogsDataMapper(); + + public static ProtoLogsDataMapper getInstance() { + return INSTANCE; + } + + @Override + protected LogRecord signalItemToProto(LogRecordData sourceData) { + return LogRecordDataMapper.getInstance().mapToProto(sourceData); + } + + @Override + protected LogRecordData protoToSignalItem( + LogRecord logRecord, Resource resource, InstrumentationScopeInfo scopeInfo) { + return LogRecordDataMapper.getInstance().mapToSdk(logRecord, resource, scopeInfo); + } + + @Override + protected List getProtoResources(LogsData logsData) { + return logsData.getResourceLogsList(); + } + + @Override + protected LogsData createProtoData( + Map>> itemsByResource) { + List items = new ArrayList<>(); + itemsByResource.forEach( + (resource, instrumentationScopeInfoScopedLogsMap) -> { + ResourceLogs.Builder resourceLogsBuilder = createProtoResourceBuilder(resource); + for (Map.Entry> logsByScope : + instrumentationScopeInfoScopedLogsMap.entrySet()) { + ScopeLogs.Builder scopeBuilder = createProtoScopeBuilder(logsByScope.getKey()); + scopeBuilder.addAllLogRecords(logsByScope.getValue()); + resourceLogsBuilder.addScopeLogs(scopeBuilder.build()); + } + items.add(resourceLogsBuilder.build()); + }); + return LogsData.newBuilder().addAllResourceLogs(items).build(); + } + + private ScopeLogs.Builder createProtoScopeBuilder(InstrumentationScopeInfo scopeInfo) { + ScopeLogs.Builder builder = + ScopeLogs.newBuilder().setScope(instrumentationScopeToProto(scopeInfo)); + if (scopeInfo.getSchemaUrl() != null) { + builder.setSchemaUrl(scopeInfo.getSchemaUrl()); + } + return builder; + } + + private ResourceLogs.Builder createProtoResourceBuilder(Resource resource) { + ResourceLogs.Builder builder = ResourceLogs.newBuilder().setResource(resourceToProto(resource)); + if (resource.getSchemaUrl() != null) { + builder.setSchemaUrl(resource.getSchemaUrl()); + } + return builder; + } + + @Override + protected List getSignalsFromProto(ScopeLogs scopeSignals) { + return scopeSignals.getLogRecordsList(); + } + + @Override + protected InstrumentationScopeInfo getInstrumentationScopeFromProto(ScopeLogs scopeSignals) { + return protoToInstrumentationScopeInfo(scopeSignals.getScope(), scopeSignals.getSchemaUrl()); + } + + @Override + protected List getScopes(ResourceLogs resourceSignal) { + return resourceSignal.getScopeLogsList(); + } + + @Override + protected Resource getResourceFromProto(ResourceLogs resourceSignal) { + return protoToResource(resourceSignal.getResource(), resourceSignal.getSchemaUrl()); + } + + @Override + protected Resource getResourceFromSignal(LogRecordData source) { + return source.getResource(); + } + + @Override + protected InstrumentationScopeInfo 
getInstrumentationScopeInfo(LogRecordData source) { + return source.getInstrumentationScopeInfo(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/models/LogRecordDataImpl.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/models/LogRecordDataImpl.java new file mode 100644 index 000000000..de130e3d1 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/models/LogRecordDataImpl.java @@ -0,0 +1,48 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.models; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.Body; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.resources.Resource; + +@AutoValue +public abstract class LogRecordDataImpl implements LogRecordData { + + public static Builder builder() { + return new AutoValue_LogRecordDataImpl.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setResource(Resource value); + + public abstract Builder setInstrumentationScopeInfo(InstrumentationScopeInfo value); + + public abstract Builder setTimestampEpochNanos(Long value); + + public abstract Builder setObservedTimestampEpochNanos(Long value); + + public abstract Builder setSpanContext(SpanContext value); + + public abstract Builder setSeverity(Severity value); + + public abstract Builder setSeverityText(String value); + + public abstract Builder setBody(Body value); + + public abstract Builder setAttributes(Attributes value); + + public abstract Builder setTotalAttributeCount(Integer value); + + public abstract LogRecordDataImpl build(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java new file mode 100644 index 000000000..512cc71cb --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java @@ -0,0 +1,779 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.AttributesMapper; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.ByteStringMapper; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.AggregationTemporality; +import io.opentelemetry.proto.metrics.v1.Exemplar; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogram; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint; +import io.opentelemetry.proto.metrics.v1.Gauge; +import 
io.opentelemetry.proto.metrics.v1.Histogram; +import io.opentelemetry.proto.metrics.v1.HistogramDataPoint; +import io.opentelemetry.proto.metrics.v1.Metric; +import io.opentelemetry.proto.metrics.v1.NumberDataPoint; +import io.opentelemetry.proto.metrics.v1.Sum; +import io.opentelemetry.proto.metrics.v1.Summary; +import io.opentelemetry.proto.metrics.v1.SummaryDataPoint; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.data.Data; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.data.GaugeData; +import io.opentelemetry.sdk.metrics.data.HistogramData; +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.MetricDataType; +import io.opentelemetry.sdk.metrics.data.PointData; +import io.opentelemetry.sdk.metrics.data.SumData; +import io.opentelemetry.sdk.metrics.data.SummaryData; +import io.opentelemetry.sdk.metrics.data.SummaryPointData; +import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; +import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.List; + +public final class MetricDataMapper { + + private static final MetricDataMapper INSTANCE = new MetricDataMapper(); + + public static MetricDataMapper getInstance() { + return INSTANCE; + } + + public Metric mapToProto(MetricData source) { + Metric.Builder metric = Metric.newBuilder(); + + metric.setName(source.getName()); + metric.setDescription(source.getDescription()); + metric.setUnit(source.getUnit()); + + addDataToProto(source, metric); + + return metric.build(); + } + + @SuppressWarnings("unchecked") + public MetricData mapToSdk(Metric source, Resource resource, InstrumentationScopeInfo scope) { + switch (source.getDataCase()) { + case GAUGE: + DataWithType gaugeDataWithType = mapGaugeToSdk(source.getGauge()); 
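+        // The proto Gauge carries no long-vs-double marker, so mapGaugeToSdk infers the
+        // SDK type from its first NumberDataPoint: as_int maps to LONG_GAUGE, while
+        // as_double (or an empty gauge) maps to DOUBLE_GAUGE.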
+        if (gaugeDataWithType.type == MetricDataType.DOUBLE_GAUGE) {
+          return ImmutableMetricData.createDoubleGauge(
+              resource,
+              scope,
+              source.getName(),
+              source.getDescription(),
+              source.getUnit(),
+              (GaugeData<DoublePointData>) gaugeDataWithType.data);
+        } else {
+          return ImmutableMetricData.createLongGauge(
+              resource,
+              scope,
+              source.getName(),
+              source.getDescription(),
+              source.getUnit(),
+              (GaugeData<LongPointData>) gaugeDataWithType.data);
+        }
+      case SUM:
+        DataWithType sumDataWithType = mapSumToSdk(source.getSum());
+        if (sumDataWithType.type == MetricDataType.DOUBLE_SUM) {
+          return ImmutableMetricData.createDoubleSum(
+              resource,
+              scope,
+              source.getName(),
+              source.getDescription(),
+              source.getUnit(),
+              (SumData<DoublePointData>) sumDataWithType.data);
+        } else {
+          return ImmutableMetricData.createLongSum(
+              resource,
+              scope,
+              source.getName(),
+              source.getDescription(),
+              source.getUnit(),
+              (SumData<LongPointData>) sumDataWithType.data);
+        }
+      case SUMMARY:
+        return ImmutableMetricData.createDoubleSummary(
+            resource,
+            scope,
+            source.getName(),
+            source.getDescription(),
+            source.getUnit(),
+            mapSummaryToSdk(source.getSummary()));
+      case HISTOGRAM:
+        return ImmutableMetricData.createDoubleHistogram(
+            resource,
+            scope,
+            source.getName(),
+            source.getDescription(),
+            source.getUnit(),
+            mapHistogramToSdk(source.getHistogram()));
+      case EXPONENTIAL_HISTOGRAM:
+        return ImmutableMetricData.createExponentialHistogram(
+            resource,
+            scope,
+            source.getName(),
+            source.getDescription(),
+            source.getUnit(),
+            mapExponentialHistogramToSdk(source.getExponentialHistogram()));
+      default:
+        throw new UnsupportedOperationException();
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private static void addDataToProto(MetricData source, Metric.Builder target) {
+    switch (source.getType()) {
+      case LONG_GAUGE:
+        target.setGauge(mapLongGaugeToProto((GaugeData<LongPointData>) source.getData()));
+        break;
+      case DOUBLE_GAUGE:
+        target.setGauge(mapDoubleGaugeToProto((GaugeData<DoublePointData>) source.getData()));
+        break;
+      case LONG_SUM:
+        target.setSum(mapLongSumToProto((SumData<LongPointData>) source.getData()));
+        break;
+      case DOUBLE_SUM:
+        target.setSum(mapDoubleSumToProto((SumData<DoublePointData>) source.getData()));
+        break;
+      case SUMMARY:
+        target.setSummary(mapSummaryToProto((SummaryData) source.getData()));
+        break;
+      case HISTOGRAM:
+        target.setHistogram(mapHistogramToProto((HistogramData) source.getData()));
+        break;
+      case EXPONENTIAL_HISTOGRAM:
+        target.setExponentialHistogram(
+            mapExponentialHistogramToProto((ExponentialHistogramData) source.getData()));
+        break;
+    }
+  }
+
+  private static DataWithType mapGaugeToSdk(Gauge gauge) {
+    if (gauge.getDataPointsCount() > 0) {
+      NumberDataPoint dataPoint = gauge.getDataPoints(0);
+      if (dataPoint.hasAsInt()) {
+        return new DataWithType(mapLongGaugeToSdk(gauge), MetricDataType.LONG_GAUGE);
+      } else if (dataPoint.hasAsDouble()) {
+        return new DataWithType(mapDoubleGaugeToSdk(gauge), MetricDataType.DOUBLE_GAUGE);
+      }
+    }
+    return new DataWithType(mapDoubleGaugeToSdk(gauge), MetricDataType.DOUBLE_GAUGE);
+  }
+
+  private static DataWithType mapSumToSdk(Sum sum) {
+    if (sum.getDataPointsCount() > 0) {
+      NumberDataPoint dataPoint = sum.getDataPoints(0);
+      if (dataPoint.hasAsInt()) {
+        return new DataWithType(mapLongSumToSdk(sum), MetricDataType.LONG_SUM);
+      } else if (dataPoint.hasAsDouble()) {
+        return new DataWithType(mapDoubleSumToSdk(sum), MetricDataType.DOUBLE_SUM);
+      }
+    }
+    return new DataWithType(mapDoubleSumToSdk(sum), MetricDataType.DOUBLE_SUM);
+  }
+
+  private static Gauge mapLongGaugeToProto(GaugeData<LongPointData> data) {
+    Gauge.Builder gauge = 
Gauge.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (LongPointData point : data.getPoints()) {
+        gauge.addDataPoints(longPointDataToNumberDataPoint(point));
+      }
+    }
+
+    return gauge.build();
+  }
+
+  private static Gauge mapDoubleGaugeToProto(GaugeData<DoublePointData> data) {
+    Gauge.Builder gauge = Gauge.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (DoublePointData point : data.getPoints()) {
+        gauge.addDataPoints(doublePointDataToNumberDataPoint(point));
+      }
+    }
+
+    return gauge.build();
+  }
+
+  private static Sum mapLongSumToProto(SumData<LongPointData> data) {
+    Sum.Builder sum = Sum.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (LongPointData point : data.getPoints()) {
+        sum.addDataPoints(longPointDataToNumberDataPoint(point));
+      }
+    }
+    sum.setIsMonotonic(data.isMonotonic());
+    sum.setAggregationTemporality(
+        mapAggregationTemporalityToProto(data.getAggregationTemporality()));
+
+    return sum.build();
+  }
+
+  private static Sum mapDoubleSumToProto(SumData<DoublePointData> data) {
+    Sum.Builder sum = Sum.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (DoublePointData point : data.getPoints()) {
+        sum.addDataPoints(doublePointDataToNumberDataPoint(point));
+      }
+    }
+    sum.setIsMonotonic(data.isMonotonic());
+    sum.setAggregationTemporality(
+        mapAggregationTemporalityToProto(data.getAggregationTemporality()));
+
+    return sum.build();
+  }
+
+  private static Summary mapSummaryToProto(SummaryData data) {
+    Summary.Builder summary = Summary.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (SummaryPointData point : data.getPoints()) {
+        summary.addDataPoints(summaryPointDataToSummaryDataPoint(point));
+      }
+    }
+
+    return summary.build();
+  }
+
+  private static Histogram mapHistogramToProto(HistogramData data) {
+    Histogram.Builder histogram = Histogram.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (HistogramPointData point : data.getPoints()) {
+        histogram.addDataPoints(histogramPointDataToHistogramDataPoint(point));
+      }
+    }
+    histogram.setAggregationTemporality(
+        mapAggregationTemporalityToProto(data.getAggregationTemporality()));
+
+    return histogram.build();
+  }
+
+  private static ExponentialHistogram mapExponentialHistogramToProto(
+      ExponentialHistogramData data) {
+    ExponentialHistogram.Builder exponentialHistogram = ExponentialHistogram.newBuilder();
+
+    if (data.getPoints() != null) {
+      for (ExponentialHistogramPointData point : data.getPoints()) {
+        exponentialHistogram.addDataPoints(
+            exponentialHistogramPointDataToExponentialHistogramDataPoint(point));
+      }
+    }
+    exponentialHistogram.setAggregationTemporality(
+        mapAggregationTemporalityToProto(data.getAggregationTemporality()));
+
+    return exponentialHistogram.build();
+  }
+
+  private static NumberDataPoint longPointDataToNumberDataPoint(LongPointData source) {
+    NumberDataPoint.Builder numberDataPoint = NumberDataPoint.newBuilder();
+
+    numberDataPoint.setStartTimeUnixNano(source.getStartEpochNanos());
+    numberDataPoint.setTimeUnixNano(source.getEpochNanos());
+    numberDataPoint.setAsInt(source.getValue());
+    if (source.getExemplars() != null) {
+      for (LongExemplarData exemplar : source.getExemplars()) {
+        numberDataPoint.addExemplars(longExemplarDataToExemplar(exemplar));
+      }
+    }
+
+    addAttributesToNumberDataPoint(source, numberDataPoint);
+
+    return numberDataPoint.build();
+  }
+
+  private static void addAttributesToNumberDataPoint(
+      PointData source, NumberDataPoint.Builder target) {
+    target.addAllAttributes(attributesToProto(source.getAttributes()));
+  }
+
+  private static NumberDataPoint 
doublePointDataToNumberDataPoint(DoublePointData source) { + NumberDataPoint.Builder numberDataPoint = NumberDataPoint.newBuilder(); + + numberDataPoint.setStartTimeUnixNano(source.getStartEpochNanos()); + numberDataPoint.setTimeUnixNano(source.getEpochNanos()); + numberDataPoint.setAsDouble(source.getValue()); + if (source.getExemplars() != null) { + for (DoubleExemplarData exemplar : source.getExemplars()) { + numberDataPoint.addExemplars(doubleExemplarDataToExemplar(exemplar)); + } + } + + addAttributesToNumberDataPoint(source, numberDataPoint); + + return numberDataPoint.build(); + } + + private static SummaryDataPoint summaryPointDataToSummaryDataPoint( + SummaryPointData summaryPointData) { + SummaryDataPoint.Builder summaryDataPoint = SummaryDataPoint.newBuilder(); + + summaryDataPoint.setStartTimeUnixNano(summaryPointData.getStartEpochNanos()); + summaryDataPoint.setTimeUnixNano(summaryPointData.getEpochNanos()); + if (summaryPointData.getValues() != null) { + for (ValueAtQuantile value : summaryPointData.getValues()) { + summaryDataPoint.addQuantileValues(valueAtQuantileToValueAtQuantile(value)); + } + } + summaryDataPoint.setCount(summaryPointData.getCount()); + summaryDataPoint.setSum(summaryPointData.getSum()); + + addAttributesToSummaryDataPoint(summaryPointData, summaryDataPoint); + + return summaryDataPoint.build(); + } + + private static void addAttributesToSummaryDataPoint( + PointData source, SummaryDataPoint.Builder target) { + target.addAllAttributes(attributesToProto(source.getAttributes())); + } + + private static HistogramDataPoint histogramPointDataToHistogramDataPoint( + HistogramPointData histogramPointData) { + HistogramDataPoint.Builder histogramDataPoint = HistogramDataPoint.newBuilder(); + + histogramDataPoint.setStartTimeUnixNano(histogramPointData.getStartEpochNanos()); + histogramDataPoint.setTimeUnixNano(histogramPointData.getEpochNanos()); + if (histogramPointData.getCounts() != null) { + for (Long count : histogramPointData.getCounts()) { + histogramDataPoint.addBucketCounts(count); + } + } + if (histogramPointData.getBoundaries() != null) { + for (Double boundary : histogramPointData.getBoundaries()) { + histogramDataPoint.addExplicitBounds(boundary); + } + } + if (histogramPointData.getExemplars() != null) { + for (DoubleExemplarData exemplar : histogramPointData.getExemplars()) { + histogramDataPoint.addExemplars(doubleExemplarDataToExemplar(exemplar)); + } + } + histogramDataPoint.setCount(histogramPointData.getCount()); + histogramDataPoint.setSum(histogramPointData.getSum()); + if (histogramPointData.hasMin()) { + histogramDataPoint.setMin(histogramPointData.getMin()); + } + if (histogramPointData.hasMax()) { + histogramDataPoint.setMax(histogramPointData.getMax()); + } + + addAttributesToHistogramDataPoint(histogramPointData, histogramDataPoint); + + return histogramDataPoint.build(); + } + + private static void addAttributesToHistogramDataPoint( + HistogramPointData source, HistogramDataPoint.Builder target) { + target.addAllAttributes(attributesToProto(source.getAttributes())); + } + + private static ExponentialHistogramDataPoint + exponentialHistogramPointDataToExponentialHistogramDataPoint( + ExponentialHistogramPointData exponentialHistogramPointData) { + ExponentialHistogramDataPoint.Builder exponentialHistogramDataPoint = + ExponentialHistogramDataPoint.newBuilder(); + + exponentialHistogramDataPoint.setStartTimeUnixNano( + exponentialHistogramPointData.getStartEpochNanos()); + 
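// An exponential histogram point is fully described by its scale, zero count and the
+    // positive/negative bucket arrays: with base = 2^(2^-scale), the bucket at index i
+    // (offset plus position in the counts list) covers the range (base^i, base^(i+1)].
+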
exponentialHistogramDataPoint.setTimeUnixNano(exponentialHistogramPointData.getEpochNanos()); + exponentialHistogramDataPoint.setPositive( + exponentialHistogramBucketsToBuckets(exponentialHistogramPointData.getPositiveBuckets())); + exponentialHistogramDataPoint.setNegative( + exponentialHistogramBucketsToBuckets(exponentialHistogramPointData.getNegativeBuckets())); + if (exponentialHistogramPointData.getExemplars() != null) { + for (DoubleExemplarData exemplar : exponentialHistogramPointData.getExemplars()) { + exponentialHistogramDataPoint.addExemplars(doubleExemplarDataToExemplar(exemplar)); + } + } + exponentialHistogramDataPoint.setCount(exponentialHistogramPointData.getCount()); + exponentialHistogramDataPoint.setSum(exponentialHistogramPointData.getSum()); + exponentialHistogramDataPoint.setScale(exponentialHistogramPointData.getScale()); + exponentialHistogramDataPoint.setZeroCount(exponentialHistogramPointData.getZeroCount()); + if (exponentialHistogramPointData.hasMin()) { + exponentialHistogramDataPoint.setMin(exponentialHistogramPointData.getMin()); + } + if (exponentialHistogramPointData.hasMax()) { + exponentialHistogramDataPoint.setMax(exponentialHistogramPointData.getMax()); + } + + addAttributesToExponentialHistogramDataPoint( + exponentialHistogramPointData, exponentialHistogramDataPoint); + + return exponentialHistogramDataPoint.build(); + } + + private static void addAttributesToExponentialHistogramDataPoint( + ExponentialHistogramPointData source, ExponentialHistogramDataPoint.Builder target) { + target.addAllAttributes(attributesToProto(source.getAttributes())); + } + + private static ExponentialHistogramDataPoint.Buckets exponentialHistogramBucketsToBuckets( + ExponentialHistogramBuckets source) { + ExponentialHistogramDataPoint.Buckets.Builder buckets = + ExponentialHistogramDataPoint.Buckets.newBuilder(); + + if (source.getBucketCounts() != null) { + for (Long bucketCount : source.getBucketCounts()) { + buckets.addBucketCounts(bucketCount); + } + } + buckets.setOffset(source.getOffset()); + + return buckets.build(); + } + + private static Exemplar doubleExemplarDataToExemplar(DoubleExemplarData doubleExemplarData) { + Exemplar.Builder exemplar = Exemplar.newBuilder(); + + exemplar.setTimeUnixNano(doubleExemplarData.getEpochNanos()); + exemplar.setAsDouble(doubleExemplarData.getValue()); + + addExtrasToExemplar(doubleExemplarData, exemplar); + + return exemplar.build(); + } + + private static Exemplar longExemplarDataToExemplar(LongExemplarData doubleExemplarData) { + Exemplar.Builder exemplar = Exemplar.newBuilder(); + + exemplar.setTimeUnixNano(doubleExemplarData.getEpochNanos()); + exemplar.setAsInt(doubleExemplarData.getValue()); + + addExtrasToExemplar(doubleExemplarData, exemplar); + + return exemplar.build(); + } + + private static void addExtrasToExemplar(ExemplarData source, Exemplar.Builder target) { + target.addAllFilteredAttributes(attributesToProto(source.getFilteredAttributes())); + SpanContext spanContext = source.getSpanContext(); + target.setSpanId(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); + target.setTraceId(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); + } + + private static AggregationTemporality mapAggregationTemporalityToProto( + io.opentelemetry.sdk.metrics.data.AggregationTemporality source) { + AggregationTemporality aggregationTemporality; + + switch (source) { + case DELTA: + aggregationTemporality = AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA; + break; + case CUMULATIVE: + 
aggregationTemporality = AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE; + break; + default: + aggregationTemporality = AggregationTemporality.UNRECOGNIZED; + } + + return aggregationTemporality; + } + + private static SummaryData mapSummaryToSdk(Summary summary) { + return ImmutableSummaryData.create( + summaryDataPointListToSummaryPointDataCollection(summary.getDataPointsList())); + } + + private static HistogramData mapHistogramToSdk(Histogram histogram) { + return ImmutableHistogramData.create( + mapAggregationTemporalityToSdk(histogram.getAggregationTemporality()), + histogramDataPointListToHistogramPointDataCollection(histogram.getDataPointsList())); + } + + private static ExponentialHistogramData mapExponentialHistogramToSdk( + ExponentialHistogram source) { + return ImmutableExponentialHistogramData.create( + mapAggregationTemporalityToSdk(source.getAggregationTemporality()), + exponentialHistogramDataPointListToExponentialHistogramPointDataCollection( + source.getDataPointsList())); + } + + private static ExponentialHistogramPointData + exponentialHistogramDataPointToExponentialHistogramPointData( + ExponentialHistogramDataPoint source) { + return ImmutableExponentialHistogramPointData.create( + source.getScale(), + source.getSum(), + source.getZeroCount(), + source.hasMin(), + source.getMin(), + source.hasMax(), + source.getMax(), + mapBucketsFromProto(source.getPositive(), source.getScale()), + mapBucketsFromProto(source.getNegative(), source.getScale()), + source.getStartTimeUnixNano(), + source.getTimeUnixNano(), + protoToAttributes(source.getAttributesList()), + exemplarListToDoubleExemplarDataList(source.getExemplarsList())); + } + + private static HistogramPointData histogramDataPointToHistogramPointData( + HistogramDataPoint source) { + return ImmutableHistogramPointData.create( + source.getStartTimeUnixNano(), + source.getTimeUnixNano(), + protoToAttributes(source.getAttributesList()), + source.getSum(), + source.hasMin(), + source.getMin(), + source.hasMax(), + source.getMax(), + source.getExplicitBoundsList(), + source.getBucketCountsList(), + exemplarListToDoubleExemplarDataList(source.getExemplarsList())); + } + + private static DoubleExemplarData exemplarToDoubleExemplarData(Exemplar source) { + return ImmutableDoubleExemplarData.create( + protoToAttributes(source.getFilteredAttributesList()), + source.getTimeUnixNano(), + createForExemplar(source), + source.getAsDouble()); + } + + private static LongExemplarData exemplarToLongExemplarData(Exemplar source) { + return ImmutableLongExemplarData.create( + protoToAttributes(source.getFilteredAttributesList()), + source.getTimeUnixNano(), + createForExemplar(source), + source.getAsInt()); + } + + private static SpanContext createForExemplar(Exemplar value) { + return SpanContext.create( + ByteStringMapper.getInstance().protoToString(value.getTraceId()), + ByteStringMapper.getInstance().protoToString(value.getSpanId()), + TraceFlags.getSampled(), + TraceState.getDefault()); + } + + private static SummaryPointData summaryDataPointToSummaryPointData(SummaryDataPoint source) { + return ImmutableSummaryPointData.create( + source.getStartTimeUnixNano(), + source.getTimeUnixNano(), + protoToAttributes(source.getAttributesList()), + source.getCount(), + source.getSum(), + valueAtQuantileListToValueAtQuantileList(source.getQuantileValuesList())); + } + + private static ValueAtQuantile mapFromSummaryValueAtQuantileProto( + SummaryDataPoint.ValueAtQuantile source) { + return 
ImmutableValueAtQuantile.create(source.getQuantile(), source.getValue());
+  }
+
+  private static io.opentelemetry.sdk.metrics.data.AggregationTemporality
+      mapAggregationTemporalityToSdk(AggregationTemporality source) {
+    io.opentelemetry.sdk.metrics.data.AggregationTemporality aggregationTemporality;
+
+    switch (source) {
+      case AGGREGATION_TEMPORALITY_DELTA:
+        aggregationTemporality = io.opentelemetry.sdk.metrics.data.AggregationTemporality.DELTA;
+        break;
+      case AGGREGATION_TEMPORALITY_CUMULATIVE:
+        aggregationTemporality =
+            io.opentelemetry.sdk.metrics.data.AggregationTemporality.CUMULATIVE;
+        break;
+      default:
+        throw new IllegalArgumentException("Unexpected enum constant: " + source);
+    }
+
+    return aggregationTemporality;
+  }
+
+  private static GaugeData<LongPointData> mapLongGaugeToSdk(Gauge gauge) {
+    return ImmutableGaugeData.create(
+        numberDataPointListToLongPointDataCollection(gauge.getDataPointsList()));
+  }
+
+  private static GaugeData<DoublePointData> mapDoubleGaugeToSdk(Gauge gauge) {
+    return ImmutableGaugeData.create(
+        numberDataPointListToDoublePointDataCollection(gauge.getDataPointsList()));
+  }
+
+  private static SumData<LongPointData> mapLongSumToSdk(Sum sum) {
+    return ImmutableSumData.create(
+        sum.getIsMonotonic(),
+        mapAggregationTemporalityToSdk(sum.getAggregationTemporality()),
+        numberDataPointListToLongPointDataCollection(sum.getDataPointsList()));
+  }
+
+  private static SumData<DoublePointData> mapDoubleSumToSdk(Sum sum) {
+    return ImmutableSumData.create(
+        sum.getIsMonotonic(),
+        mapAggregationTemporalityToSdk(sum.getAggregationTemporality()),
+        numberDataPointListToDoublePointDataCollection(sum.getDataPointsList()));
+  }
+
+  private static DoublePointData mapDoubleNumberDataPointToSdk(NumberDataPoint source) {
+    return ImmutableDoublePointData.create(
+        source.getStartTimeUnixNano(),
+        source.getTimeUnixNano(),
+        protoToAttributes(source.getAttributesList()),
+        source.getAsDouble(),
+        exemplarListToDoubleExemplarDataList(source.getExemplarsList()));
+  }
+
+  private static LongPointData mapLongNumberDataPointToSdk(NumberDataPoint source) {
+    return ImmutableLongPointData.create(
+        source.getStartTimeUnixNano(),
+        source.getTimeUnixNano(),
+        protoToAttributes(source.getAttributesList()),
+        source.getAsInt(),
+        exemplarListToLongExemplarDataList(source.getExemplarsList()));
+  }
+
+  private static SummaryDataPoint.ValueAtQuantile valueAtQuantileToValueAtQuantile(
+      ValueAtQuantile valueAtQuantile) {
+    SummaryDataPoint.ValueAtQuantile.Builder builder =
+        SummaryDataPoint.ValueAtQuantile.newBuilder();
+
+    builder.setQuantile(valueAtQuantile.getQuantile());
+    builder.setValue(valueAtQuantile.getValue());
+
+    return builder.build();
+  }
+
+  private static List<SummaryPointData> summaryDataPointListToSummaryPointDataCollection(
+      List<SummaryDataPoint> list) {
+    List<SummaryPointData> collection = new ArrayList<>(list.size());
+    for (SummaryDataPoint summaryDataPoint : list) {
+      collection.add(summaryDataPointToSummaryPointData(summaryDataPoint));
+    }
+
+    return collection;
+  }
+
+  private static List<HistogramPointData> histogramDataPointListToHistogramPointDataCollection(
+      List<HistogramDataPoint> list) {
+    List<HistogramPointData> collection = new ArrayList<>(list.size());
+    for (HistogramDataPoint histogramDataPoint : list) {
+      collection.add(histogramDataPointToHistogramPointData(histogramDataPoint));
+    }
+
+    return collection;
+  }
+
+  private static List<ExponentialHistogramPointData>
+      exponentialHistogramDataPointListToExponentialHistogramPointDataCollection(
+          List<ExponentialHistogramDataPoint> list) {
+    List<ExponentialHistogramPointData> collection = new ArrayList<>(list.size());
+    for (ExponentialHistogramDataPoint exponentialHistogramDataPoint : list) {
+      collection.add(
exponentialHistogramDataPointToExponentialHistogramPointData(
+              exponentialHistogramDataPoint));
+    }
+
+    return collection;
+  }
+
+  private static List<DoubleExemplarData> exemplarListToDoubleExemplarDataList(
+      List<Exemplar> list) {
+    List<DoubleExemplarData> result = new ArrayList<>(list.size());
+    for (Exemplar exemplar : list) {
+      result.add(exemplarToDoubleExemplarData(exemplar));
+    }
+
+    return result;
+  }
+
+  private static List<ValueAtQuantile> valueAtQuantileListToValueAtQuantileList(
+      List<SummaryDataPoint.ValueAtQuantile> list) {
+    List<ValueAtQuantile> result = new ArrayList<>(list.size());
+    for (SummaryDataPoint.ValueAtQuantile valueAtQuantile : list) {
+      result.add(mapFromSummaryValueAtQuantileProto(valueAtQuantile));
+    }
+
+    return result;
+  }
+
+  private static List<LongPointData> numberDataPointListToLongPointDataCollection(
+      List<NumberDataPoint> list) {
+    List<LongPointData> collection = new ArrayList<>(list.size());
+    for (NumberDataPoint numberDataPoint : list) {
+      collection.add(mapLongNumberDataPointToSdk(numberDataPoint));
+    }
+
+    return collection;
+  }
+
+  private static List<DoublePointData> numberDataPointListToDoublePointDataCollection(
+      List<NumberDataPoint> list) {
+    List<DoublePointData> collection = new ArrayList<>(list.size());
+    for (NumberDataPoint numberDataPoint : list) {
+      collection.add(mapDoubleNumberDataPointToSdk(numberDataPoint));
+    }
+
+    return collection;
+  }
+
+  private static List<LongExemplarData> exemplarListToLongExemplarDataList(List<Exemplar> list) {
+    List<LongExemplarData> result = new ArrayList<>(list.size());
+    for (Exemplar exemplar : list) {
+      result.add(exemplarToLongExemplarData(exemplar));
+    }
+
+    return result;
+  }
+
+  private static ExponentialHistogramBuckets mapBucketsFromProto(
+      ExponentialHistogramDataPoint.Buckets source, int scale) {
+    return ImmutableExponentialHistogramBuckets.create(
+        scale, source.getOffset(), source.getBucketCountsList());
+  }
+
+  private static List<KeyValue> attributesToProto(Attributes source) {
+    return AttributesMapper.getInstance().attributesToProto(source);
+  }
+
+  private static Attributes protoToAttributes(List<KeyValue> source) {
+    return AttributesMapper.getInstance().protoToAttributes(source);
+  }
+
+  private static final class DataWithType {
+    public final Data<?> data;
+    public final MetricDataType type;
+
+    private DataWithType(Data<?> data, MetricDataType type) {
+      this.data = data;
+      this.type = type;
+    }
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java
new file mode 100644
index 000000000..b05631d5c
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics;
+
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.BaseProtoSignalsDataMapper;
+import io.opentelemetry.proto.metrics.v1.Metric;
+import io.opentelemetry.proto.metrics.v1.MetricsData;
+import io.opentelemetry.proto.metrics.v1.ResourceMetrics;
+import io.opentelemetry.proto.metrics.v1.ScopeMetrics;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import io.opentelemetry.sdk.resources.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class ProtoMetricsDataMapper
+    extends BaseProtoSignalsDataMapper<
+        MetricData, Metric, 
MetricsData, ResourceMetrics, ScopeMetrics> {
+
+  private static final ProtoMetricsDataMapper INSTANCE = new ProtoMetricsDataMapper();
+
+  public static ProtoMetricsDataMapper getInstance() {
+    return INSTANCE;
+  }
+
+  @Override
+  protected Metric signalItemToProto(MetricData sourceData) {
+    return MetricDataMapper.getInstance().mapToProto(sourceData);
+  }
+
+  @Override
+  protected MetricData protoToSignalItem(
+      Metric protoSignalItem, Resource resource, InstrumentationScopeInfo scopeInfo) {
+    return MetricDataMapper.getInstance().mapToSdk(protoSignalItem, resource, scopeInfo);
+  }
+
+  @Override
+  protected List<ResourceMetrics> getProtoResources(MetricsData protoData) {
+    return protoData.getResourceMetricsList();
+  }
+
+  @Override
+  protected MetricsData createProtoData(
+      Map<Resource, Map<InstrumentationScopeInfo, List<Metric>>> itemsByResource) {
+    List<ResourceMetrics> items = new ArrayList<>();
+    itemsByResource.forEach(
+        (resource, instrumentationScopeInfoScopedMetricsMap) -> {
+          ResourceMetrics.Builder resourceMetricsBuilder = createProtoResourceBuilder(resource);
+          for (Map.Entry<InstrumentationScopeInfo, List<Metric>> metricsByScope :
+              instrumentationScopeInfoScopedMetricsMap.entrySet()) {
+            ScopeMetrics.Builder scopeBuilder = createProtoScopeBuilder(metricsByScope.getKey());
+            scopeBuilder.addAllMetrics(metricsByScope.getValue());
+            resourceMetricsBuilder.addScopeMetrics(scopeBuilder.build());
+          }
+          items.add(resourceMetricsBuilder.build());
+        });
+    return MetricsData.newBuilder().addAllResourceMetrics(items).build();
+  }
+
+  private ScopeMetrics.Builder createProtoScopeBuilder(InstrumentationScopeInfo scopeInfo) {
+    ScopeMetrics.Builder builder =
+        ScopeMetrics.newBuilder().setScope(instrumentationScopeToProto(scopeInfo));
+    if (scopeInfo.getSchemaUrl() != null) {
+      builder.setSchemaUrl(scopeInfo.getSchemaUrl());
+    }
+    return builder;
+  }
+
+  private ResourceMetrics.Builder createProtoResourceBuilder(Resource resource) {
+    ResourceMetrics.Builder builder =
+        ResourceMetrics.newBuilder().setResource(resourceToProto(resource));
+    if (resource.getSchemaUrl() != null) {
+      builder.setSchemaUrl(resource.getSchemaUrl());
+    }
+    return builder;
+  }
+
+  @Override
+  protected List<Metric> getSignalsFromProto(ScopeMetrics scopeSignals) {
+    return scopeSignals.getMetricsList();
+  }
+
+  @Override
+  protected InstrumentationScopeInfo getInstrumentationScopeFromProto(ScopeMetrics scopeSignals) {
+    return protoToInstrumentationScopeInfo(scopeSignals.getScope(), scopeSignals.getSchemaUrl());
+  }
+
+  @Override
+  protected List<ScopeMetrics> getScopes(ResourceMetrics resourceSignal) {
+    return resourceSignal.getScopeMetricsList();
+  }
+
+  @Override
+  protected Resource getResourceFromProto(ResourceMetrics resourceSignal) {
+    return protoToResource(resourceSignal.getResource(), resourceSignal.getSchemaUrl());
+  }
+
+  @Override
+  protected Resource getResourceFromSignal(MetricData source) {
+    return source.getResource();
+  }
+
+  @Override
+  protected InstrumentationScopeInfo getInstrumentationScopeInfo(MetricData source) {
+    return source.getInstrumentationScopeInfo();
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java
new file mode 100644
index 000000000..632dd491b
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: 
Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans;
+
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.BaseProtoSignalsDataMapper;
+import io.opentelemetry.proto.trace.v1.ResourceSpans;
+import io.opentelemetry.proto.trace.v1.ScopeSpans;
+import io.opentelemetry.proto.trace.v1.Span;
+import io.opentelemetry.proto.trace.v1.TracesData;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.resources.Resource;
+import io.opentelemetry.sdk.trace.data.SpanData;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class ProtoSpansDataMapper
+    extends BaseProtoSignalsDataMapper<
+        SpanData, Span, TracesData, ResourceSpans, ScopeSpans> {
+
+  private static final ProtoSpansDataMapper INSTANCE = new ProtoSpansDataMapper();
+
+  public static ProtoSpansDataMapper getInstance() {
+    return INSTANCE;
+  }
+
+  @Override
+  protected Span signalItemToProto(SpanData sourceData) {
+    return SpanDataMapper.getInstance().mapToProto(sourceData);
+  }
+
+  @Override
+  protected List<ResourceSpans> getProtoResources(TracesData protoData) {
+    return protoData.getResourceSpansList();
+  }
+
+  @Override
+  protected SpanData protoToSignalItem(
+      Span protoSignalItem, Resource resource, InstrumentationScopeInfo scopeInfo) {
+    return SpanDataMapper.getInstance().mapToSdk(protoSignalItem, resource, scopeInfo);
+  }
+
+  @Override
+  protected TracesData createProtoData(
+      Map<Resource, Map<InstrumentationScopeInfo, List<Span>>> itemsByResource) {
+    List<ResourceSpans> items = new ArrayList<>();
+    itemsByResource.forEach(
+        (resource, instrumentationScopeInfoScopedSpansMap) -> {
+          ResourceSpans.Builder resourceSpansBuilder = createProtoResourceBuilder(resource);
+          for (Map.Entry<InstrumentationScopeInfo, List<Span>> spansByScope :
+              instrumentationScopeInfoScopedSpansMap.entrySet()) {
+            ScopeSpans.Builder scopeBuilder = createProtoScopeBuilder(spansByScope.getKey());
+            scopeBuilder.addAllSpans(spansByScope.getValue());
+            resourceSpansBuilder.addScopeSpans(scopeBuilder.build());
+          }
+          items.add(resourceSpansBuilder.build());
+        });
+    return TracesData.newBuilder().addAllResourceSpans(items).build();
+  }
+
+  @Override
+  protected List<Span> getSignalsFromProto(ScopeSpans scopeSignals) {
+    return scopeSignals.getSpansList();
+  }
+
+  @Override
+  protected InstrumentationScopeInfo getInstrumentationScopeFromProto(ScopeSpans scopeSignals) {
+    return protoToInstrumentationScopeInfo(scopeSignals.getScope(), scopeSignals.getSchemaUrl());
+  }
+
+  @Override
+  protected List<ScopeSpans> getScopes(ResourceSpans resourceSignal) {
+    return resourceSignal.getScopeSpansList();
+  }
+
+  @Override
+  protected Resource getResourceFromProto(ResourceSpans resourceSignal) {
+    return protoToResource(resourceSignal.getResource(), resourceSignal.getSchemaUrl());
+  }
+
+  @Override
+  protected Resource getResourceFromSignal(SpanData source) {
+    return source.getResource();
+  }
+
+  @Override
+  protected InstrumentationScopeInfo getInstrumentationScopeInfo(SpanData source) {
+    return source.getInstrumentationScopeInfo();
+  }
+
+  private ResourceSpans.Builder createProtoResourceBuilder(Resource resource) {
+    ResourceSpans.Builder builder =
+        ResourceSpans.newBuilder().setResource(resourceToProto(resource));
+    if (resource.getSchemaUrl() != null) {
+      builder.setSchemaUrl(resource.getSchemaUrl());
+    }
+    return builder;
+  }
+
+  private ScopeSpans.Builder createProtoScopeBuilder(
+      InstrumentationScopeInfo instrumentationScopeInfo) {
+    ScopeSpans.Builder builder =
+        ScopeSpans.newBuilder().setScope(instrumentationScopeToProto(instrumentationScopeInfo));
+    if (instrumentationScopeInfo.getSchemaUrl() 
!= null) { + builder.setSchemaUrl(instrumentationScopeInfo.getSchemaUrl()); + } + return builder; + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java new file mode 100644 index 000000000..689be4d9b --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java @@ -0,0 +1,318 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.api.trace.propagation.internal.W3CTraceContextEncoding; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.AttributesMapper; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common.ByteStringMapper; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.models.SpanDataImpl; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.trace.v1.Span; +import io.opentelemetry.proto.trace.v1.Status; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nullable; + +public final class SpanDataMapper { + + private static final SpanDataMapper INSTANCE = new SpanDataMapper(); + + public static SpanDataMapper getInstance() { + return INSTANCE; + } + + private final ByteStringMapper byteStringMapper = ByteStringMapper.getInstance(); + + public Span mapToProto(SpanData source) { + Span.Builder span = Span.newBuilder(); + + span.setStartTimeUnixNano(source.getStartEpochNanos()); + span.setEndTimeUnixNano(source.getEndEpochNanos()); + if (source.getEvents() != null) { + for (EventData event : source.getEvents()) { + span.addEvents(eventDataToProto(event)); + } + } + if (source.getLinks() != null) { + for (LinkData link : source.getLinks()) { + span.addLinks(linkDataToProto(link)); + } + } + span.setTraceId(byteStringMapper.stringToProto(source.getTraceId())); + span.setSpanId(byteStringMapper.stringToProto(source.getSpanId())); + span.setParentSpanId(byteStringMapper.stringToProto(source.getParentSpanId())); + span.setName(source.getName()); + span.setKind(mapSpanKindToProto(source.getKind())); + span.setStatus(statusDataToProto(source.getStatus())); + + addSpanProtoExtras(source, span); + + return span.build(); + } + + private static void addSpanProtoExtras(SpanData source, Span.Builder target) { + target.addAllAttributes(attributesToProto(source.getAttributes())); + target.setDroppedAttributesCount( + source.getTotalAttributeCount() - source.getAttributes().size()); + target.setDroppedEventsCount(source.getTotalRecordedEvents() - getListSize(source.getEvents())); + 
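// Only dropped counts travel in the proto, derived here as total recorded minus the
+    // retained list size; addSpanDataExtras below inverts this (total = dropped plus
+    // retained), so attribute, event and link totals survive a disk round trip.
+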
target.setDroppedLinksCount(source.getTotalRecordedLinks() - getListSize(source.getLinks())); + target.setTraceState(encodeTraceState(source.getSpanContext().getTraceState())); + } + + public SpanData mapToSdk( + Span source, Resource resource, InstrumentationScopeInfo instrumentationScopeInfo) { + SpanDataImpl.Builder spanData = SpanDataImpl.builder(); + + spanData.setStartEpochNanos(source.getStartTimeUnixNano()); + spanData.setEndEpochNanos(source.getEndTimeUnixNano()); + spanData.setEvents(eventListToEventDataList(source.getEventsList())); + spanData.setLinks(linkListToLinkDataList(source.getLinksList())); + spanData.setName(source.getName()); + spanData.setKind(mapSpanKindToSdk(source.getKind())); + if (source.hasStatus()) { + spanData.setStatus(mapStatusDataToSdk(source.getStatus())); + } + + addSpanDataExtras(source, spanData, resource, instrumentationScopeInfo); + + return spanData.build(); + } + + private static void addSpanDataExtras( + Span source, + SpanDataImpl.Builder target, + Resource resource, + InstrumentationScopeInfo instrumentationScopeInfo) { + Attributes attributes = protoToAttributes(source.getAttributesList()); + target.setAttributes(attributes); + target.setResource(resource); + target.setInstrumentationScopeInfo(instrumentationScopeInfo); + String traceId = ByteStringMapper.getInstance().protoToString(source.getTraceId()); + target.setSpanContext( + SpanContext.create( + traceId, + ByteStringMapper.getInstance().protoToString(source.getSpanId()), + TraceFlags.getSampled(), + decodeTraceState(source.getTraceState()))); + target.setParentSpanContext( + SpanContext.create( + traceId, + ByteStringMapper.getInstance().protoToString(source.getParentSpanId()), + TraceFlags.getSampled(), + TraceState.getDefault())); + target.setTotalAttributeCount(source.getDroppedAttributesCount() + attributes.size()); + target.setTotalRecordedEvents( + calculateRecordedItems(source.getDroppedEventsCount(), source.getEventsCount())); + target.setTotalRecordedLinks( + calculateRecordedItems(source.getDroppedLinksCount(), source.getLinksCount())); + } + + private static StatusData mapStatusDataToSdk(Status source) { + return StatusData.create(getStatusCode(source.getCodeValue()), source.getMessage()); + } + + private static Span.Event eventDataToProto(EventData source) { + Span.Event.Builder event = Span.Event.newBuilder(); + + event.setTimeUnixNano(source.getEpochNanos()); + event.setName(source.getName()); + event.setDroppedAttributesCount(source.getDroppedAttributesCount()); + + addEventProtoExtras(source, event); + + return event.build(); + } + + private static void addEventProtoExtras(EventData source, Span.Event.Builder target) { + target.addAllAttributes(attributesToProto(source.getAttributes())); + } + + private static Status statusDataToProto(StatusData source) { + Status.Builder status = Status.newBuilder(); + + status.setMessage(source.getDescription()); + status.setCode(mapStatusCodeToProto(source.getStatusCode())); + + return status.build(); + } + + private static Span.SpanKind mapSpanKindToProto(SpanKind source) { + Span.SpanKind spanKind; + + switch (source) { + case INTERNAL: + spanKind = Span.SpanKind.SPAN_KIND_INTERNAL; + break; + case SERVER: + spanKind = Span.SpanKind.SPAN_KIND_SERVER; + break; + case CLIENT: + spanKind = Span.SpanKind.SPAN_KIND_CLIENT; + break; + case PRODUCER: + spanKind = Span.SpanKind.SPAN_KIND_PRODUCER; + break; + case CONSUMER: + spanKind = Span.SpanKind.SPAN_KIND_CONSUMER; + break; + default: + throw new IllegalArgumentException("Unexpected enum 
constant: " + source);
+    }
+
+    return spanKind;
+  }
+
+  private static Status.StatusCode mapStatusCodeToProto(StatusCode source) {
+    Status.StatusCode statusCode;
+
+    switch (source) {
+      case UNSET:
+        statusCode = Status.StatusCode.STATUS_CODE_UNSET;
+        break;
+      case OK:
+        statusCode = Status.StatusCode.STATUS_CODE_OK;
+        break;
+      case ERROR:
+        statusCode = Status.StatusCode.STATUS_CODE_ERROR;
+        break;
+      default:
+        throw new IllegalArgumentException("Unexpected enum constant: " + source);
+    }
+
+    return statusCode;
+  }
+
+  private static EventData eventDataToSdk(Span.Event source) {
+    Attributes attributes = protoToAttributes(source.getAttributesList());
+    return EventData.create(
+        source.getTimeUnixNano(),
+        source.getName(),
+        attributes,
+        attributes.size() + source.getDroppedAttributesCount());
+  }
+
+  private static SpanKind mapSpanKindToSdk(Span.SpanKind source) {
+    SpanKind spanKind;
+
+    switch (source) {
+      case SPAN_KIND_INTERNAL:
+        spanKind = SpanKind.INTERNAL;
+        break;
+      case SPAN_KIND_SERVER:
+        spanKind = SpanKind.SERVER;
+        break;
+      case SPAN_KIND_CLIENT:
+        spanKind = SpanKind.CLIENT;
+        break;
+      case SPAN_KIND_PRODUCER:
+        spanKind = SpanKind.PRODUCER;
+        break;
+      case SPAN_KIND_CONSUMER:
+        spanKind = SpanKind.CONSUMER;
+        break;
+      default:
+        throw new IllegalArgumentException("Unexpected enum constant: " + source);
+    }
+
+    return spanKind;
+  }
+
+  private static List<EventData> eventListToEventDataList(List<Span.Event> list) {
+    List<EventData> result = new ArrayList<>(list.size());
+    for (Span.Event event : list) {
+      result.add(eventDataToSdk(event));
+    }
+
+    return result;
+  }
+
+  private static List<LinkData> linkListToLinkDataList(List<Span.Link> list) {
+    List<LinkData> result = new ArrayList<>(list.size());
+    for (Span.Link link : list) {
+      result.add(linkDataToSdk(link));
+    }
+
+    return result;
+  }
+
+  private static LinkData linkDataToSdk(Span.Link source) {
+    Attributes attributes = protoToAttributes(source.getAttributesList());
+    int totalAttrCount = source.getDroppedAttributesCount() + attributes.size();
+    SpanContext spanContext =
+        SpanContext.create(
+            ByteStringMapper.getInstance().protoToString(source.getTraceId()),
+            ByteStringMapper.getInstance().protoToString(source.getSpanId()),
+            TraceFlags.getSampled(),
+            decodeTraceState(source.getTraceState()));
+    return LinkData.create(spanContext, attributes, totalAttrCount);
+  }
+
+  private static int calculateRecordedItems(int droppedCount, int itemsCount) {
+    return droppedCount + itemsCount;
+  }
+
+  private static StatusCode getStatusCode(int ordinal) {
+    for (StatusCode statusCode : StatusCode.values()) {
+      if (statusCode.ordinal() == ordinal) {
+        return statusCode;
+      }
+    }
+    throw new IllegalArgumentException();
+  }
+
+  private static List<KeyValue> attributesToProto(Attributes source) {
+    return AttributesMapper.getInstance().attributesToProto(source);
+  }
+
+  private static Attributes protoToAttributes(List<KeyValue> source) {
+    return AttributesMapper.getInstance().protoToAttributes(source);
+  }
+
+  private static int getListSize(List<?> list) {
+    if (list == null) {
+      return 0;
+    }
+    return list.size();
+  }
+
+  private static String encodeTraceState(TraceState traceState) {
+    if (!traceState.isEmpty()) {
+      return W3CTraceContextEncoding.encodeTraceState(traceState);
+    }
+    return "";
+  }
+
+  private static TraceState decodeTraceState(@Nullable String source) {
+    return (source == null || source.isEmpty())
+        ? 
TraceState.getDefault()
+        : W3CTraceContextEncoding.decodeTraceState(source);
+  }
+
+  private static Span.Link linkDataToProto(LinkData source) {
+    Span.Link.Builder builder = Span.Link.newBuilder();
+    SpanContext spanContext = source.getSpanContext();
+    builder.setTraceId(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId()));
+    builder.setSpanId(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId()));
+    builder.addAllAttributes(attributesToProto(source.getAttributes()));
+    builder.setDroppedAttributesCount(
+        source.getTotalAttributeCount() - source.getAttributes().size());
+    builder.setTraceState(encodeTraceState(spanContext.getTraceState()));
+
+    return builder.build();
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/models/SpanDataImpl.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/models/SpanDataImpl.java
new file mode 100644
index 000000000..dd7cfa49e
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/models/SpanDataImpl.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.models;
+
+import com.google.auto.value.AutoValue;
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.trace.SpanContext;
+import io.opentelemetry.api.trace.SpanKind;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.resources.Resource;
+import io.opentelemetry.sdk.trace.data.EventData;
+import io.opentelemetry.sdk.trace.data.LinkData;
+import io.opentelemetry.sdk.trace.data.SpanData;
+import io.opentelemetry.sdk.trace.data.StatusData;
+import java.util.List;
+
+@AutoValue
+public abstract class SpanDataImpl implements SpanData {
+
+  public static Builder builder() {
+    return new AutoValue_SpanDataImpl.Builder();
+  }
+
+  @Override
+  public boolean hasEnded() {
+    return true;
+  }
+
+  @SuppressWarnings(
+      "deprecation") // Overridden to prevent AutoValue from generating a builder method for it.
+  @Override
+  public io.opentelemetry.sdk.common.InstrumentationLibraryInfo getInstrumentationLibraryInfo() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public abstract InstrumentationScopeInfo getInstrumentationScopeInfo();
+
+  @AutoValue.Builder
+  public abstract static class Builder {
+    public abstract Builder setName(String value);
+
+    public abstract Builder setKind(SpanKind value);
+
+    public abstract Builder setSpanContext(SpanContext value);
+
+    public abstract Builder setParentSpanContext(SpanContext value);
+
+    public abstract Builder setStatus(StatusData value);
+
+    public abstract Builder setStartEpochNanos(Long value);
+
+    public abstract Builder setTotalAttributeCount(Integer value);
+
+    public abstract Builder setTotalRecordedEvents(Integer value);
+
+    public abstract Builder setTotalRecordedLinks(Integer value);
+
+    public abstract Builder setEndEpochNanos(Long value);
+
+    public abstract Builder setAttributes(Attributes value);
+
+    public abstract Builder setEvents(List<EventData> value);
+
+    public abstract Builder setLinks(List<LinkData> value);
+
+    public abstract Builder setInstrumentationScopeInfo(InstrumentationScopeInfo value);
+
+    public abstract Builder setResource(Resource value);
+
+    public abstract SpanDataImpl build();
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java
new file mode 100644
index 000000000..6d0451ef7
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.ProtoLogsDataMapper;
+import io.opentelemetry.proto.logs.v1.LogsData;
+import io.opentelemetry.sdk.logs.data.LogRecordData;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+
+public final class LogRecordDataSerializer implements SignalSerializer<LogRecordData> {
+  private static final LogRecordDataSerializer INSTANCE = new LogRecordDataSerializer();
+
+  private LogRecordDataSerializer() {}
+
+  static LogRecordDataSerializer getInstance() {
+    return INSTANCE;
+  }
+
+  @Override
+  public byte[] serialize(Collection<LogRecordData> logRecordData) {
+    LogsData proto = ProtoLogsDataMapper.getInstance().toProto(logRecordData);
+    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+      proto.writeDelimitedTo(out);
+      return out.toByteArray();
+    } catch (IOException e) {
+      throw new IllegalStateException(e);
+    }
+  }
+
+  @Override
+  public List<LogRecordData> deserialize(byte[] source) {
+    try {
+      return ProtoLogsDataMapper.getInstance().fromProto(LogsData.parseFrom(source));
+    } catch (InvalidProtocolBufferException e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java
new file mode 100644
index 000000000..9e5722fbd
--- /dev/null
+++ 
b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics.ProtoMetricsDataMapper;
+import io.opentelemetry.proto.metrics.v1.MetricsData;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+
+public final class MetricDataSerializer implements SignalSerializer<MetricData> {
+  private static final MetricDataSerializer INSTANCE = new MetricDataSerializer();
+
+  private MetricDataSerializer() {}
+
+  static MetricDataSerializer getInstance() {
+    return INSTANCE;
+  }
+
+  @Override
+  public byte[] serialize(Collection<MetricData> metricData) {
+    MetricsData proto = ProtoMetricsDataMapper.getInstance().toProto(metricData);
+    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+      proto.writeDelimitedTo(out);
+      return out.toByteArray();
+    } catch (IOException e) {
+      throw new IllegalStateException(e);
+    }
+  }
+
+  @Override
+  public List<MetricData> deserialize(byte[] source) {
+    try {
+      return ProtoMetricsDataMapper.getInstance().fromProto(MetricsData.parseFrom(source));
+    } catch (InvalidProtocolBufferException e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SignalSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SignalSerializer.java
new file mode 100644
index 000000000..40c307bf6
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SignalSerializer.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers;
+
+import java.util.Collection;
+import java.util.List;
+
+public interface SignalSerializer<SDK_ITEM> {
+
+  static SpanDataSerializer ofSpans() {
+    return SpanDataSerializer.getInstance();
+  }
+
+  static MetricDataSerializer ofMetrics() {
+    return MetricDataSerializer.getInstance();
+  }
+
+  static LogRecordDataSerializer ofLogs() {
+    return LogRecordDataSerializer.getInstance();
+  }
+
+  byte[] serialize(Collection<SDK_ITEM> items);
+
+  List<SDK_ITEM> deserialize(byte[] source);
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java
new file mode 100644
index 000000000..1dc02034b
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.ProtoSpansDataMapper;
+import 
io.opentelemetry.proto.trace.v1.TracesData; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Collection; +import java.util.List; + +public final class SpanDataSerializer implements SignalSerializer { + private static final SpanDataSerializer INSTANCE = new SpanDataSerializer(); + + private SpanDataSerializer() {} + + static SpanDataSerializer getInstance() { + return INSTANCE; + } + + @Override + public byte[] serialize(Collection spanData) { + TracesData proto = ProtoSpansDataMapper.getInstance().toProto(spanData); + try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { + proto.writeDelimitedTo(out); + return out.toByteArray(); + } catch (IOException e) { + throw new IllegalStateException(e); + } + } + + @Override + public List deserialize(byte[] source) { + try { + return ProtoSpansDataMapper.getInstance().fromProto(TracesData.parseFrom(source)); + } catch (InvalidProtocolBufferException e) { + throw new IllegalArgumentException(e); + } + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java new file mode 100644 index 000000000..1411aa73c --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java @@ -0,0 +1,140 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.ReadableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.WritableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import java.io.File; +import java.io.IOException; +import java.util.Objects; +import javax.annotation.Nullable; + +public final class FolderManager { + private final File folder; + private final StorageClock clock; + private final StorageConfiguration configuration; + @Nullable private ReadableFile currentReadableFile; + @Nullable private WritableFile currentWritableFile; + + public FolderManager(File folder, StorageConfiguration configuration, StorageClock clock) { + this.folder = folder; + this.configuration = configuration; + this.clock = clock; + } + + @Nullable + public synchronized ReadableFile getReadableFile() throws IOException { + currentReadableFile = null; + File readableFile = findReadableFile(); + if (readableFile != null) { + currentReadableFile = + new ReadableFile( + readableFile, Long.parseLong(readableFile.getName()), clock, configuration); + return currentReadableFile; + } + return null; + } + + public synchronized WritableFile createWritableFile() throws IOException { + long systemCurrentTimeMillis = clock.now(); + File[] existingFiles = folder.listFiles(); + if (existingFiles != null) { + if (purgeExpiredFilesIfAny(existingFiles, systemCurrentTimeMillis) == 0) { + removeOldestFileIfSpaceIsNeeded(existingFiles); + } + } + File file = new File(folder, String.valueOf(systemCurrentTimeMillis)); + currentWritableFile = new WritableFile(file, systemCurrentTimeMillis, configuration, clock); + return currentWritableFile; + } + + @Nullable + private File findReadableFile() throws IOException { + long currentTime = clock.now(); + File[] 
existingFiles = folder.listFiles(); + File oldestFileAvailable = null; + long oldestFileCreationTimeMillis = 0; + if (existingFiles != null) { + for (File existingFile : existingFiles) { + long existingFileCreationTimeMillis = Long.parseLong(existingFile.getName()); + if (isReadyToBeRead(currentTime, existingFileCreationTimeMillis) + && !hasExpiredForReading(currentTime, existingFileCreationTimeMillis)) { + if (oldestFileAvailable == null + || existingFileCreationTimeMillis < oldestFileCreationTimeMillis) { + oldestFileCreationTimeMillis = existingFileCreationTimeMillis; + oldestFileAvailable = existingFile; + } + } + } + } + // Checking if the oldest available file is currently the writable file. + if (oldestFileAvailable != null + && currentWritableFile != null + && oldestFileAvailable.equals(currentWritableFile.file)) { + currentWritableFile.close(); + } + return oldestFileAvailable; + } + + private int purgeExpiredFilesIfAny(File[] existingFiles, long currentTimeMillis) + throws IOException { + int filesDeleted = 0; + for (File existingFile : existingFiles) { + if (hasExpiredForReading(currentTimeMillis, Long.parseLong(existingFile.getName()))) { + if (currentReadableFile != null && existingFile.equals(currentReadableFile.file)) { + currentReadableFile.close(); + } + if (existingFile.delete()) { + filesDeleted++; + } + } + } + return filesDeleted; + } + + private void removeOldestFileIfSpaceIsNeeded(File[] existingFiles) throws IOException { + if (existingFiles.length > 0) { + if (isNeededToClearSpaceForNewFile(existingFiles)) { + File oldest = getOldest(existingFiles); + if (currentReadableFile != null && oldest.equals(currentReadableFile.file)) { + currentReadableFile.close(); + } + if (!oldest.delete()) { + throw new IOException("Could not delete the file: " + oldest); + } + } + } + } + + private static File getOldest(File[] existingFiles) { + File oldest = null; + for (File existingFile : existingFiles) { + if (oldest == null || existingFile.getName().compareTo(oldest.getName()) < 0) { + oldest = existingFile; + } + } + return Objects.requireNonNull(oldest); + } + + private boolean isNeededToClearSpaceForNewFile(File[] existingFiles) { + int currentFolderSize = 0; + for (File existingFile : existingFiles) { + currentFolderSize += (int) existingFile.length(); + } + return (currentFolderSize + configuration.getMaxFileSize()) > configuration.getMaxFolderSize(); + } + + private boolean isReadyToBeRead(long currentTimeMillis, long createdTimeInMillis) { + return currentTimeMillis >= (createdTimeInMillis + configuration.getMinFileAgeForReadMillis()); + } + + private boolean hasExpiredForReading(long systemCurrentTimeMillis, long createdTimeInMillis) { + return systemCurrentTimeMillis + > (createdTimeInMillis + configuration.getMaxFileAgeForReadMillis()); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java new file mode 100644 index 000000000..f56f1f159 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java @@ -0,0 +1,105 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage; + +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.ReadableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.WritableFile; +import 
io.opentelemetry.contrib.disk.buffering.internal.storage.responses.ReadableResult;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.WritableResult;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Function;
+import javax.annotation.Nullable;
+
+public final class Storage implements Closeable {
+  private final FolderManager folderManager;
+  @Nullable private WritableFile writableFile;
+  @Nullable private ReadableFile readableFile;
+  private static final int MAX_ATTEMPTS = 3;
+  private final AtomicBoolean isClosed = new AtomicBoolean(false);
+
+  public Storage(FolderManager folderManager) {
+    this.folderManager = folderManager;
+  }
+
+  /**
+   * Attempts to write an item into a writable file.
+   *
+   * @param item - The data that would be appended to the file.
+   * @throws IOException If an unexpected error happens.
+   */
+  public boolean write(byte[] item) throws IOException {
+    return write(item, 1);
+  }
+
+  private boolean write(byte[] item, int attemptNumber) throws IOException {
+    if (isClosed.get()) {
+      return false;
+    }
+    if (attemptNumber > MAX_ATTEMPTS) {
+      return false;
+    }
+    if (writableFile == null) {
+      writableFile = folderManager.createWritableFile();
+    }
+    WritableResult result = writableFile.append(item);
+    if (result != WritableResult.SUCCEEDED) {
+      // Retry with new file
+      writableFile = null;
+      return write(item, ++attemptNumber);
+    }
+    return true;
+  }
+
+  /**
+   * Attempts to read an item from a ready-to-read file.
+   *
+   * @param processing Is passed over to {@link ReadableFile#readAndProcess(Function)}.
+   * @throws IOException If an unexpected error happens.
+   */
+  public ReadableResult readAndProcess(Function<byte[], Boolean> processing) throws IOException {
+    return readAndProcess(processing, 1);
+  }
+
+  private ReadableResult readAndProcess(Function<byte[], Boolean> processing, int attemptNumber)
+      throws IOException {
+    if (isClosed.get()) {
+      return ReadableResult.FAILED;
+    }
+    if (attemptNumber > MAX_ATTEMPTS) {
+      return ReadableResult.FAILED;
+    }
+    if (readableFile == null) {
+      readableFile = folderManager.getReadableFile();
+      if (readableFile == null) {
+        return ReadableResult.FAILED;
+      }
+    }
+    ReadableResult result = readableFile.readAndProcess(processing);
+    switch (result) {
+      case SUCCEEDED:
+      case PROCESSING_FAILED:
+        return result;
+      default:
+        // Retry with new file
+        readableFile = null;
+        return readAndProcess(processing, ++attemptNumber);
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (isClosed.compareAndSet(false, true)) {
+      if (writableFile != null) {
+        writableFile.close();
+      }
+      if (readableFile != null) {
+        readableFile.close();
+      }
+    }
+  }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFile.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFile.java
new file mode 100644
index 000000000..f38e1392f
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFile.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.storage.files;
+
+import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader.DelimitedProtoStreamReader;
+import
io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader.ReadResult; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader.StreamReader; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils.FileTransferUtil; +import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.ReadableResult; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; +import javax.annotation.Nullable; + +/** + * Reads from a file and updates it in parallel in order to avoid re-reading the same items later. + * The way it does so is by creating a temporary file where all the contents are added during the + * instantiation of this class. Then, the contents are read from the temporary file, after an item + * has been read from the temporary file, the original file gets updated to remove the recently read + * data. + * + *
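+ * <p>A minimal usage sketch (illustrative only; the {@code file}, {@code clock} and {@code
+ * configuration} instances, as well as {@code sendBatchSomewhere}, are assumed to exist):
+ *
+ * <pre>{@code
+ * ReadableFile readableFile = new ReadableFile(file, createdTimeMillis, clock, configuration);
+ * ReadableResult result =
+ *     readableFile.readAndProcess(
+ *         bytes -> {
+ *           // Returning true removes the consumed bytes from the source file.
+ *           return sendBatchSomewhere(bytes);
+ *         });
+ * }</pre>
+ *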

+ * <p>More information on the overall storage process in the CONTRIBUTING.md file.
+ */
+public final class ReadableFile extends StorageFile {
+  private final int originalFileSize;
+  private final StreamReader reader;
+  private final FileTransferUtil fileTransferUtil;
+  private final File temporaryFile;
+  private final StorageClock clock;
+  private final long expireTimeMillis;
+  private final AtomicBoolean isClosed = new AtomicBoolean(false);
+  private int readBytes = 0;
+  @Nullable private ReadResult unconsumedResult;
+
+  public ReadableFile(
+      File file, long createdTimeMillis, StorageClock clock, StorageConfiguration configuration)
+      throws IOException {
+    this(
+        file,
+        createdTimeMillis,
+        clock,
+        configuration,
+        DelimitedProtoStreamReader.Factory.getInstance());
+  }
+
+  public ReadableFile(
+      File file,
+      long createdTimeMillis,
+      StorageClock clock,
+      StorageConfiguration configuration,
+      StreamReader.Factory readerFactory)
+      throws IOException {
+    super(file);
+    this.clock = clock;
+    expireTimeMillis = createdTimeMillis + configuration.getMaxFileAgeForReadMillis();
+    originalFileSize = (int) file.length();
+    temporaryFile = configuration.getTemporaryFileProvider().createTemporaryFile(file.getName());
+    copyFile(file, temporaryFile);
+    FileInputStream tempInputStream = new FileInputStream(temporaryFile);
+    fileTransferUtil = new FileTransferUtil(tempInputStream, file);
+    reader = readerFactory.create(tempInputStream);
+  }
+
+  /**
+   * Reads the next line available in the file and provides it to a {@link Function processing}
+   * which will determine whether to remove the provided line or not.
+   *
+   * @param processing - A function that receives the line that has been read and returns a
+   *     boolean. If the processing function returns TRUE, then the provided line will be deleted
+   *     from the source file. If the function returns FALSE, no changes will be applied to the
+   *     source file.
+   */
+  public synchronized ReadableResult readAndProcess(Function<byte[], Boolean> processing)
+      throws IOException {
+    if (isClosed.get()) {
+      return ReadableResult.FAILED;
+    }
+    if (hasExpired()) {
+      close();
+      return ReadableResult.FAILED;
+    }
+    ReadResult read = readNextItem();
+    if (read == null) {
+      cleanUp();
+      return ReadableResult.FAILED;
+    }
+    if (processing.apply(read.content)) {
+      unconsumedResult = null;
+      readBytes += read.totalReadLength;
+      int amountOfBytesToTransfer = originalFileSize - readBytes;
+      if (amountOfBytesToTransfer > 0) {
+        fileTransferUtil.transferBytes(readBytes, amountOfBytesToTransfer);
+      } else {
+        cleanUp();
+      }
+      return ReadableResult.SUCCEEDED;
+    } else {
+      unconsumedResult = read;
+      return ReadableResult.PROCESSING_FAILED;
+    }
+  }
+
+  @Nullable
+  private ReadResult readNextItem() throws IOException {
+    if (unconsumedResult != null) {
+      return unconsumedResult;
+    }
+    return reader.read();
+  }
+
+  private void cleanUp() throws IOException {
+    file.delete();
+    close();
+  }
+
+  @Override
+  public long getSize() {
+    return originalFileSize;
+  }
+
+  @Override
+  public synchronized boolean hasExpired() {
+    return clock.now() >= expireTimeMillis;
+  }
+
+  @Override
+  public synchronized boolean isClosed() {
+    return isClosed.get();
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    if (isClosed.compareAndSet(false, true)) {
+      unconsumedResult = null;
+      fileTransferUtil.close();
+      reader.close();
+      temporaryFile.delete();
+    }
+  }
+
+  /**
+   * This is needed instead of using Files.copy in order to keep it compatible with Android api <
+   * 26.
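+   * <p>{@code java.nio.file.Files} is only available on Android starting at API level 26, hence
+   * the manual stream copy below.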
+ */ + private static void copyFile(File from, File to) throws IOException { + try (InputStream in = new FileInputStream(from); + OutputStream out = new FileOutputStream(to)) { + + byte[] buffer = new byte[1024]; + int lengthRead; + while ((lengthRead = in.read(buffer)) > 0) { + out.write(buffer, 0, lengthRead); + } + } + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/StorageFile.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/StorageFile.java new file mode 100644 index 000000000..ea87ace9b --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/StorageFile.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files; + +import java.io.Closeable; +import java.io.File; + +public abstract class StorageFile implements Closeable { + public final File file; + + public StorageFile(File file) { + this.file = file; + } + + public abstract long getSize(); + + public abstract boolean hasExpired(); + + public abstract boolean isClosed(); +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFile.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFile.java new file mode 100644 index 000000000..5cfc0c705 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFile.java @@ -0,0 +1,82 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.WritableResult; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.concurrent.atomic.AtomicBoolean; + +public final class WritableFile extends StorageFile { + private final StorageConfiguration configuration; + private final StorageClock clock; + private final long expireTimeMillis; + private final OutputStream out; + private final AtomicBoolean isClosed = new AtomicBoolean(false); + private int size; + + public WritableFile( + File file, long createdTimeMillis, StorageConfiguration configuration, StorageClock clock) + throws IOException { + super(file); + this.configuration = configuration; + this.clock = clock; + expireTimeMillis = createdTimeMillis + configuration.getMaxFileAgeForWriteMillis(); + size = (int) file.length(); + out = new FileOutputStream(file); + } + + /** + * Adds a new line to the file. If it fails due to expired write time or because the file has + * reached the configured max size, the file stream is closed with the contents available in the + * buffer before attempting to append the new data. + * + * @param data - The new data line to add. 
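+   * @return {@link WritableResult#SUCCEEDED} if the data was appended, or {@link
+   *     WritableResult#FAILED} if the file was already closed, its write window had expired, or
+   *     appending would exceed the configured max file size.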
+ */ + public synchronized WritableResult append(byte[] data) throws IOException { + if (isClosed.get()) { + return WritableResult.FAILED; + } + if (hasExpired()) { + close(); + return WritableResult.FAILED; + } + int futureSize = size + data.length; + if (futureSize > configuration.getMaxFileSize()) { + close(); + return WritableResult.FAILED; + } + out.write(data); + size = futureSize; + return WritableResult.SUCCEEDED; + } + + @Override + public synchronized long getSize() { + return size; + } + + @Override + public synchronized boolean hasExpired() { + return clock.now() >= expireTimeMillis; + } + + @Override + public synchronized boolean isClosed() { + return isClosed.get(); + } + + @Override + public synchronized void close() throws IOException { + if (isClosed.compareAndSet(false, true)) { + out.close(); + } + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java new file mode 100644 index 000000000..ccdb0f1ed --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java @@ -0,0 +1,64 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader; + +import com.google.protobuf.CodedInputStream; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils.CountingInputStream; +import java.io.IOException; +import java.io.InputStream; +import javax.annotation.Nullable; + +public final class DelimitedProtoStreamReader extends StreamReader { + private final CountingInputStream countingInputStream; + + public DelimitedProtoStreamReader(InputStream inputStream) { + super(new CountingInputStream(inputStream)); + countingInputStream = (CountingInputStream) this.inputStream; + } + + @Override + @Nullable + public ReadResult read() throws IOException { + int startingPosition = countingInputStream.getPosition(); + int itemSize = getNextItemSize(); + if (itemSize < 1) { + return null; + } + byte[] bytes = new byte[itemSize]; + if (inputStream.read(bytes) < 0) { + return null; + } + return new ReadResult(bytes, countingInputStream.getPosition() - startingPosition); + } + + private int getNextItemSize() { + try { + int firstByte = inputStream.read(); + if (firstByte == -1) { + return 0; + } + return CodedInputStream.readRawVarint32(firstByte, inputStream); + } catch (IOException e) { + return 0; + } + } + + public static class Factory implements StreamReader.Factory { + + private static final Factory INSTANCE = new DelimitedProtoStreamReader.Factory(); + + public static Factory getInstance() { + return INSTANCE; + } + + private Factory() {} + + @Override + public StreamReader create(InputStream stream) { + return new DelimitedProtoStreamReader(stream); + } + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/ReadResult.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/ReadResult.java new file mode 100644 index 000000000..079c2396c --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/ReadResult.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * 
SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader; + +public final class ReadResult { + /** The consumable data. */ + public final byte[] content; + + /** + * The total amount of data read from the stream. This number can be greater than the content + * length as it also takes into account any delimiters size. + */ + public final int totalReadLength; + + public ReadResult(byte[] content, int totalReadLength) { + this.content = content; + this.totalReadLength = totalReadLength; + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/StreamReader.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/StreamReader.java new file mode 100644 index 000000000..d263aad71 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/StreamReader.java @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader; + +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import javax.annotation.Nullable; + +public abstract class StreamReader implements Closeable { + protected final InputStream inputStream; + + protected StreamReader(InputStream inputStream) { + this.inputStream = inputStream; + } + + @Nullable + public abstract ReadResult read() throws IOException; + + @Override + public void close() throws IOException { + inputStream.close(); + } + + public interface Factory { + StreamReader create(InputStream stream); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/utils/CountingInputStream.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/utils/CountingInputStream.java new file mode 100644 index 000000000..9faa2c018 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/utils/CountingInputStream.java @@ -0,0 +1,68 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; + +public final class CountingInputStream extends FilterInputStream { + + private int position; + private int mark = -1; + + public CountingInputStream(InputStream in) { + super(in); + } + + public int getPosition() { + return position; + } + + @Override + public synchronized void mark(int readlimit) { + in.mark(readlimit); + mark = position; + } + + @Override + public long skip(long n) throws IOException { + long result = in.skip(n); + position = (int) (position + result); + return result; + } + + @Override + public int read() throws IOException { + int result = in.read(); + if (result != -1) { + position++; + } + return result; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int result = in.read(b, off, len); + if (result != -1) { + position += result; + } + return result; + } + + @Override + public synchronized void reset() throws IOException { + if (!in.markSupported()) { + throw new IOException("Mark is not supported"); + } + if (mark == -1) { + throw new IOException("Mark is not set"); + } + + in.reset(); + position = mark; 
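+    // position is now rewound to the stream offset that was captured when mark(int) was called.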
+ } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/utils/FileTransferUtil.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/utils/FileTransferUtil.java new file mode 100644 index 000000000..e4729cb53 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/utils/FileTransferUtil.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils; + +import java.io.Closeable; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.channels.FileChannel; + +public final class FileTransferUtil implements Closeable { + private final File output; + + private final FileChannel inputChannel; + + public FileTransferUtil(FileInputStream input, File output) { + this.output = output; + inputChannel = input.getChannel(); + } + + public void transferBytes(int offset, int length) throws IOException { + try (FileOutputStream out = new FileOutputStream(output, false)) { + inputChannel.transferTo(offset, length, out.getChannel()); + } + } + + @Override + public void close() throws IOException { + inputChannel.close(); + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/responses/ReadableResult.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/responses/ReadableResult.java new file mode 100644 index 000000000..8448d2a15 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/responses/ReadableResult.java @@ -0,0 +1,12 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.responses; + +public enum ReadableResult { + SUCCEEDED, + FAILED, + PROCESSING_FAILED +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/responses/WritableResult.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/responses/WritableResult.java new file mode 100644 index 000000000..9cab7f2eb --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/responses/WritableResult.java @@ -0,0 +1,11 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.responses; + +public enum WritableResult { + SUCCEEDED, + FAILED +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/utils/StorageClock.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/utils/StorageClock.java new file mode 100644 index 000000000..0836f257e --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/utils/StorageClock.java @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage.utils; + +import io.opentelemetry.sdk.common.Clock; + +/** Internal utility that allows changing the time for testing purposes. 
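+ *
+ * <p>Production code uses the singleton returned by {@link #getInstance()}, which is backed by
+ * {@code System.currentTimeMillis()}; a test can swap in a mock to control the perceived time,
+ * e.g. (mirroring the tests in this change):
+ *
+ * <pre>{@code
+ * StorageClock clock = mock(StorageClock.class);
+ * doReturn(1000L).when(clock).now();
+ * }</pre>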
*/ +public final class StorageClock implements Clock { + private static final StorageClock INSTANCE = new StorageClock(); + + public static StorageClock getInstance() { + return INSTANCE; + } + + /** Returns the current time in milliseconds. */ + @Override + public long now() { + return System.currentTimeMillis(); + } + + @Override + public long nanoTime() { + throw new UnsupportedOperationException(); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/IntegrationTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/IntegrationTest.java new file mode 100644 index 000000000..f204a45cb --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/IntegrationTest.java @@ -0,0 +1,148 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.io.File; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +public class IntegrationTest { + private InMemorySpanExporter memorySpanExporter; + private SpanDiskExporter diskSpanExporter; + private Tracer tracer; + private InMemoryMetricExporter memoryMetricExporter; + private MetricDiskExporter diskMetricExporter; + private SdkMeterProvider meterProvider; + private Meter meter; + private InMemoryLogRecordExporter memoryLogRecordExporter; + private LogRecordDiskExporter diskLogRecordExporter; + private Logger logger; + private StorageClock clock; + @TempDir File rootDir; + private static final long INITIAL_TIME_IN_MILLIS = 1000; + private static final StorageConfiguration STORAGE_CONFIGURATION = + StorageConfiguration.getDefault(); + + @BeforeEach + public void setUp() throws IOException { + clock = mock(); + doReturn(INITIAL_TIME_IN_MILLIS).when(clock).now(); + + // Setting up spans + memorySpanExporter = InMemorySpanExporter.create(); + diskSpanExporter = + SpanDiskExporter.create(memorySpanExporter, rootDir, STORAGE_CONFIGURATION, clock); + tracer = 
createTracerProvider(diskSpanExporter).get("SpanInstrumentationScope");
+
+    // Setting up metrics
+    memoryMetricExporter = InMemoryMetricExporter.create();
+    diskMetricExporter =
+        MetricDiskExporter.create(memoryMetricExporter, rootDir, STORAGE_CONFIGURATION, clock);
+    meterProvider = createMeterProvider(diskMetricExporter);
+    meter = meterProvider.get("MetricInstrumentationScope");
+
+    // Setting up logs
+    memoryLogRecordExporter = InMemoryLogRecordExporter.create();
+    diskLogRecordExporter =
+        LogRecordDiskExporter.create(
+            memoryLogRecordExporter, rootDir, STORAGE_CONFIGURATION, clock);
+    logger = createLoggerProvider(diskLogRecordExporter).get("LogInstrumentationScope");
+  }
+
+  @Test
+  public void verifySpansIntegration() throws IOException {
+    Span span = tracer.spanBuilder("Span name").startSpan();
+    span.end();
+
+    assertExporter(diskSpanExporter, () -> memorySpanExporter.getFinishedSpanItems().size());
+  }
+
+  @Test
+  public void verifyMetricsIntegration() throws IOException {
+    meter.counterBuilder("Counter").build().add(2);
+    meterProvider.forceFlush();
+
+    assertExporter(diskMetricExporter, () -> memoryMetricExporter.getFinishedMetricItems().size());
+  }
+
+  @Test
+  public void verifyLogRecordsIntegration() throws IOException {
+    logger.logRecordBuilder().setBody("I'm a log!").emit();
+
+    assertExporter(
+        diskLogRecordExporter, () -> memoryLogRecordExporter.getFinishedLogRecordItems().size());
+  }
+
+  private void assertExporter(StoredBatchExporter exporter, Supplier<Integer> finishedItems)
+      throws IOException {
+    // Verify no data has been received in the original exporter until this point.
+    assertEquals(0, finishedItems.get());
+
+    // Go to the future when we can read the stored items.
+    fastForwardTimeByMillis(STORAGE_CONFIGURATION.getMinFileAgeForReadMillis());
+
+    // Read and send stored data.
+    assertTrue(exporter.exportStoredBatch(1, TimeUnit.SECONDS));
+
+    // Now the data must have been delegated to the original exporter.
+    assertEquals(1, finishedItems.get());
+
+    // Bonus: Try to read again, no more data should be available.
+ assertFalse(exporter.exportStoredBatch(1, TimeUnit.SECONDS)); + assertEquals(1, finishedItems.get()); + } + + @SuppressWarnings("DirectInvocationOnMock") + private void fastForwardTimeByMillis(long milliseconds) { + doReturn(clock.now() + milliseconds).when(clock).now(); + } + + private static SdkTracerProvider createTracerProvider(SpanExporter exporter) { + return SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(exporter)) + .build(); + } + + private static SdkMeterProvider createMeterProvider(MetricExporter exporter) { + return SdkMeterProvider.builder() + .registerMetricReader(PeriodicMetricReader.create(exporter)) + .build(); + } + + private static SdkLoggerProvider createLoggerProvider(LogRecordExporter exporter) { + return SdkLoggerProvider.builder() + .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter)) + .build(); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporterTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporterTest.java new file mode 100644 index 000000000..a1f521a84 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/LogRecordDiskExporterTest.java @@ -0,0 +1,46 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.storage.TestData; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.io.File; +import java.io.IOException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +class LogRecordDiskExporterTest { + private LogRecordExporter wrapped; + private LogRecordDiskExporter exporter; + private static final StorageConfiguration STORAGE_CONFIGURATION = + TestData.getDefaultConfiguration(); + private static final String STORAGE_FOLDER_NAME = "logs"; + @TempDir File rootDir; + + @BeforeEach + public void setUp() throws IOException { + wrapped = mock(); + exporter = LogRecordDiskExporter.create(wrapped, rootDir, STORAGE_CONFIGURATION); + } + + @Test + public void verifyCacheFolderName() { + File[] files = rootDir.listFiles(); + assertEquals(1, files.length); + assertEquals(STORAGE_FOLDER_NAME, files[0].getName()); + } + + @Test + public void onFlush_returnSuccess() { + assertEquals(CompletableResultCode.ofSuccess(), exporter.flush()); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporterTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporterTest.java new file mode 100644 index 000000000..8df89da76 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/MetricDiskExporterTest.java @@ -0,0 +1,62 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import 
io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.storage.TestData; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import java.io.File; +import java.io.IOException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +class MetricDiskExporterTest { + + private MetricExporter wrapped; + private MetricDiskExporter exporter; + private static final StorageConfiguration STORAGE_CONFIGURATION = + TestData.getDefaultConfiguration(); + private static final String STORAGE_FOLDER_NAME = "metrics"; + @TempDir File rootDir; + + @BeforeEach + public void setUp() throws IOException { + wrapped = mock(); + exporter = MetricDiskExporter.create(wrapped, rootDir, STORAGE_CONFIGURATION); + } + + @Test + public void verifyCacheFolderName() { + File[] files = rootDir.listFiles(); + assertEquals(1, files.length); + assertEquals(STORAGE_FOLDER_NAME, files[0].getName()); + } + + @Test + public void onFlush_returnSuccess() { + assertEquals(CompletableResultCode.ofSuccess(), exporter.flush()); + } + + @Test + public void provideWrappedAggregationTemporality() { + InstrumentType instrumentType = mock(); + AggregationTemporality aggregationTemporality = AggregationTemporality.DELTA; + doReturn(aggregationTemporality).when(wrapped).getAggregationTemporality(instrumentType); + + assertEquals(aggregationTemporality, exporter.getAggregationTemporality(instrumentType)); + + verify(wrapped).getAggregationTemporality(instrumentType); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporterTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporterTest.java new file mode 100644 index 000000000..eaa8c1629 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/SpanDiskExporterTest.java @@ -0,0 +1,46 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration; +import io.opentelemetry.contrib.disk.buffering.internal.storage.TestData; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.io.File; +import java.io.IOException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +class SpanDiskExporterTest { + private SpanExporter wrapped; + private SpanDiskExporter exporter; + private static final StorageConfiguration STORAGE_CONFIGURATION = + TestData.getDefaultConfiguration(); + private static final String STORAGE_FOLDER_NAME = "spans"; + @TempDir File rootDir; + + @BeforeEach + public void setUp() throws IOException { + wrapped = mock(); + exporter = SpanDiskExporter.create(wrapped, rootDir, STORAGE_CONFIGURATION); + } + + @Test + public void verifyCacheFolderName() { + File[] files = rootDir.listFiles(); + assertEquals(1, files.length); + assertEquals(STORAGE_FOLDER_NAME, files[0].getName()); + } + + @Test + public void onFlush_returnSuccess() { + 
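+    // The disk exporters persist each batch as soon as it is received, so there is nothing
+    // buffered for flush() to push; it is expected to report success right away.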
assertEquals(CompletableResultCode.ofSuccess(), exporter.flush());
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporterTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporterTest.java
new file mode 100644
index 000000000..168e812e1
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/exporters/DiskExporterTest.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.exporters;
+
+import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MIN_FILE_AGE_FOR_READ_MILLIS;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoInteractions;
+
+import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.TestData;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock;
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import io.opentelemetry.sdk.trace.data.SpanData;
+import io.opentelemetry.sdk.trace.export.SpanExporter;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+@SuppressWarnings("unchecked")
+class DiskExporterTest {
+  private SpanExporter wrapped;
+  private SignalSerializer<SpanData> serializer;
+  private StorageClock clock;
+  private DiskExporter<SpanData> exporter;
+  private final List<SpanData> deserializedData = Collections.emptyList();
+  @TempDir File rootDir;
+  private static final String STORAGE_FOLDER_NAME = "testName";
+
+  @BeforeEach
+  public void setUp() throws IOException {
+    clock = createClockMock(1000L);
+    setUpSerializer();
+    wrapped = mock();
+    exporter =
+        new DiskExporter<>(
+            rootDir,
+            TestData.getDefaultConfiguration(),
+            STORAGE_FOLDER_NAME,
+            serializer,
+            wrapped::export,
+            clock);
+  }
+
+  @Test
+  public void whenExportingStoredBatch_withAvailableData_andSuccessfullyProcessed_returnTrue()
+      throws IOException {
+    doReturn(CompletableResultCode.ofSuccess()).when(wrapped).export(deserializedData);
+
+    createDummyFile(1000L, "First line");
+    doReturn(1000L + MIN_FILE_AGE_FOR_READ_MILLIS).when(clock).now();
+
+    assertTrue(exporter.exportStoredBatch(1, TimeUnit.SECONDS));
+  }
+
+  @Test
+  public void whenMinFileReadIsNotGreaterThanMaxFileWrite_throwException() throws IOException {
+    try {
+      new DiskExporter<>(
+          rootDir,
+          StorageConfiguration.builder()
+              .setMaxFileAgeForWriteMillis(2)
+              .setMinFileAgeForReadMillis(1)
+              .build(),
+          STORAGE_FOLDER_NAME,
+          serializer,
+          wrapped::export,
+          clock);
+      fail();
+    } catch (IllegalArgumentException e) {
+      assertEquals(
+          "The configured max file age for writing must be lower than the configured min file age for reading",
+          e.getMessage());
+    }
+  }
+
+  @Test
+  public void whenExportingStoredBatch_withAvailableData_andUnsuccessfullyProcessed_returnFalse()
+      throws IOException {
+    doReturn(CompletableResultCode.ofFailure()).when(wrapped).export(deserializedData);
+
+    createDummyFile(1000L, "First line");
+    doReturn(1000L + MIN_FILE_AGE_FOR_READ_MILLIS).when(clock).now();
+
+    assertFalse(exporter.exportStoredBatch(1, TimeUnit.SECONDS));
+  }
+
+  @Test
+  public void whenExportingStoredBatch_withNoAvailableData_returnFalse() throws IOException {
+    assertFalse(exporter.exportStoredBatch(1, TimeUnit.SECONDS));
+  }
+
+  @Test
+  public void verifyStorageFolderIsCreated() {
+    assertTrue(new File(rootDir, STORAGE_FOLDER_NAME).exists());
+  }
+
+  @Test
+  public void whenWritingSucceedsOnExport_returnSuccessfulResultCode() {
+    doReturn(new byte[2]).when(serializer).serialize(deserializedData);
+
+    CompletableResultCode completableResultCode = exporter.onExport(deserializedData);
+
+    assertTrue(completableResultCode.isSuccess());
+    verifyNoInteractions(wrapped);
+  }
+
+  @Test
+  public void whenWritingFailsOnExport_doExportRightAway() throws IOException {
+    doReturn(CompletableResultCode.ofSuccess()).when(wrapped).export(deserializedData);
+    exporter.onShutDown();
+
+    CompletableResultCode completableResultCode = exporter.onExport(deserializedData);
+
+    assertTrue(completableResultCode.isSuccess());
+    verify(wrapped).export(deserializedData);
+  }
+
+  private File createDummyFile(long createdTimeMillis, String... lines) throws IOException {
+    File file = new File(rootDir, STORAGE_FOLDER_NAME + "/" + createdTimeMillis);
+    Files.write(file.toPath(), Arrays.asList(lines));
+    return file;
+  }
+
+  private void setUpSerializer() {
+    serializer = mock();
+    doReturn(deserializedData).when(serializer).deserialize(any());
+  }
+
+  private static StorageClock createClockMock(long initialTimeMillis) {
+    StorageClock mock = mock();
+    doReturn(initialTimeMillis).when(mock).now();
+    return mock;
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapperTest.java
new file mode 100644
index 000000000..1ca814fcb
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapperTest.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.proto.common.v1.KeyValue;
+import java.util.Arrays;
+import java.util.List;
+import org.junit.jupiter.api.Test;
+
+class AttributesMapperTest {
+
+  @Test
+  public void verifyMapping() {
+    Attributes attributes =
+        Attributes.builder()
+            .put(AttributeKey.stringKey("someString"), "someValue")
+            .put(AttributeKey.booleanKey("someBool"), true)
+            .put(AttributeKey.longKey("someLong"), 10L)
+            .put(AttributeKey.doubleKey("someDouble"), 10.0)
+            .build();
+
+    List<KeyValue> proto = mapToProto(attributes);
+
+    assertEquals(attributes, mapFromProto(proto));
+  }
+
+  @Test
+  public void verifyArrayMapping() {
+    Attributes attributes =
+        Attributes.builder()
+            .put(
+                AttributeKey.stringArrayKey("someString"),
+                Arrays.asList("firstString", "secondString"))
+            .put(AttributeKey.booleanArrayKey("someBool"), Arrays.asList(true, false))
+            .put(AttributeKey.longArrayKey("someLong"), Arrays.asList(10L, 50L))
+            .put(AttributeKey.doubleArrayKey("someDouble"), Arrays.asList(10.0, 50.5))
+            .build();
+
+    List<KeyValue> serialized = mapToProto(attributes);
+
+    assertEquals(attributes, mapFromProto(serialized));
+  }
+
+  private static List<KeyValue> mapToProto(Attributes attributes) {
+    return AttributesMapper.getInstance().attributesToProto(attributes);
+  }
+
+  private static Attributes mapFromProto(List<KeyValue> keyValues) {
+    return AttributesMapper.getInstance().protoToAttributes(keyValues);
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapperTest.java
new file mode 100644
index 000000000..ef9483f78
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapperTest.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import io.opentelemetry.contrib.disk.buffering.testutils.TestData;
+import io.opentelemetry.proto.resource.v1.Resource;
+import org.junit.jupiter.api.Test;
+
+class ResourceMapperTest {
+
+  @Test
+  public void verifyMapping() {
+    Resource proto = mapToProto(TestData.RESOURCE_FULL);
+
+    assertEquals(TestData.RESOURCE_FULL, mapToSdk(proto, TestData.RESOURCE_FULL.getSchemaUrl()));
+  }
+
+  private static Resource mapToProto(io.opentelemetry.sdk.resources.Resource sdkResource) {
+    return ResourceMapper.getInstance().mapToProto(sdkResource);
+  }
+
+  private static io.opentelemetry.sdk.resources.Resource mapToSdk(
+      Resource protoResource, String schemaUrl) {
+    return ResourceMapper.getInstance().mapToSdk(protoResource, schemaUrl);
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapperTest.java
new file mode 100644
index 000000000..81e1147db
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapperTest.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import io.opentelemetry.api.logs.Severity;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.models.LogRecordDataImpl;
+import io.opentelemetry.contrib.disk.buffering.testutils.TestData;
+import io.opentelemetry.proto.logs.v1.LogRecord;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.logs.data.Body;
+import io.opentelemetry.sdk.logs.data.LogRecordData;
+import io.opentelemetry.sdk.resources.Resource;
+import org.junit.jupiter.api.Test;
+
+class LogRecordDataMapperTest {
+
+  private static final LogRecordData LOG_RECORD =
LogRecordDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setSpanContext(TestData.SPAN_CONTEXT) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setAttributes(TestData.ATTRIBUTES) + .setBody(Body.string("Log body")) + .setSeverity(Severity.DEBUG) + .setSeverityText("Log severity text") + .setTimestampEpochNanos(100L) + .setObservedTimestampEpochNanos(200L) + .setTotalAttributeCount(3) + .build(); + + @Test + public void verifyMapping() { + LogRecord proto = mapToProto(LOG_RECORD); + + assertEquals( + LOG_RECORD, + mapToSdk(proto, LOG_RECORD.getResource(), LOG_RECORD.getInstrumentationScopeInfo())); + } + + private static LogRecord mapToProto(LogRecordData data) { + return LogRecordDataMapper.getInstance().mapToProto(data); + } + + private static LogRecordData mapToSdk( + LogRecord data, Resource resource, InstrumentationScopeInfo scopeInfo) { + return LogRecordDataMapper.getInstance().mapToSdk(data, resource, scopeInfo); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java new file mode 100644 index 000000000..4834e34c9 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java @@ -0,0 +1,170 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.models.LogRecordDataImpl; +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.proto.logs.v1.LogRecord; +import io.opentelemetry.proto.logs.v1.LogsData; +import io.opentelemetry.proto.logs.v1.ResourceLogs; +import io.opentelemetry.proto.logs.v1.ScopeLogs; +import io.opentelemetry.sdk.logs.data.Body; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; + +class ProtoLogsDataMapperTest { + + private static final LogRecordData LOG_RECORD = + LogRecordDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setSpanContext(TestData.SPAN_CONTEXT) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setAttributes(TestData.ATTRIBUTES) + .setBody(Body.string("Log body")) + .setSeverity(Severity.DEBUG) + .setSeverityText("Log severity text") + .setTimestampEpochNanos(100L) + .setObservedTimestampEpochNanos(200L) + .setTotalAttributeCount(3) + .build(); + + private static final LogRecordData OTHER_LOG_RECORD = + LogRecordDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setSpanContext(TestData.SPAN_CONTEXT) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setAttributes(TestData.ATTRIBUTES) + .setBody(Body.string("Other log body")) + .setSeverity(Severity.DEBUG) + .setSeverityText("Log severity text") + .setTimestampEpochNanos(100L) + .setObservedTimestampEpochNanos(200L) + .setTotalAttributeCount(3) + .build(); + + private static final LogRecordData 
LOG_RECORD_WITH_DIFFERENT_SCOPE_SAME_RESOURCE =
+      LogRecordDataImpl.builder()
+          .setResource(TestData.RESOURCE_FULL)
+          .setSpanContext(TestData.SPAN_CONTEXT)
+          .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION)
+          .setAttributes(TestData.ATTRIBUTES)
+          .setBody(Body.string("Same resource other scope log"))
+          .setSeverity(Severity.DEBUG)
+          .setSeverityText("Log severity text")
+          .setTimestampEpochNanos(100L)
+          .setObservedTimestampEpochNanos(200L)
+          .setTotalAttributeCount(3)
+          .build();
+
+  private static final LogRecordData LOG_RECORD_WITH_DIFFERENT_RESOURCE =
+      LogRecordDataImpl.builder()
+          .setResource(TestData.RESOURCE_WITHOUT_SCHEMA_URL)
+          .setSpanContext(TestData.SPAN_CONTEXT)
+          .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION)
+          .setAttributes(TestData.ATTRIBUTES)
+          .setBody(Body.string("Different resource log"))
+          .setSeverity(Severity.DEBUG)
+          .setSeverityText("Log severity text")
+          .setTimestampEpochNanos(100L)
+          .setObservedTimestampEpochNanos(200L)
+          .setTotalAttributeCount(3)
+          .build();
+
+  @Test
+  public void verifyConversionDataStructure() {
+    List<LogRecordData> signals = Collections.singletonList(LOG_RECORD);
+
+    LogsData result = mapToProto(signals);
+
+    List<ResourceLogs> resourceLogsList = result.getResourceLogsList();
+    assertEquals(1, resourceLogsList.size());
+    assertEquals(1, resourceLogsList.get(0).getScopeLogsList().size());
+    assertEquals(1, resourceLogsList.get(0).getScopeLogsList().get(0).getLogRecordsList().size());
+
+    assertThat(mapFromProto(result)).containsExactlyInAnyOrderElementsOf(signals);
+  }
+
+  @Test
+  public void verifyMultipleLogsWithSameResourceAndScope() {
+    List<LogRecordData> signals = Arrays.asList(LOG_RECORD, OTHER_LOG_RECORD);
+
+    LogsData proto = mapToProto(signals);
+
+    List<ResourceLogs> resourceLogsList = proto.getResourceLogsList();
+    assertEquals(1, resourceLogsList.size());
+    List<ScopeLogs> scopeLogsList = resourceLogsList.get(0).getScopeLogsList();
+    assertEquals(1, scopeLogsList.size());
+    List<LogRecord> logRecords = scopeLogsList.get(0).getLogRecordsList();
+    assertEquals(2, logRecords.size());
+    assertEquals("Log body", logRecords.get(0).getBody().getStringValue());
+    assertEquals("Other log body", logRecords.get(1).getBody().getStringValue());
+
+    assertEquals(2, mapFromProto(proto).size());
+
+    assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals);
+  }
+
+  @Test
+  public void verifyMultipleLogsWithSameResourceDifferentScope() {
+    List<LogRecordData> signals =
+        Arrays.asList(LOG_RECORD, LOG_RECORD_WITH_DIFFERENT_SCOPE_SAME_RESOURCE);
+
+    LogsData proto = mapToProto(signals);
+
+    List<ResourceLogs> resourceLogsList = proto.getResourceLogsList();
+    assertEquals(1, resourceLogsList.size());
+    List<ScopeLogs> scopeLogsList = resourceLogsList.get(0).getScopeLogsList();
+    assertEquals(2, scopeLogsList.size());
+    ScopeLogs firstScope = scopeLogsList.get(0);
+    ScopeLogs secondScope = scopeLogsList.get(1);
+    List<LogRecord> firstScopeLogs = firstScope.getLogRecordsList();
+    List<LogRecord> secondScopeLogs = secondScope.getLogRecordsList();
+    assertEquals(1, firstScopeLogs.size());
+    assertEquals(1, secondScopeLogs.size());
+
+    assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals);
+  }
+
+  @Test
+  public void verifyMultipleLogsWithDifferentResource() {
+    List<LogRecordData> signals = Arrays.asList(LOG_RECORD, LOG_RECORD_WITH_DIFFERENT_RESOURCE);
+
+    LogsData proto = mapToProto(signals);
+
+    List<ResourceLogs> resourceLogsList = proto.getResourceLogsList();
+    assertEquals(2, resourceLogsList.size());
+    ResourceLogs firstResourceLogs = resourceLogsList.get(0);
+    ResourceLogs secondResourceLogs = resourceLogsList.get(1);
+    List<ScopeLogs> firstScopeLogsList = firstResourceLogs.getScopeLogsList();
+    List<ScopeLogs> secondScopeLogsList = secondResourceLogs.getScopeLogsList();
+    assertEquals(1, firstScopeLogsList.size());
+    assertEquals(1, secondScopeLogsList.size());
+    ScopeLogs firstScope = firstScopeLogsList.get(0);
+    ScopeLogs secondScope = secondScopeLogsList.get(0);
+    List<LogRecord> firstScopeLogs = firstScope.getLogRecordsList();
+    List<LogRecord> secondScopeLogs = secondScope.getLogRecordsList();
+    assertEquals(1, firstScopeLogs.size());
+    assertEquals(1, secondScopeLogs.size());
+
+    assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals);
+  }
+
+  private static LogsData mapToProto(Collection<LogRecordData> signals) {
+    return ProtoLogsDataMapper.getInstance().toProto(signals);
+  }
+
+  private static List<LogRecordData> mapFromProto(LogsData protoData) {
+    return ProtoLogsDataMapper.getInstance().fromProto(protoData);
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapperTest.java
new file mode 100644
index 000000000..fccfc90b6
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapperTest.java
@@ -0,0 +1,275 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import io.opentelemetry.contrib.disk.buffering.testutils.TestData;
+import io.opentelemetry.proto.metrics.v1.Metric;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
+import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
+import io.opentelemetry.sdk.metrics.data.DoublePointData;
+import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
+import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData;
+import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
+import io.opentelemetry.sdk.metrics.data.GaugeData;
+import io.opentelemetry.sdk.metrics.data.HistogramData;
+import io.opentelemetry.sdk.metrics.data.HistogramPointData;
+import io.opentelemetry.sdk.metrics.data.LongExemplarData;
+import io.opentelemetry.sdk.metrics.data.LongPointData;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import io.opentelemetry.sdk.metrics.data.SumData;
+import io.opentelemetry.sdk.metrics.data.SummaryData;
+import io.opentelemetry.sdk.metrics.data.SummaryPointData;
+import io.opentelemetry.sdk.metrics.data.ValueAtQuantile;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile;
+import io.opentelemetry.sdk.resources.Resource;
+import java.util.Arrays;
+import java.util.Collections;
+import org.junit.jupiter.api.Test;
+
+class MetricDataMapperTest {
+
+  private static final LongExemplarData LONG_EXEMPLAR_DATA =
+      ImmutableLongExemplarData.create(TestData.ATTRIBUTES, 100L, TestData.SPAN_CONTEXT, 1L);
+
+  private static final DoubleExemplarData DOUBLE_EXEMPLAR_DATA =
+      ImmutableDoubleExemplarData.create(TestData.ATTRIBUTES, 100L, TestData.SPAN_CONTEXT, 1.0);
+
+  private static final LongPointData LONG_POINT_DATA =
+      ImmutableLongPointData.create(
+          1L, 2L, TestData.ATTRIBUTES, 1L, Collections.singletonList(LONG_EXEMPLAR_DATA));
+
+  private static final DoublePointData DOUBLE_POINT_DATA =
+      ImmutableDoublePointData.create(
+          1L, 2L, TestData.ATTRIBUTES, 1.0, Collections.singletonList(DOUBLE_EXEMPLAR_DATA));
+
+  private static final GaugeData<LongPointData> LONG_GAUGE_DATA =
+      ImmutableGaugeData.create(Collections.singletonList(LONG_POINT_DATA));
+
+  private static final GaugeData<DoublePointData> DOUBLE_GAUGE_DATA =
+      ImmutableGaugeData.create(Collections.singletonList(DOUBLE_POINT_DATA));
+
+  private static final SumData<LongPointData> LONG_SUM_DATA =
+      ImmutableSumData.create(
+          true, AggregationTemporality.DELTA, Collections.singletonList(LONG_POINT_DATA));
+
+  private static final SumData<DoublePointData> DOUBLE_SUM_DATA =
+      ImmutableSumData.create(
+          true, AggregationTemporality.DELTA, Collections.singletonList(DOUBLE_POINT_DATA));
+
+  private static final ValueAtQuantile VALUE_AT_QUANTILE =
+      ImmutableValueAtQuantile.create(2.0, 1.0);
+
+  private static final SummaryPointData SUMMARY_POINT_DATA =
+      ImmutableSummaryPointData.create(
+          1L, 2L, TestData.ATTRIBUTES, 1L, 2.0, Collections.singletonList(VALUE_AT_QUANTILE));
+
+  private static final SummaryData SUMMARY_DATA =
+      ImmutableSummaryData.create(Collections.singletonList(SUMMARY_POINT_DATA));
+
+  private static final HistogramPointData HISTOGRAM_POINT_DATA =
+      ImmutableHistogramPointData.create(
+          1L,
+          2L,
+          TestData.ATTRIBUTES,
+          15.0,
+          true,
+          4.0,
+          true,
+          7.0,
+          Collections.singletonList(10.0),
+          Arrays.asList(1L, 2L),
+          Collections.singletonList(DOUBLE_EXEMPLAR_DATA));
+
+  private static final ExponentialHistogramBuckets POSITIVE_BUCKET =
+      ImmutableExponentialHistogramBuckets.create(1, 10, Arrays.asList(1L, 10L));
+
+  private static final ExponentialHistogramBuckets NEGATIVE_BUCKET =
+      ImmutableExponentialHistogramBuckets.create(1, 0, Collections.emptyList());
+
+  private static final ExponentialHistogramPointData EXPONENTIAL_HISTOGRAM_POINT_DATA =
+      ImmutableExponentialHistogramPointData.create(
+          1,
+          10.0,
+          1L,
+          true,
+          2.0,
+          true,
+          4.0,
+          POSITIVE_BUCKET,
+          NEGATIVE_BUCKET,
+          1L,
+          2L,
+          TestData.ATTRIBUTES,
+          Collections.singletonList(DOUBLE_EXEMPLAR_DATA));
+
+  private static final HistogramData HISTOGRAM_DATA =
+      ImmutableHistogramData.create(
+          AggregationTemporality.CUMULATIVE, Collections.singletonList(HISTOGRAM_POINT_DATA));
+
+  private static final ExponentialHistogramData EXPONENTIAL_HISTOGRAM_DATA =
+      ImmutableExponentialHistogramData.create(
+          AggregationTemporality.CUMULATIVE,
Collections.singletonList(EXPONENTIAL_HISTOGRAM_POINT_DATA)); + + private static final MetricData LONG_GAUGE_METRIC = + ImmutableMetricData.createLongGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Long gauge name", + "Long gauge description", + "ms", + LONG_GAUGE_DATA); + + private static final MetricData DOUBLE_GAUGE_METRIC = + ImmutableMetricData.createDoubleGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Double gauge name", + "Double gauge description", + "ms", + DOUBLE_GAUGE_DATA); + private static final MetricData LONG_SUM_METRIC = + ImmutableMetricData.createLongSum( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Long sum name", + "Long sum description", + "ms", + LONG_SUM_DATA); + private static final MetricData DOUBLE_SUM_METRIC = + ImmutableMetricData.createDoubleSum( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Double sum name", + "Double sum description", + "ms", + DOUBLE_SUM_DATA); + private static final MetricData SUMMARY_METRIC = + ImmutableMetricData.createDoubleSummary( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Summary name", + "Summary description", + "ms", + SUMMARY_DATA); + + private static final MetricData HISTOGRAM_METRIC = + ImmutableMetricData.createDoubleHistogram( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Histogram name", + "Histogram description", + "ms", + HISTOGRAM_DATA); + private static final MetricData EXPONENTIAL_HISTOGRAM_METRIC = + ImmutableMetricData.createExponentialHistogram( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Exponential histogram name", + "Exponential histogram description", + "ms", + EXPONENTIAL_HISTOGRAM_DATA); + + @Test + public void verifyLongGaugeMapping() { + Metric proto = mapToProto(LONG_GAUGE_METRIC); + + assertEquals( + LONG_GAUGE_METRIC, + mapToSdk( + proto, + LONG_GAUGE_METRIC.getResource(), + LONG_GAUGE_METRIC.getInstrumentationScopeInfo())); + } + + @Test + public void verifyDoubleGaugeMapping() { + Metric proto = mapToProto(DOUBLE_GAUGE_METRIC); + + assertEquals( + DOUBLE_GAUGE_METRIC, + mapToSdk( + proto, + DOUBLE_GAUGE_METRIC.getResource(), + DOUBLE_GAUGE_METRIC.getInstrumentationScopeInfo())); + } + + @Test + public void verifyLongSumMapping() { + Metric proto = mapToProto(LONG_SUM_METRIC); + + assertEquals( + LONG_SUM_METRIC, + mapToSdk( + proto, LONG_SUM_METRIC.getResource(), LONG_SUM_METRIC.getInstrumentationScopeInfo())); + } + + @Test + public void verifyDoubleSumMapping() { + Metric proto = mapToProto(DOUBLE_SUM_METRIC); + + assertEquals( + DOUBLE_SUM_METRIC, + mapToSdk( + proto, + DOUBLE_SUM_METRIC.getResource(), + DOUBLE_SUM_METRIC.getInstrumentationScopeInfo())); + } + + @Test + public void verifySummaryMapping() { + Metric proto = mapToProto(SUMMARY_METRIC); + + assertEquals( + SUMMARY_METRIC, + mapToSdk( + proto, SUMMARY_METRIC.getResource(), SUMMARY_METRIC.getInstrumentationScopeInfo())); + } + + @Test + public void verifyHistogramMapping() { + Metric proto = mapToProto(HISTOGRAM_METRIC); + + assertEquals( + HISTOGRAM_METRIC, + mapToSdk( + proto, HISTOGRAM_METRIC.getResource(), HISTOGRAM_METRIC.getInstrumentationScopeInfo())); + } + + @Test + public void verifyExponentialHistogramMapping() { + Metric proto = mapToProto(EXPONENTIAL_HISTOGRAM_METRIC); + + assertEquals( + EXPONENTIAL_HISTOGRAM_METRIC, + mapToSdk( + proto, + EXPONENTIAL_HISTOGRAM_METRIC.getResource(), + 
EXPONENTIAL_HISTOGRAM_METRIC.getInstrumentationScopeInfo())); + } + + private static Metric mapToProto(MetricData source) { + return MetricDataMapper.getInstance().mapToProto(source); + } + + private static MetricData mapToSdk( + Metric source, Resource resource, InstrumentationScopeInfo scope) { + return MetricDataMapper.getInstance().mapToSdk(source, resource, scope); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java new file mode 100644 index 000000000..40d1d2f40 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java @@ -0,0 +1,160 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.proto.metrics.v1.Metric; +import io.opentelemetry.proto.metrics.v1.MetricsData; +import io.opentelemetry.proto.metrics.v1.ResourceMetrics; +import io.opentelemetry.proto.metrics.v1.ScopeMetrics; +import io.opentelemetry.sdk.metrics.data.GaugeData; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; + +class ProtoMetricsDataMapperTest { + + private static final LongExemplarData LONG_EXEMPLAR_DATA = + ImmutableLongExemplarData.create(TestData.ATTRIBUTES, 100L, TestData.SPAN_CONTEXT, 1L); + + private static final LongPointData LONG_POINT_DATA = + ImmutableLongPointData.create( + 1L, 2L, TestData.ATTRIBUTES, 1L, Collections.singletonList(LONG_EXEMPLAR_DATA)); + private static final GaugeData LONG_GAUGE_DATA = + ImmutableGaugeData.create(Collections.singletonList(LONG_POINT_DATA)); + + private static final MetricData LONG_GAUGE_METRIC = + ImmutableMetricData.createLongGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Long gauge name", + "Long gauge description", + "ms", + LONG_GAUGE_DATA); + + private static final MetricData OTHER_LONG_GAUGE_METRIC = + ImmutableMetricData.createLongGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Long gauge name", + "Long gauge description", + "ms", + LONG_GAUGE_DATA); + + private static final MetricData LONG_GAUGE_METRIC_WITH_DIFFERENT_SCOPE_SAME_RESOURCE = + ImmutableMetricData.createLongGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION, + "Long gauge name", + "Long gauge description", + "ms", + LONG_GAUGE_DATA); + + private static final MetricData LONG_GAUGE_METRIC_WITH_DIFFERENT_RESOURCE = + ImmutableMetricData.createLongGauge( + 
TestData.RESOURCE_WITHOUT_SCHEMA_URL, + TestData.INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION, + "Long gauge name", + "Long gauge description", + "ms", + LONG_GAUGE_DATA); + + @Test + public void verifyConversionDataStructure() { + List signals = Collections.singletonList(LONG_GAUGE_METRIC); + + MetricsData proto = mapToProto(signals); + + List resourceMetrics = proto.getResourceMetricsList(); + assertEquals(1, resourceMetrics.size()); + assertEquals(1, resourceMetrics.get(0).getScopeMetricsList().size()); + assertEquals(1, resourceMetrics.get(0).getScopeMetricsList().get(0).getMetricsList().size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + @Test + public void verifyMultipleMetricsWithSameResourceAndScope() { + List signals = Arrays.asList(LONG_GAUGE_METRIC, OTHER_LONG_GAUGE_METRIC); + + MetricsData proto = mapToProto(signals); + + List resourceMetrics = proto.getResourceMetricsList(); + assertEquals(1, resourceMetrics.size()); + List scopeMetrics = resourceMetrics.get(0).getScopeMetricsList(); + assertEquals(1, scopeMetrics.size()); + List metrics = scopeMetrics.get(0).getMetricsList(); + assertEquals(2, metrics.size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + @Test + public void verifyMultipleMetricsWithSameResourceDifferentScope() { + List signals = + Arrays.asList(LONG_GAUGE_METRIC, LONG_GAUGE_METRIC_WITH_DIFFERENT_SCOPE_SAME_RESOURCE); + + MetricsData proto = mapToProto(signals); + + List resourceMetrics = proto.getResourceMetricsList(); + assertEquals(1, resourceMetrics.size()); + List scopeMetrics = resourceMetrics.get(0).getScopeMetricsList(); + assertEquals(2, scopeMetrics.size()); + ScopeMetrics firstScope = scopeMetrics.get(0); + ScopeMetrics secondScope = scopeMetrics.get(1); + List firstScopeMetrics = firstScope.getMetricsList(); + List secondScopeMetrics = secondScope.getMetricsList(); + assertEquals(1, firstScopeMetrics.size()); + assertEquals(1, secondScopeMetrics.size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + @Test + public void verifyMultipleMetricsWithDifferentResource() { + List signals = + Arrays.asList(LONG_GAUGE_METRIC, LONG_GAUGE_METRIC_WITH_DIFFERENT_RESOURCE); + + MetricsData proto = mapToProto(signals); + + List resourceMetrics = proto.getResourceMetricsList(); + assertEquals(2, resourceMetrics.size()); + ResourceMetrics firstResourceMetrics = resourceMetrics.get(0); + ResourceMetrics secondResourceMetrics = resourceMetrics.get(1); + List firstScopeMetrics = firstResourceMetrics.getScopeMetricsList(); + List secondScopeMetrics = secondResourceMetrics.getScopeMetricsList(); + assertEquals(1, firstScopeMetrics.size()); + assertEquals(1, secondScopeMetrics.size()); + ScopeMetrics firstScope = firstScopeMetrics.get(0); + ScopeMetrics secondScope = secondScopeMetrics.get(0); + List firstMetrics = firstScope.getMetricsList(); + List secondMetrics = secondScope.getMetricsList(); + assertEquals(1, firstMetrics.size()); + assertEquals(1, secondMetrics.size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + private static MetricsData mapToProto(Collection signals) { + return ProtoMetricsDataMapper.getInstance().toProto(signals); + } + + private static List mapFromProto(MetricsData protoData) { + return ProtoMetricsDataMapper.getInstance().fromProto(protoData); + } +} diff --git 
a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java new file mode 100644 index 000000000..97a12edbb --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java @@ -0,0 +1,196 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.models.SpanDataImpl; +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.proto.trace.v1.ResourceSpans; +import io.opentelemetry.proto.trace.v1.ScopeSpans; +import io.opentelemetry.proto.trace.v1.Span; +import io.opentelemetry.proto.trace.v1.TracesData; +import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; + +class ProtoSpansDataMapperTest { + + private static final EventData EVENT_DATA = + EventData.create(1L, "Event name", TestData.ATTRIBUTES, 10); + + private static final LinkData LINK_DATA = + LinkData.create(TestData.SPAN_CONTEXT, TestData.ATTRIBUTES, 20); + + private static final LinkData LINK_DATA_WITH_TRACE_STATE = + LinkData.create(TestData.SPAN_CONTEXT_WITH_TRACE_STATE, TestData.ATTRIBUTES, 20); + + private static final SpanData SPAN_DATA = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setName("Span name") + .setSpanContext(TestData.SPAN_CONTEXT) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Arrays.asList(LINK_DATA, LINK_DATA_WITH_TRACE_STATE)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + private static final SpanData OTHER_SPAN_DATA = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setName("Span name") + .setSpanContext(TestData.SPAN_CONTEXT) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Arrays.asList(LINK_DATA, LINK_DATA_WITH_TRACE_STATE)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + private static final SpanData SPAN_DATA_WITH_DIFFERENT_SCOPE_SAME_RESOURCE = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION) + 
.setName("Span name") + .setSpanContext(TestData.SPAN_CONTEXT) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Arrays.asList(LINK_DATA, LINK_DATA_WITH_TRACE_STATE)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + private static final SpanData SPAN_DATA_WITH_DIFFERENT_RESOURCE = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_WITHOUT_SCHEMA_URL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION) + .setName("Span name") + .setSpanContext(TestData.SPAN_CONTEXT) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Arrays.asList(LINK_DATA, LINK_DATA_WITH_TRACE_STATE)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + @Test + public void verifyConversionDataStructure() { + List signals = Collections.singletonList(SPAN_DATA); + + TracesData proto = mapToProto(signals); + + List resourceSpans = proto.getResourceSpansList(); + assertEquals(1, resourceSpans.size()); + assertEquals(1, resourceSpans.get(0).getScopeSpansList().size()); + assertEquals(1, resourceSpans.get(0).getScopeSpansList().get(0).getSpansList().size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + @Test + public void verifyMultipleSpansWithSameResourceAndScope() { + List signals = Arrays.asList(SPAN_DATA, OTHER_SPAN_DATA); + + TracesData proto = mapToProto(signals); + + List resourceSpans = proto.getResourceSpansList(); + assertEquals(1, resourceSpans.size()); + List scopeSpans = resourceSpans.get(0).getScopeSpansList(); + assertEquals(1, scopeSpans.size()); + List spans = scopeSpans.get(0).getSpansList(); + assertEquals(2, spans.size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + @Test + public void verifyMultipleSpansWithSameResourceDifferentScope() { + List signals = Arrays.asList(SPAN_DATA, SPAN_DATA_WITH_DIFFERENT_SCOPE_SAME_RESOURCE); + + TracesData proto = mapToProto(signals); + + List resourceSpans = proto.getResourceSpansList(); + assertEquals(1, resourceSpans.size()); + List scopeSpans = resourceSpans.get(0).getScopeSpansList(); + assertEquals(2, scopeSpans.size()); + ScopeSpans firstScope = scopeSpans.get(0); + ScopeSpans secondScope = scopeSpans.get(1); + List firstScopeSpans = firstScope.getSpansList(); + List secondScopeSpans = secondScope.getSpansList(); + assertEquals(1, firstScopeSpans.size()); + assertEquals(1, secondScopeSpans.size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + @Test + public void verifyMultipleSpansWithDifferentResource() { + List signals = Arrays.asList(SPAN_DATA, SPAN_DATA_WITH_DIFFERENT_RESOURCE); + + TracesData proto = mapToProto(signals); + + List resourceSpans = proto.getResourceSpansList(); + assertEquals(2, resourceSpans.size()); + ResourceSpans firstResourceSpans = resourceSpans.get(0); + ResourceSpans secondResourceSpans = resourceSpans.get(1); + List firstScopeSpans = firstResourceSpans.getScopeSpansList(); + List secondScopeSpans = secondResourceSpans.getScopeSpansList(); + 
assertEquals(1, firstScopeSpans.size()); + assertEquals(1, secondScopeSpans.size()); + ScopeSpans firstScope = firstScopeSpans.get(0); + ScopeSpans secondScope = secondScopeSpans.get(0); + List firstSpans = firstScope.getSpansList(); + List secondSpans = secondScope.getSpansList(); + assertEquals(1, firstSpans.size()); + assertEquals(1, secondSpans.size()); + + assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); + } + + private static TracesData mapToProto(Collection signals) { + return ProtoSpansDataMapper.getInstance().toProto(signals); + } + + private static List mapFromProto(TracesData protoData) { + return ProtoSpansDataMapper.getInstance().fromProto(protoData); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapperTest.java new file mode 100644 index 000000000..9735690b9 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapperTest.java @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.models.SpanDataImpl; +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.proto.trace.v1.Span; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import java.util.Arrays; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class SpanDataMapperTest { + + private static final EventData EVENT_DATA = + EventData.create(1L, "Event name", TestData.ATTRIBUTES, 10); + + private static final LinkData LINK_DATA = + LinkData.create(TestData.SPAN_CONTEXT, TestData.ATTRIBUTES, 20); + + private static final LinkData LINK_DATA_WITH_TRACE_STATE = + LinkData.create(TestData.SPAN_CONTEXT_WITH_TRACE_STATE, TestData.ATTRIBUTES, 20); + + private static final SpanData SPAN_DATA = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setName("Span name") + .setSpanContext(TestData.SPAN_CONTEXT) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Arrays.asList(LINK_DATA, LINK_DATA_WITH_TRACE_STATE)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + private static final SpanData SPAN_DATA_WITH_TRACE_STATE = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setName("Span name2") + .setSpanContext(TestData.SPAN_CONTEXT_WITH_TRACE_STATE) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + 
.setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Collections.singletonList(LINK_DATA)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + @Test + public void verifyMapping() { + Span proto = mapToProto(SPAN_DATA); + + assertEquals( + SPAN_DATA, + mapToSdk(proto, SPAN_DATA.getResource(), SPAN_DATA.getInstrumentationScopeInfo())); + } + + @Test + public void verifyMappingWithTraceState() { + Span proto = mapToProto(SPAN_DATA_WITH_TRACE_STATE); + + assertEquals( + SPAN_DATA_WITH_TRACE_STATE, + mapToSdk( + proto, + SPAN_DATA_WITH_TRACE_STATE.getResource(), + SPAN_DATA_WITH_TRACE_STATE.getInstrumentationScopeInfo())); + } + + private static Span mapToProto(SpanData source) { + return SpanDataMapper.getInstance().mapToProto(source); + } + + private static SpanData mapToSdk( + Span source, Resource resource, InstrumentationScopeInfo instrumentationScopeInfo) { + return SpanDataMapper.getInstance().mapToSdk(source, resource, instrumentationScopeInfo); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializerTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializerTest.java new file mode 100644 index 000000000..a559592d4 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializerTest.java @@ -0,0 +1,55 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.models.LogRecordDataImpl; +import io.opentelemetry.contrib.disk.buffering.testutils.BaseSignalSerializerTest; +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.sdk.logs.data.Body; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import org.junit.jupiter.api.Test; + +class LogRecordDataSerializerTest extends BaseSignalSerializerTest { + private static final LogRecordData LOG_RECORD = + LogRecordDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setSpanContext(TestData.SPAN_CONTEXT) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setAttributes(TestData.ATTRIBUTES) + .setBody(Body.string("Log body")) + .setSeverity(Severity.DEBUG) + .setSeverityText("Log severity text") + .setTimestampEpochNanos(100L) + .setObservedTimestampEpochNanos(200L) + .setTotalAttributeCount(3) + .build(); + + private static final LogRecordData LOG_RECORD_WITHOUT_SEVERITY_TEXT = + LogRecordDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setSpanContext(TestData.SPAN_CONTEXT) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setAttributes(Attributes.empty()) + .setBody(Body.string("Log body")) + .setSeverity(Severity.DEBUG) + .setSeverityText("") + .setTimestampEpochNanos(100L) + .setObservedTimestampEpochNanos(200L) + .setTotalAttributeCount(3) + .build(); + + @Test + public void verifySerialization() { + assertSerialization(LOG_RECORD, LOG_RECORD_WITHOUT_SEVERITY_TEXT); + } + + @Override + 
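+  // Supplies the logs serializer exercised by the assertSerialization checks inherited
+  // from BaseSignalSerializerTest.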
protected SignalSerializer getSerializer() { + return SignalSerializer.ofLogs(); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializerTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializerTest.java new file mode 100644 index 000000000..f2d2bcc85 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializerTest.java @@ -0,0 +1,198 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; + +import io.opentelemetry.contrib.disk.buffering.testutils.BaseSignalSerializerTest; +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.data.GaugeData; +import io.opentelemetry.sdk.metrics.data.HistogramData; +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.SumData; +import io.opentelemetry.sdk.metrics.data.SummaryData; +import io.opentelemetry.sdk.metrics.data.SummaryPointData; +import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; +import java.util.Arrays; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class MetricDataSerializerTest extends BaseSignalSerializerTest { + + private static final LongExemplarData LONG_EXEMPLAR_DATA = + ImmutableLongExemplarData.create(TestData.ATTRIBUTES, 100L, TestData.SPAN_CONTEXT, 1L); + + private static final DoubleExemplarData DOUBLE_EXEMPLAR_DATA = + ImmutableDoubleExemplarData.create(TestData.ATTRIBUTES, 100L, TestData.SPAN_CONTEXT, 1.0); + private static 
final LongPointData LONG_POINT_DATA = + ImmutableLongPointData.create( + 1L, 2L, TestData.ATTRIBUTES, 1L, Collections.singletonList(LONG_EXEMPLAR_DATA)); + + private static final DoublePointData DOUBLE_POINT_DATA = + ImmutableDoublePointData.create( + 1L, 2L, TestData.ATTRIBUTES, 1.0, Collections.singletonList(DOUBLE_EXEMPLAR_DATA)); + + private static final GaugeData LONG_GAUGE_DATA = + ImmutableGaugeData.create(Collections.singletonList(LONG_POINT_DATA)); + + private static final GaugeData DOUBLE_GAUGE_DATA = + ImmutableGaugeData.create(Collections.singletonList(DOUBLE_POINT_DATA)); + + private static final SumData LONG_SUM_DATA = + ImmutableSumData.create( + true, AggregationTemporality.DELTA, Collections.singletonList(LONG_POINT_DATA)); + + private static final SumData DOUBLE_SUM_DATA = + ImmutableSumData.create( + true, AggregationTemporality.DELTA, Collections.singletonList(DOUBLE_POINT_DATA)); + + private static final ValueAtQuantile VALUE_AT_QUANTILE = + ImmutableValueAtQuantile.create(2.0, 1.0); + private static final SummaryPointData SUMMARY_POINT_DATA = + ImmutableSummaryPointData.create( + 1L, 2L, TestData.ATTRIBUTES, 1L, 2.0, Collections.singletonList(VALUE_AT_QUANTILE)); + + private static final SummaryData SUMMARY_DATA = + ImmutableSummaryData.create(Collections.singletonList(SUMMARY_POINT_DATA)); + + private static final HistogramPointData HISTOGRAM_POINT_DATA = + ImmutableHistogramPointData.create( + 1L, + 2L, + TestData.ATTRIBUTES, + 15.0, + true, + 4.0, + true, + 7.0, + Collections.singletonList(10.0), + Arrays.asList(1L, 2L), + Collections.singletonList(DOUBLE_EXEMPLAR_DATA)); + private static final ExponentialHistogramBuckets POSITIVE_BUCKET = + ImmutableExponentialHistogramBuckets.create(1, 10, Arrays.asList(1L, 10L)); + + private static final ExponentialHistogramBuckets NEGATIVE_BUCKET = + ImmutableExponentialHistogramBuckets.create(1, 0, Collections.emptyList()); + private static final ExponentialHistogramPointData EXPONENTIAL_HISTOGRAM_POINT_DATA = + ImmutableExponentialHistogramPointData.create( + 1, + 10.0, + 1L, + true, + 2.0, + true, + 4.0, + POSITIVE_BUCKET, + NEGATIVE_BUCKET, + 1L, + 2L, + TestData.ATTRIBUTES, + Collections.singletonList(DOUBLE_EXEMPLAR_DATA)); + private static final HistogramData HISTOGRAM_DATA = + ImmutableHistogramData.create( + AggregationTemporality.CUMULATIVE, Collections.singletonList(HISTOGRAM_POINT_DATA)); + private static final ExponentialHistogramData EXPONENTIAL_HISTOGRAM_DATA = + ImmutableExponentialHistogramData.create( + AggregationTemporality.CUMULATIVE, + Collections.singletonList(EXPONENTIAL_HISTOGRAM_POINT_DATA)); + private static final MetricData LONG_GAUGE_METRIC = + ImmutableMetricData.createLongGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Long gauge name", + "Long gauge description", + "ms", + LONG_GAUGE_DATA); + + private static final MetricData DOUBLE_GAUGE_METRIC = + ImmutableMetricData.createDoubleGauge( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Double gauge name", + "Double gauge description", + "ms", + DOUBLE_GAUGE_DATA); + private static final MetricData LONG_SUM_METRIC = + ImmutableMetricData.createLongSum( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Long sum name", + "Long sum description", + "ms", + LONG_SUM_DATA); + private static final MetricData DOUBLE_SUM_METRIC = + ImmutableMetricData.createDoubleSum( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Double sum name", + "Double sum 
description", + "ms", + DOUBLE_SUM_DATA); + private static final MetricData SUMMARY_METRIC = + ImmutableMetricData.createDoubleSummary( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Summary name", + "Summary description", + "ms", + SUMMARY_DATA); + + private static final MetricData HISTOGRAM_METRIC = + ImmutableMetricData.createDoubleHistogram( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Histogram name", + "Histogram description", + "ms", + HISTOGRAM_DATA); + private static final MetricData EXPONENTIAL_HISTOGRAM_METRIC = + ImmutableMetricData.createExponentialHistogram( + TestData.RESOURCE_FULL, + TestData.INSTRUMENTATION_SCOPE_INFO_FULL, + "Exponential histogram name", + "Exponential histogram description", + "ms", + EXPONENTIAL_HISTOGRAM_DATA); + + @Test + public void verifySerialization() { + assertSerialization( + LONG_GAUGE_METRIC, + DOUBLE_GAUGE_METRIC, + LONG_SUM_METRIC, + DOUBLE_SUM_METRIC, + SUMMARY_METRIC, + HISTOGRAM_METRIC, + EXPONENTIAL_HISTOGRAM_METRIC); + } + + @Override + protected SignalSerializer getSerializer() { + return SignalSerializer.ofMetrics(); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializerTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializerTest.java new file mode 100644 index 000000000..a12155ae0 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializerTest.java @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; + +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.models.SpanDataImpl; +import io.opentelemetry.contrib.disk.buffering.testutils.BaseSignalSerializerTest; +import io.opentelemetry.contrib.disk.buffering.testutils.TestData; +import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import java.util.Arrays; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class SpanDataSerializerTest extends BaseSignalSerializerTest { + + private static final EventData EVENT_DATA = + EventData.create(1L, "Event name", TestData.ATTRIBUTES, 10); + + private static final LinkData LINK_DATA = + LinkData.create(TestData.SPAN_CONTEXT, TestData.ATTRIBUTES, 20); + + private static final LinkData LINK_DATA_WITH_TRACE_STATE = + LinkData.create(TestData.SPAN_CONTEXT_WITH_TRACE_STATE, TestData.ATTRIBUTES, 20); + + private static final SpanData SPAN_DATA = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setName("Span name") + .setSpanContext(TestData.SPAN_CONTEXT) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Arrays.asList(LINK_DATA, LINK_DATA_WITH_TRACE_STATE)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + private static final 
SpanData SPAN_DATA_WITH_TRACE_STATE = + SpanDataImpl.builder() + .setResource(TestData.RESOURCE_FULL) + .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL) + .setName("Span name2") + .setSpanContext(TestData.SPAN_CONTEXT_WITH_TRACE_STATE) + .setParentSpanContext(TestData.PARENT_SPAN_CONTEXT) + .setAttributes(TestData.ATTRIBUTES) + .setStartEpochNanos(1L) + .setEndEpochNanos(2L) + .setKind(SpanKind.CLIENT) + .setStatus(StatusData.error()) + .setEvents(Collections.singletonList(EVENT_DATA)) + .setLinks(Collections.singletonList(LINK_DATA)) + .setTotalAttributeCount(10) + .setTotalRecordedEvents(2) + .setTotalRecordedLinks(2) + .build(); + + @Test + public void verifySerialization() { + assertSerialization(SPAN_DATA, SPAN_DATA_WITH_TRACE_STATE); + } + + @Override + protected SignalSerializer getSerializer() { + return SignalSerializer.ofSpans(); + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManagerTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManagerTest.java new file mode 100644 index 000000000..bfd6fed20 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManagerTest.java @@ -0,0 +1,242 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage; + +import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MAX_FILE_AGE_FOR_READ_MILLIS; +import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MAX_FILE_SIZE; +import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MIN_FILE_AGE_FOR_READ_MILLIS; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.ReadableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.StorageFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.WritableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +class FolderManagerTest { + + @TempDir File rootDir; + private FolderManager folderManager; + private StorageClock clock; + + @BeforeEach + public void setUp() { + clock = mock(); + folderManager = new FolderManager(rootDir, TestData.getDefaultConfiguration(), clock); + } + + @Test + public void createWritableFile_withTimeMillisAsName() throws IOException { + doReturn(1000L).when(clock).now(); + + StorageFile file = folderManager.createWritableFile(); + + assertEquals("1000", file.file.getName()); + } + + @Test + public void createWritableFile_andRemoveOldestOne_whenTheAvailableFolderSpaceIsNotEnough() + throws IOException { + File existingFile1 = new File(rootDir, "1000"); + File existingFile2 = new File(rootDir, "1400"); + File existingFile3 = new File(rootDir, 
"1100"); + createFiles(existingFile3, existingFile2, existingFile1); + fillWithBytes(existingFile1, MAX_FILE_SIZE); + fillWithBytes(existingFile2, MAX_FILE_SIZE); + fillWithBytes(existingFile3, MAX_FILE_SIZE); + doReturn(1500L).when(clock).now(); + + StorageFile file = folderManager.createWritableFile(); + + assertNotEquals(existingFile1, file.file); + assertNotEquals(existingFile2, file.file); + assertNotEquals(existingFile3, file.file); + assertTrue(existingFile2.exists()); + assertTrue(existingFile3.exists()); + assertFalse(existingFile1.exists()); + } + + @Test + public void closeCurrentlyWritableFile_whenItIsReadyToBeRead_anNoOtherReadableFilesAreAvailable() + throws IOException { + long createdFileTime = 1000L; + doReturn(createdFileTime).when(clock).now(); + + WritableFile writableFile = folderManager.createWritableFile(); + writableFile.append(new byte[3]); + + doReturn(createdFileTime + MIN_FILE_AGE_FOR_READ_MILLIS).when(clock).now(); + + ReadableFile readableFile = folderManager.getReadableFile(); + + assertEquals(writableFile.file, readableFile.file); + assertTrue(writableFile.isClosed()); + } + + @Test + public void + closeCurrentlyReadableFileIfAny_whenItIsTheOldestOne_andRemoveIt_whenTheAvailableFolderSpaceIsNotEnough() + throws IOException { + File existingFile1 = new File(rootDir, "1000"); + File existingFile2 = new File(rootDir, "1400"); + File existingFile3 = new File(rootDir, "1100"); + createFiles(existingFile3, existingFile2, existingFile1); + fillWithBytes(existingFile1, MAX_FILE_SIZE); + fillWithBytes(existingFile2, MAX_FILE_SIZE); + fillWithBytes(existingFile3, MAX_FILE_SIZE); + doReturn(1000L + MIN_FILE_AGE_FOR_READ_MILLIS).when(clock).now(); + + ReadableFile readableFile = folderManager.getReadableFile(); + assertEquals(existingFile1, readableFile.file); + + folderManager.createWritableFile(); + + assertTrue(existingFile2.exists()); + assertTrue(existingFile3.exists()); + assertFalse(existingFile1.exists()); + assertTrue(readableFile.isClosed()); + } + + @Test + public void createWritableFile_andDoNotRemoveOldestOne_ifAtLeastOneExpiredFileIsPurged() + throws IOException { + File existingFile1 = new File(rootDir, "1100"); + File existingFile2 = new File(rootDir, "1400"); + File existingFile3 = new File(rootDir, "900"); + createFiles(existingFile3, existingFile2, existingFile1); + fillWithBytes(existingFile1, MAX_FILE_SIZE); + fillWithBytes(existingFile2, MAX_FILE_SIZE); + fillWithBytes(existingFile3, MAX_FILE_SIZE); + doReturn(11_000L).when(clock).now(); + + StorageFile file = folderManager.createWritableFile(); + + assertNotEquals(existingFile1, file.file); + assertNotEquals(existingFile2, file.file); + assertNotEquals(existingFile3, file.file); + assertTrue(existingFile2.exists()); + assertTrue(existingFile1.exists()); + assertFalse(existingFile3.exists()); + } + + @Test + public void purgeExpiredForReadFiles_whenCreatingNewOne() throws IOException { + // Files that cannot be read from are considered fully expired. + File expiredReadableFile = new File(rootDir, "1000"); + // Files that cannot be written, but can still be read, aren't ready to be deleted. 
+    File expiredWritableFile = new File(rootDir, "10000");
+    createFiles(expiredReadableFile, expiredWritableFile);
+    doReturn(11_500L).when(clock).now();
+
+    StorageFile file = folderManager.createWritableFile();
+
+    assertFalse(expiredReadableFile.exists());
+    assertTrue(expiredWritableFile.exists());
+    assertNotEquals(expiredWritableFile, file.file);
+  }
+
+  @Test
+  public void
+      closeExpiredReadableFileInUseIfAny_whenPurgingExpiredForReadFiles_whenCreatingNewOne()
+          throws IOException {
+    File expiredReadableFileBeingRead = new File(rootDir, "900");
+    File expiredReadableFile = new File(rootDir, "1000");
+    File expiredWritableFile = new File(rootDir, "10000");
+    createFiles(expiredReadableFile, expiredWritableFile, expiredReadableFileBeingRead);
+
+    doReturn(900 + MIN_FILE_AGE_FOR_READ_MILLIS).when(clock).now();
+    ReadableFile readableFile = folderManager.getReadableFile();
+    assertEquals(expiredReadableFileBeingRead, readableFile.file);
+
+    doReturn(11_500L).when(clock).now();
+
+    StorageFile file = folderManager.createWritableFile();
+
+    assertFalse(expiredReadableFile.exists());
+    assertFalse(expiredReadableFileBeingRead.exists());
+    assertTrue(expiredWritableFile.exists());
+    assertNotEquals(expiredWritableFile, file.file);
+    assertTrue(readableFile.isClosed());
+  }
+
+  @Test
+  public void provideFileForRead_afterItsMinFileAgeForReadTimePassed() throws IOException {
+    long readableFileCreationTime = 1000;
+    long currentTime = readableFileCreationTime + MIN_FILE_AGE_FOR_READ_MILLIS;
+    doReturn(currentTime).when(clock).now();
+    File writableFile = new File(rootDir, String.valueOf(currentTime));
+    File readableFile = new File(rootDir, String.valueOf(readableFileCreationTime));
+    createFiles(writableFile, readableFile);
+
+    StorageFile file = folderManager.getReadableFile();
+
+    assertEquals(readableFile, file.file);
+  }
+
+  @Test
+  public void provideOldestFileForRead_whenMultipleReadableFilesAreAvailable() throws IOException {
+    long newerReadableFileCreationTime = 1000;
+    long olderReadableFileCreationTime = 900;
+    long currentTime = newerReadableFileCreationTime + MIN_FILE_AGE_FOR_READ_MILLIS;
+    doReturn(currentTime).when(clock).now();
+    File writableFile = new File(rootDir, String.valueOf(currentTime));
+    File readableFileOlder = new File(rootDir, String.valueOf(olderReadableFileCreationTime));
+    File readableFileNewer = new File(rootDir, String.valueOf(newerReadableFileCreationTime));
+    createFiles(writableFile, readableFileNewer, readableFileOlder);
+
+    StorageFile file = folderManager.getReadableFile();
+
+    assertEquals(readableFileOlder, file.file);
+  }
+
+  @Test
+  public void provideNullFileForRead_whenNoFilesAreAvailable() throws IOException {
+    assertNull(folderManager.getReadableFile());
+  }
+
+  @Test
+  public void provideNullFileForRead_whenOnlyWritableFilesAreAvailable() throws IOException {
+    long currentTime = 1000;
+    File writableFile = new File(rootDir, String.valueOf(currentTime));
+    createFiles(writableFile);
+
+    assertNull(folderManager.getReadableFile());
+  }
+
+  @Test
+  public void provideNullFileForRead_whenReadableFilesAreExpired() throws IOException {
+    long creationReferenceTime = 1000;
+    File expiredReadableFile1 = new File(rootDir, String.valueOf(creationReferenceTime - 1));
+    File expiredReadableFile2 = new File(rootDir, String.valueOf(creationReferenceTime - 10));
+    createFiles(expiredReadableFile1, expiredReadableFile2);
+    doReturn(creationReferenceTime + MAX_FILE_AGE_FOR_READ_MILLIS).when(clock).now();
+
+    assertNull(folderManager.getReadableFile());
+  }
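+  // Test helpers: createFiles() creates empty files named after their creation timestamp,
+  // and fillWithBytes() pads a file so the per-folder size limit can be exercised.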
+ private static void fillWithBytes(File file, int size) throws IOException { + Files.write(file.toPath(), new byte[size]); + } + + private static void createFiles(File... files) throws IOException { + for (File file : files) { + if (!file.createNewFile()) { + fail("Could not create temporary file: " + file); + } + } + } +} diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/StorageTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/StorageTest.java new file mode 100644 index 000000000..a8bb08689 --- /dev/null +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/StorageTest.java @@ -0,0 +1,229 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.contrib.disk.buffering.internal.storage; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.when; + +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.ReadableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.files.WritableFile; +import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.ReadableResult; +import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.WritableResult; +import java.io.IOException; +import java.util.function.Function; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +@SuppressWarnings("unchecked") +class StorageTest { + private FolderManager folderManager; + private Storage storage; + private Function processing; + private ReadableFile readableFile; + private WritableFile writableFile; + + @BeforeEach + public void setUp() throws IOException { + folderManager = mock(); + readableFile = mock(); + writableFile = createWritableFile(); + processing = mock(); + doReturn(ReadableResult.SUCCEEDED).when(readableFile).readAndProcess(processing); + storage = new Storage(folderManager); + } + + @Test + public void whenReadingAndProcessingSuccessfully_returnSuccess() throws IOException { + doReturn(readableFile).when(folderManager).getReadableFile(); + + assertEquals(ReadableResult.SUCCEEDED, storage.readAndProcess(processing)); + + verify(readableFile).readAndProcess(processing); + } + + @Test + public void whenReadableFileProcessingFails_returnFailed() throws IOException { + doReturn(readableFile).when(folderManager).getReadableFile(); + doReturn(ReadableResult.PROCESSING_FAILED).when(readableFile).readAndProcess(processing); + + assertEquals(ReadableResult.PROCESSING_FAILED, storage.readAndProcess(processing)); + + verify(readableFile).readAndProcess(processing); + } + + @Test + public void whenReadingMultipleTimes_reuseReader() throws IOException { + ReadableFile anotherReadable = mock(); + when(folderManager.getReadableFile()).thenReturn(readableFile).thenReturn(anotherReadable); + + assertEquals(ReadableResult.SUCCEEDED, storage.readAndProcess(processing)); + assertEquals(ReadableResult.SUCCEEDED, storage.readAndProcess(processing)); + + verify(readableFile, times(2)).readAndProcess(processing); + verify(folderManager, times(1)).getReadableFile(); + 
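+    // The reader is cached and reused across reads, so the folder manager is consulted only
+    // once and the second file it would hand out is never touched.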
verifyNoInteractions(anotherReadable); + } + + @Test + public void whenWritingMultipleTimes_reuseWriter() throws IOException { + byte[] data = new byte[1]; + WritableFile anotherWriter = createWritableFile(); + when(folderManager.createWritableFile()).thenReturn(writableFile).thenReturn(anotherWriter); + + storage.write(data); + storage.write(data); + + verify(writableFile, times(2)).append(data); + verify(folderManager, times(1)).createWritableFile(); + verifyNoInteractions(anotherWriter); + } + + @Test + public void whenAttemptingToReadAfterClosed_returnFailed() throws IOException { + storage.close(); + assertEquals(ReadableResult.FAILED, storage.readAndProcess(processing)); + } + + @Test + public void whenAttemptingToWriteAfterClosed_returnFalse() throws IOException { + storage.close(); + assertFalse(storage.write(new byte[1])); + } + + @Test + public void whenNoFileAvailableForReading_returnFailed() throws IOException { + assertEquals(ReadableResult.FAILED, storage.readAndProcess(processing)); + } + + @Test + public void whenTheReadTimeExpires_lookForNewFileToRead() throws IOException { + when(folderManager.getReadableFile()).thenReturn(readableFile).thenReturn(null); + doReturn(ReadableResult.FAILED).when(readableFile).readAndProcess(processing); + + storage.readAndProcess(processing); + + verify(folderManager, times(2)).getReadableFile(); + } + + @Test + public void whenNoMoreLinesToRead_lookForNewFileToRead() throws IOException { + when(folderManager.getReadableFile()).thenReturn(readableFile).thenReturn(null); + doReturn(ReadableResult.FAILED).when(readableFile).readAndProcess(processing); + + storage.readAndProcess(processing); + + verify(folderManager, times(2)).getReadableFile(); + } + + @Test + public void whenResourceClosed_lookForNewFileToRead() throws IOException { + when(folderManager.getReadableFile()).thenReturn(readableFile).thenReturn(null); + doReturn(ReadableResult.FAILED).when(readableFile).readAndProcess(processing); + + storage.readAndProcess(processing); + + verify(folderManager, times(2)).getReadableFile(); + } + + @Test + public void whenEveryNewFileFoundCannotBeRead_returnContentNotAvailable() throws IOException { + when(folderManager.getReadableFile()).thenReturn(readableFile); + doReturn(ReadableResult.FAILED).when(readableFile).readAndProcess(processing); + + assertEquals(ReadableResult.FAILED, storage.readAndProcess(processing)); + + verify(folderManager, times(3)).getReadableFile(); + } + + @Test + public void appendDataToFile() throws IOException { + doReturn(writableFile).when(folderManager).createWritableFile(); + byte[] data = new byte[1]; + + storage.write(data); + + verify(writableFile).append(data); + } + + @Test + public void whenWritingTimeoutHappens_retryWithNewFile() throws IOException { + byte[] data = new byte[1]; + WritableFile workingWritableFile = createWritableFile(); + when(folderManager.createWritableFile()) + .thenReturn(writableFile) + .thenReturn(workingWritableFile); + doReturn(WritableResult.FAILED).when(writableFile).append(data); + + storage.write(data); + + verify(folderManager, times(2)).createWritableFile(); + } + + @Test + public void whenThereIsNoSpaceAvailableForWriting_retryWithNewFile() throws IOException { + byte[] data = new byte[1]; + WritableFile workingWritableFile = createWritableFile(); + when(folderManager.createWritableFile()) + .thenReturn(writableFile) + .thenReturn(workingWritableFile); + doReturn(WritableResult.FAILED).when(writableFile).append(data); + + storage.write(data); + + verify(folderManager, 
+    verify(folderManager, times(2)).createWritableFile();
+  }
+
+  @Test
+  public void whenWritingResourceIsClosed_retryWithNewFile() throws IOException {
+    byte[] data = new byte[1];
+    WritableFile workingWritableFile = createWritableFile();
+    when(folderManager.createWritableFile())
+        .thenReturn(writableFile)
+        .thenReturn(workingWritableFile);
+    doReturn(WritableResult.FAILED).when(writableFile).append(data);
+
+    storage.write(data);
+
+    verify(folderManager, times(2)).createWritableFile();
+  }
+
+  @Test
+  public void whenEveryAttemptToWriteFails_returnFalse() throws IOException {
+    byte[] data = new byte[1];
+    when(folderManager.createWritableFile()).thenReturn(writableFile);
+    doReturn(WritableResult.FAILED).when(writableFile).append(data);
+
+    assertFalse(storage.write(data));
+
+    verify(folderManager, times(3)).createWritableFile();
+  }
+
+  @Test
+  public void whenClosing_closeWriterAndReaderIfNotNull() throws IOException {
+    doReturn(writableFile).when(folderManager).createWritableFile();
+    doReturn(readableFile).when(folderManager).getReadableFile();
+    storage.write(new byte[1]);
+    storage.readAndProcess(processing);
+
+    storage.close();
+
+    verify(writableFile).close();
+    verify(readableFile).close();
+  }
+
+  private static WritableFile createWritableFile() throws IOException {
+    WritableFile mock = mock();
+    doReturn(WritableResult.SUCCEEDED).when(mock).append(any());
+    return mock;
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/TestData.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/TestData.java
new file mode 100644
index 000000000..b267327ca
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/TestData.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.storage;
+
+import io.opentelemetry.contrib.disk.buffering.internal.StorageConfiguration;
+import io.opentelemetry.contrib.disk.buffering.internal.files.DefaultTemporaryFileProvider;
+import io.opentelemetry.contrib.disk.buffering.internal.files.TemporaryFileProvider;
+
+public final class TestData {
+
+  public static final long MAX_FILE_AGE_FOR_WRITE_MILLIS = 1000;
+  // The min read age must stay greater than the max write age, per the constraint in the README.
+  public static final long MIN_FILE_AGE_FOR_READ_MILLIS = MAX_FILE_AGE_FOR_WRITE_MILLIS + 500;
+  public static final long MAX_FILE_AGE_FOR_READ_MILLIS = 10_000;
+  public static final int MAX_FILE_SIZE = 100;
+  public static final int MAX_FOLDER_SIZE = 300;
+
+  public static StorageConfiguration getDefaultConfiguration() {
+    return getConfiguration(DefaultTemporaryFileProvider.getInstance());
+  }
+
+  public static StorageConfiguration getConfiguration(TemporaryFileProvider fileProvider) {
+    return StorageConfiguration.builder()
+        .setMaxFileAgeForWriteMillis(MAX_FILE_AGE_FOR_WRITE_MILLIS)
+        .setMinFileAgeForReadMillis(MIN_FILE_AGE_FOR_READ_MILLIS)
+        .setMaxFileAgeForReadMillis(MAX_FILE_AGE_FOR_READ_MILLIS)
+        .setMaxFileSize(MAX_FILE_SIZE)
+        .setMaxFolderSize(MAX_FOLDER_SIZE)
+        .setTemporaryFileProvider(fileProvider)
+        .build();
+  }
+
+  private TestData() {}
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFileTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFileTest.java
new file mode 100644
index 000000000..4c4056ba7
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFileTest.java
@@ -0,0 +1,236 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.storage.files;
+
+import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MAX_FILE_AGE_FOR_READ_MILLIS;
+import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.getConfiguration;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import io.opentelemetry.api.logs.Severity;
+import io.opentelemetry.contrib.disk.buffering.internal.files.TemporaryFileProvider;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.models.LogRecordDataImpl;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.LogRecordDataSerializer;
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.ReadableResult;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock;
+import io.opentelemetry.contrib.disk.buffering.testutils.TestData;
+import io.opentelemetry.sdk.logs.data.Body;
+import io.opentelemetry.sdk.logs.data.LogRecordData;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+class ReadableFileTest {
+
+  @TempDir File dir;
+  private File source;
+  private File temporaryFile;
+  private ReadableFile readableFile;
+  private StorageClock clock;
+  private TemporaryFileProvider temporaryFileProvider;
+  private static final long CREATED_TIME_MILLIS = 1000L;
+  private static final LogRecordDataSerializer SERIALIZER = SignalSerializer.ofLogs();
+  private static final LogRecordData FIRST_LOG_RECORD =
+      LogRecordDataImpl.builder()
+          .setResource(TestData.RESOURCE_FULL)
+          .setSpanContext(TestData.SPAN_CONTEXT)
+          .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL)
+          .setAttributes(TestData.ATTRIBUTES)
+          .setBody(Body.string("First log body"))
+          .setSeverity(Severity.DEBUG)
+          .setSeverityText("Log severity text")
+          .setTimestampEpochNanos(100L)
+          .setObservedTimestampEpochNanos(200L)
+          .setTotalAttributeCount(3)
+          .build();
+
+  private static final LogRecordData SECOND_LOG_RECORD =
+      LogRecordDataImpl.builder()
+          .setResource(TestData.RESOURCE_FULL)
+          .setSpanContext(TestData.SPAN_CONTEXT)
+          .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL)
+          .setAttributes(TestData.ATTRIBUTES)
+          .setBody(Body.string("Second log body"))
+          .setSeverity(Severity.DEBUG)
+          .setSeverityText("Log severity text")
+          .setTimestampEpochNanos(100L)
+          .setObservedTimestampEpochNanos(200L)
+          .setTotalAttributeCount(3)
+          .build();
+
+  private static final LogRecordData THIRD_LOG_RECORD =
+      LogRecordDataImpl.builder()
+          .setResource(TestData.RESOURCE_FULL)
+          .setSpanContext(TestData.SPAN_CONTEXT)
+          .setInstrumentationScopeInfo(TestData.INSTRUMENTATION_SCOPE_INFO_FULL)
+          .setAttributes(TestData.ATTRIBUTES)
+          .setBody(Body.string("Third log body"))
+          .setSeverity(Severity.DEBUG)
+          .setSeverityText("Log severity text")
+          .setTimestampEpochNanos(100L)
+          .setObservedTimestampEpochNanos(200L)
+          .setTotalAttributeCount(3)
+          .build();
+
+  @BeforeEach
+  public void setUp() throws IOException {
+    source = new File(dir, "sourceFile");
+    temporaryFile = new File(dir, "temporaryFile");
+    addFileContents(source);
+    temporaryFileProvider = mock();
+    doReturn(temporaryFile).when(temporaryFileProvider).createTemporaryFile(anyString());
+    clock = mock();
+    readableFile =
+        new ReadableFile(
+            source, CREATED_TIME_MILLIS, clock, getConfiguration(temporaryFileProvider));
+  }
+
+  private static void addFileContents(File source) throws IOException {
+    List<byte[]> items = new ArrayList<>();
+    items.add(SERIALIZER.serialize(Collections.singleton(FIRST_LOG_RECORD)));
+    items.add(SERIALIZER.serialize(Collections.singleton(SECOND_LOG_RECORD)));
+    items.add(SERIALIZER.serialize(Collections.singleton(THIRD_LOG_RECORD)));
+
+    try (FileOutputStream out = new FileOutputStream(source)) {
+      for (byte[] item : items) {
+        out.write(item);
+      }
+    }
+  }
+
+  @Test
+  public void readSingleItemAndRemoveIt() throws IOException {
+    readableFile.readAndProcess(
+        bytes -> {
+          assertEquals(FIRST_LOG_RECORD, deserialize(bytes));
+          return true;
+        });
+
+    List<LogRecordData> logs = getRemainingDataAndClose(readableFile);
+
+    assertEquals(2, logs.size());
+    assertEquals(SECOND_LOG_RECORD, logs.get(0));
+    assertEquals(THIRD_LOG_RECORD, logs.get(1));
+  }
+
+  @Test
+  public void whenProcessingSucceeds_returnSuccessStatus() throws IOException {
+    assertEquals(ReadableResult.SUCCEEDED, readableFile.readAndProcess(bytes -> true));
+  }
+
+  @Test
+  public void whenProcessingFails_returnProcessFailedStatus() throws IOException {
+    assertEquals(ReadableResult.PROCESSING_FAILED, readableFile.readAndProcess(bytes -> false));
+  }
+
+  @Test
+  public void deleteTemporaryFileWhenClosing() throws IOException {
+    readableFile.readAndProcess(bytes -> true);
+    readableFile.close();
+
+    assertFalse(temporaryFile.exists());
+  }
+
+  @Test
+  public void readMultipleLinesAndRemoveThem() throws IOException {
+    readableFile.readAndProcess(bytes -> true);
+    readableFile.readAndProcess(bytes -> true);
+
+    List<LogRecordData> logs = getRemainingDataAndClose(readableFile);
+
+    assertEquals(1, logs.size());
+    assertEquals(THIRD_LOG_RECORD, logs.get(0));
+  }
+
+  @Test
+  public void whenConsumerReturnsFalse_doNotRemoveLineFromSource() throws IOException {
+    readableFile.readAndProcess(bytes -> false);
+
+    List<LogRecordData> logs = getRemainingDataAndClose(readableFile);
+
+    assertEquals(3, logs.size());
+  }
+
+  @Test
+  public void whenReadingLastLine_deleteOriginalFile_and_close() throws IOException {
+    getRemainingDataAndClose(readableFile);
+
+    assertFalse(source.exists());
+    assertTrue(readableFile.isClosed());
+  }
+
+  @Test
+  public void whenNoMoreLinesAvailableToRead_deleteOriginalFile_close_and_returnNoContentStatus()
+      throws IOException {
+    File emptyFile = new File(dir, "emptyFile");
+    if (!emptyFile.createNewFile()) {
+      fail("Could not create file for tests");
+    }
+
+    ReadableFile emptyReadableFile =
+        new ReadableFile(
+            emptyFile, CREATED_TIME_MILLIS, clock, getConfiguration(temporaryFileProvider));
+
+    assertEquals(ReadableResult.FAILED, emptyReadableFile.readAndProcess(bytes -> true));
+
+    assertTrue(emptyReadableFile.isClosed());
+    assertFalse(emptyFile.exists());
+  }
+
+  @Test
+  public void
+      whenReadingAfterTheConfiguredReadingTimeExpired_deleteOriginalFile_close_and_returnFileExpiredException()
+          throws IOException {
+    readableFile.readAndProcess(bytes -> true);
+    doReturn(CREATED_TIME_MILLIS + MAX_FILE_AGE_FOR_READ_MILLIS).when(clock).now();
+
+    assertEquals(ReadableResult.FAILED, readableFile.readAndProcess(bytes -> true));
+
+    assertTrue(readableFile.isClosed());
+  }
+
+  @Test
+  public void whenReadingAfterClosed_returnFailedStatus() throws IOException {
+    readableFile.readAndProcess(bytes -> true);
+    readableFile.close();
+
+    assertEquals(ReadableResult.FAILED, readableFile.readAndProcess(bytes -> true));
+  }
+
+  private static List<LogRecordData> getRemainingDataAndClose(ReadableFile readableFile)
+      throws IOException {
+    List<LogRecordData> result = new ArrayList<>();
+    ReadableResult readableResult = ReadableResult.SUCCEEDED;
+    while (readableResult == ReadableResult.SUCCEEDED) {
+      readableResult =
+          readableFile.readAndProcess(
+              bytes -> {
+                result.add(deserialize(bytes));
+                return true;
+              });
+    }
+
+    readableFile.close();
+
+    return result;
+  }
+
+  private static LogRecordData deserialize(byte[] data) {
+    return SERIALIZER.deserialize(data).get(0);
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFileTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFileTest.java
new file mode 100644
index 000000000..8e037c236
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFileTest.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.internal.storage.files;
+
+import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MAX_FILE_AGE_FOR_WRITE_MILLIS;
+import static io.opentelemetry.contrib.disk.buffering.internal.storage.TestData.MAX_FILE_SIZE;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import io.opentelemetry.contrib.disk.buffering.internal.storage.TestData;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.responses.WritableResult;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.utils.StorageClock;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.util.List;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+class WritableFileTest {
+
+  @TempDir File rootDir;
+  private StorageClock clock;
+  private WritableFile writableFile;
+  private static final long CREATED_TIME_MILLIS = 1000L;
+  private static final byte[] NEW_LINE_BYTES =
+      System.lineSeparator().getBytes(StandardCharsets.UTF_8);
+  private static final int NEW_LINE_BYTES_SIZE = NEW_LINE_BYTES.length;
+
+  @BeforeEach
+  public void setUp() throws IOException {
+    clock = mock();
+    writableFile =
+        new WritableFile(
+            new File(rootDir, String.valueOf(CREATED_TIME_MILLIS)),
+            CREATED_TIME_MILLIS,
+            TestData.getDefaultConfiguration(),
+            clock);
+  }
+
+  @Test
+  public void hasNotExpired_whenWriteAgeHasNotExpired() {
+    doReturn(1500L).when(clock).now();
+
+    assertFalse(writableFile.hasExpired());
+  }
+
+  @Test
+  public void hasExpired_whenWriteAgeHasExpired() {
+    doReturn(2000L).when(clock).now();
+
+    assertTrue(writableFile.hasExpired());
+  }
+
+  @Test
+  public void appendDataInNewLines_andIncreaseSize() throws IOException {
+    byte[] line1 = getByteArrayLine("First line");
+    byte[] line2 = getByteArrayLine("Second line");
+    writableFile.append(line1);
+    writableFile.append(line2);
+    writableFile.close();
+
+    List<String> lines = getWrittenLines();
+
+    assertEquals(2, lines.size());
+    assertEquals("First line", lines.get(0));
+    assertEquals("Second line", lines.get(1));
+    assertEquals(line1.length + line2.length, writableFile.getSize());
+  }
+
+  @Test
+  public void whenAppendingData_andNotEnoughSpaceIsAvailable_closeAndReturnFailed()
+      throws IOException {
+    assertEquals(WritableResult.SUCCEEDED, writableFile.append(new byte[MAX_FILE_SIZE]));
+
+    assertEquals(WritableResult.FAILED, writableFile.append(new byte[1]));
+
+    assertEquals(1, getWrittenLines().size());
+    assertEquals(MAX_FILE_SIZE, writableFile.getSize());
+  }
+
+  @Test
+  public void whenAppendingData_andHasExpired_closeAndReturnExpiredStatus() throws IOException {
+    writableFile.append(new byte[2]);
+    doReturn(CREATED_TIME_MILLIS + MAX_FILE_AGE_FOR_WRITE_MILLIS).when(clock).now();
+
+    assertEquals(WritableResult.FAILED, writableFile.append(new byte[1]));
+
+    assertEquals(1, getWrittenLines().size());
+  }
+
+  @Test
+  public void whenAppendingData_andIsAlreadyClosed_returnFailedStatus() throws IOException {
+    writableFile.append(new byte[1]);
+    writableFile.close();
+
+    assertEquals(WritableResult.FAILED, writableFile.append(new byte[2]));
+  }
+
+  private static byte[] getByteArrayLine(String line) {
+    byte[] lineBytes = line.getBytes(StandardCharsets.UTF_8);
+    byte[] fullLine = new byte[lineBytes.length + NEW_LINE_BYTES_SIZE];
+    System.arraycopy(lineBytes, 0, fullLine, 0, lineBytes.length);
+    System.arraycopy(NEW_LINE_BYTES, 0, fullLine, lineBytes.length, NEW_LINE_BYTES_SIZE);
+    return fullLine;
+  }
+
+  private List<String> getWrittenLines() throws IOException {
+    return Files.readAllLines(writableFile.file.toPath());
+  }
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/testutils/BaseSignalSerializerTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/testutils/BaseSignalSerializerTest.java
new file mode 100644
index 000000000..5ad19aff6
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/testutils/BaseSignalSerializerTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.testutils;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader.DelimitedProtoStreamReader;
+import io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader.StreamReader;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+@SuppressWarnings("unchecked")
+public abstract class BaseSignalSerializerTest<SIGNAL_SDK_ITEM> {
+  protected byte[] serialize(SIGNAL_SDK_ITEM... items) {
+    return getSerializer().serialize(Arrays.asList(items));
+  }
+
+  protected List<SIGNAL_SDK_ITEM> deserialize(byte[] source) {
+    try (ByteArrayInputStream in = new ByteArrayInputStream(source)) {
+      StreamReader streamReader = DelimitedProtoStreamReader.Factory.getInstance().create(in);
+      return getSerializer().deserialize(Objects.requireNonNull(streamReader.read()).content);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  protected void assertSerialization(SIGNAL_SDK_ITEM... targets) {
+    byte[] serialized = serialize(targets);
+    assertThat(deserialize(serialized)).containsExactly(targets);
+  }
+
+  protected abstract SignalSerializer<SIGNAL_SDK_ITEM> getSerializer();
+}
diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/testutils/TestData.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/testutils/TestData.java
new file mode 100644
index 000000000..dc049229e
--- /dev/null
+++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/testutils/TestData.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.testutils;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.trace.SpanContext;
+import io.opentelemetry.api.trace.TraceFlags;
+import io.opentelemetry.api.trace.TraceState;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.resources.Resource;
+
+@SuppressWarnings("unchecked")
+public final class TestData {
+  public static final String TRACE_ID = "b535b3b5232b5dabced5b0ab8037eb78";
+  public static final String SPAN_ID = "f3fc364fb6b77cff";
+  public static final String PARENT_SPAN_ID = "d3fc364fb6b77cfa";
+  public static final Attributes ATTRIBUTES =
+      Attributes.builder()
+          .put("bear", "mya")
+          .put("warm", true)
+          .put("temperature", 30)
+          .put("length", 1.2)
+          .put("colors", "red", "blue")
+          .put("conditions", false, true)
+          .put("scores", 0L, 1L)
+          .put("coins", 0.01, 0.05, 0.1)
+          .build();
+
+  public static final Resource RESOURCE_FULL =
+      Resource.create(
+          Attributes.builder().put("resourceAttr", "resourceAttrValue").build(),
+          "resourceSchemaUrl");
+
+  public static final Resource RESOURCE_WITHOUT_SCHEMA_URL =
+      Resource.create(Attributes.builder().put("resourceAttr", "resourceAttrValue").build());
+
+  public static final SpanContext SPAN_CONTEXT =
+      SpanContext.create(TRACE_ID, SPAN_ID, TraceFlags.getSampled(), TraceState.getDefault());
+  public static final SpanContext SPAN_CONTEXT_WITH_TRACE_STATE =
+      SpanContext.create(
+          TRACE_ID,
+          SPAN_ID,
+          TraceFlags.getSampled(),
+          TraceState.builder().put("aaa", "bbb").put("ccc", "ddd").build());
+  public static final SpanContext PARENT_SPAN_CONTEXT =
+      SpanContext.create(
+          TRACE_ID, PARENT_SPAN_ID, TraceFlags.getSampled(), TraceState.getDefault());
+  public static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO_FULL =
+      InstrumentationScopeInfo.builder("Instrumentation scope name")
+          .setVersion("1.2.3")
+          .setSchemaUrl("instrumentationScopeInfoSchemaUrl")
+          .setAttributes(
+              Attributes.builder()
+                  .put("instrumentationScopeInfoAttr", "instrumentationScopeInfoAttrValue")
+                  .build())
+          .build();
+
+  public static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO_WITHOUT_VERSION =
+      InstrumentationScopeInfo.builder("Instrumentation scope name")
+          .setSchemaUrl("instrumentationScopeInfoSchemaUrl")
+          .setAttributes(
+              Attributes.builder()
"instrumentationScopeInfoAttrValue") + .build()) + .build(); + + private TestData() {} +} diff --git a/settings.gradle.kts b/settings.gradle.kts index 36d25379b..ec30d8acc 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -69,6 +69,7 @@ include(":aws-xray") include(":aws-xray-propagator") include(":consistent-sampling") include(":dependencyManagement") +include(":disk-buffering") include(":example") include(":jfr-events") include(":jfr-connection")