Skip to content

Commit

Permalink
test(otlp-transformer): add tests for logs serializer
Browse files Browse the repository at this point in the history
  • Loading branch information
pichlermarc committed Mar 20, 2024
1 parent c14bc9b commit 432cb41
Show file tree
Hide file tree
Showing 3 changed files with 270 additions and 89 deletions.
311 changes: 237 additions & 74 deletions experimental/packages/otlp-transformer/test/logs.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,28 @@ import {
createExportLogsServiceRequest,
ESeverityNumber,
IExportLogsServiceRequest,
ProtobufLogsSerializer,
JsonLogsSerializer,
OtlpEncodingOptions,
} from '../src';
import { ReadableLogRecord } from '@opentelemetry/sdk-logs';
import { SeverityNumber } from '@opentelemetry/api-logs';
import { toBase64 } from './utils';
import * as root from '../src/generated/root';

function createExpectedLogJson(
options: OtlpEncodingOptions
): IExportLogsServiceRequest {
const useHex = options.useHex ?? false;
const useLongBits = options.useLongBits ?? true;

const timeUnixNano = useLongBits
? { low: 4132445859, high: 391214506 }
: '1680253513123241635';
const observedTimeUnixNano = useLongBits
? { low: 584929536, high: 391976663 }
: '1683526948965142784';

function createExpectedLogJson(useHex: boolean): IExportLogsServiceRequest {
const traceId = useHex
? '00000000000000000000000000000001'
: hexToBinary('00000000000000000000000000000001');
Expand All @@ -49,8 +66,8 @@ function createExpectedLogJson(useHex: boolean): IExportLogsServiceRequest {
scope: { name: 'scope_name_1', version: '0.1.0' },
logRecords: [
{
timeUnixNano: { low: 4132445859, high: 391214506 },
observedTimeUnixNano: { low: 584929536, high: 391976663 },
timeUnixNano,
observedTimeUnixNano,
severityNumber: ESeverityNumber.SEVERITY_NUMBER_ERROR,
severityText: 'error',
body: { stringValue: 'some_log_body' },
Expand All @@ -75,78 +92,125 @@ function createExpectedLogJson(useHex: boolean): IExportLogsServiceRequest {
};
}

describe('Logs', () => {
describe('createExportLogsServiceRequest', () => {
let resource_1: Resource;
let resource_2: Resource;
let scope_1: InstrumentationScope;
let scope_2: InstrumentationScope;
let log_1_1_1: ReadableLogRecord;
let log_1_1_2: ReadableLogRecord;
let log_1_2_1: ReadableLogRecord;
let log_2_1_1: ReadableLogRecord;

beforeEach(() => {
resource_1 = new Resource({
'resource-attribute': 'some attribute value',
});
resource_2 = new Resource({
'resource-attribute': 'another attribute value',
});
scope_1 = {
name: 'scope_name_1',
version: '0.1.0',
schemaUrl: 'http://url.to.schema',
};
scope_2 = {
name: 'scope_name_2',
};
const log_fragment_1 = {
hrTime: [1680253513, 123241635] as HrTime,
hrTimeObserved: [1683526948, 965142784] as HrTime,
attributes: {
'some-attribute': 'some attribute value',
},
droppedAttributesCount: 0,
severityNumber: SeverityNumber.ERROR,
severityText: 'error',
body: 'some_log_body',
spanContext: {
spanId: '0000000000000002',
traceFlags: TraceFlags.SAMPLED,
traceId: '00000000000000000000000000000001',
},
};
const log_fragment_2 = {
hrTime: [1680253797, 687038506] as HrTime,
hrTimeObserved: [1680253797, 687038506] as HrTime,
attributes: {
'another-attribute': 'another attribute value',
function createExpectedLogProtobuf(): IExportLogsServiceRequest {
const traceId = toBase64('00000000000000000000000000000001');
const spanId = toBase64('0000000000000002');

return {
resourceLogs: [
{
resource: {
attributes: [
{
key: 'resource-attribute',
value: { stringValue: 'some attribute value' },
},
],
droppedAttributesCount: 0,
},
droppedAttributesCount: 0,
};
log_1_1_1 = {
...log_fragment_1,
resource: resource_1,
instrumentationScope: scope_1,
};
log_1_1_2 = {
...log_fragment_2,
resource: resource_1,
instrumentationScope: scope_1,
};
log_1_2_1 = {
...log_fragment_1,
resource: resource_1,
instrumentationScope: scope_2,
};
log_2_1_1 = {
...log_fragment_1,
resource: resource_2,
instrumentationScope: scope_1,
};
scopeLogs: [
{
scope: { name: 'scope_name_1', version: '0.1.0' },
logRecords: [
{
timeUnixNano: 1680253513123241700,
observedTimeUnixNano: 1683526948965142800,
severityNumber: ESeverityNumber.SEVERITY_NUMBER_ERROR,
severityText: 'error',
body: { stringValue: 'some_log_body' },

attributes: [
{
key: 'some-attribute',
value: { stringValue: 'some attribute value' },
},
],
droppedAttributesCount: 0,
flags: 1,
traceId: traceId,
spanId: spanId,
},
],
schemaUrl: 'http://url.to.schema',
},
],
},
],
};
}

describe('Logs', () => {
let resource_1: Resource;
let resource_2: Resource;
let scope_1: InstrumentationScope;
let scope_2: InstrumentationScope;
let log_1_1_1: ReadableLogRecord;
let log_1_1_2: ReadableLogRecord;
let log_1_2_1: ReadableLogRecord;
let log_2_1_1: ReadableLogRecord;

beforeEach(() => {
resource_1 = new Resource({
'resource-attribute': 'some attribute value',
});
resource_2 = new Resource({
'resource-attribute': 'another attribute value',
});
scope_1 = {
name: 'scope_name_1',
version: '0.1.0',
schemaUrl: 'http://url.to.schema',
};
scope_2 = {
name: 'scope_name_2',
};
const log_fragment_1 = {
hrTime: [1680253513, 123241635] as HrTime,
hrTimeObserved: [1683526948, 965142784] as HrTime,
attributes: {
'some-attribute': 'some attribute value',
},
droppedAttributesCount: 0,
severityNumber: SeverityNumber.ERROR,
severityText: 'error',
body: 'some_log_body',
spanContext: {
spanId: '0000000000000002',
traceFlags: TraceFlags.SAMPLED,
traceId: '00000000000000000000000000000001',
},
};
const log_fragment_2 = {
hrTime: [1680253797, 687038506] as HrTime,
hrTimeObserved: [1680253797, 687038506] as HrTime,
attributes: {
'another-attribute': 'another attribute value',
},
droppedAttributesCount: 0,
};
log_1_1_1 = {
...log_fragment_1,
resource: resource_1,
instrumentationScope: scope_1,
};
log_1_1_2 = {
...log_fragment_2,
resource: resource_1,
instrumentationScope: scope_1,
};
log_1_2_1 = {
...log_fragment_1,
resource: resource_1,
instrumentationScope: scope_2,
};
log_2_1_1 = {
...log_fragment_1,
resource: resource_2,
instrumentationScope: scope_1,
};
});

describe('createExportLogsServiceRequest', () => {
it('returns null on an empty list', () => {
assert.deepStrictEqual(
createExportLogsServiceRequest([], { useHex: true }),
Expand All @@ -161,15 +225,21 @@ describe('Logs', () => {
useHex: true,
});
assert.ok(exportRequest);
assert.deepStrictEqual(exportRequest, createExpectedLogJson(true));
assert.deepStrictEqual(
exportRequest,
createExpectedLogJson({ useHex: true })
);
});

it('serializes a log record with useHex = false', () => {
const exportRequest = createExportLogsServiceRequest([log_1_1_1], {
useHex: false,
});
assert.ok(exportRequest);
assert.deepStrictEqual(exportRequest, createExpectedLogJson(false));
assert.deepStrictEqual(
exportRequest,
createExpectedLogJson({ useHex: false })
);
});

it('aggregates multiple logs with same resource and same scope', () => {
Expand Down Expand Up @@ -205,4 +275,97 @@ describe('Logs', () => {
assert.strictEqual(exportRequest.resourceLogs?.length, 2);
});
});

describe('ProtobufLogsSerializer', function () {
  it('serializes an export request', () => {
    // Serialize a single log record, then round-trip the bytes through the
    // generated protobuf definitions and compare against the expected object.
    const requestBytes = ProtobufLogsSerializer.serializeRequest([log_1_1_1]);
    assert.ok(requestBytes, 'serialized response is undefined');

    const message =
      root.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest.decode(
        requestBytes
      );
    const actual =
      root.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest.toObject(
        message,
        {
          // Converting longs to Number loses precision; createExpectedLogProtobuf()
          // accounts for that. Using String would incur the same loss on the
          // browser only, so Number keeps one assertion for browser and Node.js.
          longs: Number,
          // Convert bytes to a Base64 String, since the raw type differs between
          // Node.js (Buffer) and the browser (Uint8Array) and would fail
          // deep-equality otherwise.
          bytes: String,
        }
      );

    assert.deepStrictEqual(actual, createExpectedLogProtobuf());
  });

  it('deserializes a response', () => {
    // Encode a known ExportLogsServiceResponse, then check the serializer
    // recovers its partialSuccess contents.
    const responseBytes =
      root.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse.encode(
        {
          partialSuccess: {
            errorMessage: 'foo',
            rejectedLogRecords: 1,
          },
        }
      ).finish();

    const response = ProtobufLogsSerializer.deserializeResponse(responseBytes);

    assert.ok(
      response.partialSuccess,
      'partialSuccess not present in the deserialized message'
    );
    assert.equal(response.partialSuccess.errorMessage, 'foo');
    assert.equal(Number(response.partialSuccess.rejectedLogRecords), 1);
  });
});

describe('JsonLogsSerializer', function () {
  it('serializes an export request', () => {
    // Round-trip the expected object through JSON.stringify/parse so that
    // undefined-valued keys are dropped before the deep-equality check.
    const expected = JSON.parse(
      JSON.stringify(
        createExpectedLogJson({ useHex: true, useLongBits: false })
      )
    );

    const requestBytes = JsonLogsSerializer.serializeRequest([log_1_1_1]);
    const actual = JSON.parse(new TextDecoder().decode(requestBytes));

    assert.deepStrictEqual(actual, expected);
  });

  it('deserializes a response', () => {
    // Encode a known response payload as UTF-8 JSON, then check the
    // serializer recovers its partialSuccess contents.
    const responsePayload = {
      partialSuccess: {
        errorMessage: 'foo',
        rejectedLogRecords: 1,
      },
    };
    const responseBytes = new TextEncoder().encode(
      JSON.stringify(responsePayload)
    );

    const response = JsonLogsSerializer.deserializeResponse(responseBytes);

    assert.ok(
      response.partialSuccess,
      'partialSuccess not present in the deserialized message'
    );
    assert.equal(response.partialSuccess.errorMessage, 'foo');
    assert.equal(Number(response.partialSuccess.rejectedLogRecords), 1);
  });
});
});
16 changes: 1 addition & 15 deletions experimental/packages/otlp-transformer/test/trace.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import {
ProtobufTraceSerializer,
JsonTraceSerializer,
} from '../src';
import { toBase64 } from './utils';

function createExpectedSpanJson(options: OtlpEncodingOptions) {
const useHex = options.useHex ?? false;
Expand Down Expand Up @@ -136,21 +137,6 @@ function createExpectedSpanJson(options: OtlpEncodingOptions) {
};
}

/**
 * Utility function to convert a string representing a hex value to a base64
 * string that represents the bytes of that hex value. This is needed as we
 * need to support Node.js 14, where btoa() does not exist, and the Browser,
 * where Buffer does not exist.
 * @param hexStr hex string to convert
 * @returns base64 encoding of the bytes represented by hexStr
 */
function toBase64(hexStr: string) {
  // assumes hexToBinary() returns the bytes as a Uint8Array-like — TODO confirm
  const bytes = hexToBinary(hexStr);

  if (typeof btoa !== 'undefined') {
    // btoa() expects a "binary string" where each character is one byte.
    // Decoding the bytes as UTF-8 (TextDecoder) would mangle any byte >= 0x80
    // (invalid UTF-8 sequences become U+FFFD), so map each byte to its char
    // code directly instead.
    let binaryString = '';
    for (let i = 0; i < bytes.length; i++) {
      binaryString += String.fromCharCode(bytes[i]);
    }
    return btoa(binaryString);
  }

  return Buffer.from(bytes).toString('base64');
}

function createExpectedSpanProtobuf() {
const startTime = 1640715557342725400;
const endTime = 1640715558642725400;
Expand Down
Loading

0 comments on commit 432cb41

Please sign in to comment.