Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AI Integration: Adds a new flag in DistributedTracingOptions #3562

Merged
merged 7 commits into from
Nov 16, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ public static void RecordDiagnosticsForRequests(
Documents.OperationType operationType,
OpenTelemetryAttributes response)
{
if (CosmosDbEventSource.IsEnabled(EventLevel.Informational))
if (config.EnableDiagnosticsTraceForAllRequests)
{
CosmosDbEventSource.Singleton.WriteInfoEvent(response.Diagnostics.ToString());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,43 @@ internal sealed class DistributedTracingOptions
/// Default Latency threshold for QUERY operation
/// </summary>
internal static readonly TimeSpan DefaultQueryTimeoutThreshold = TimeSpan.FromMilliseconds(500);
private bool enableDiagnosticsTraceForAllRequests;
private TimeSpan? diagnosticsLatencyThreshold;

/// <summary>
/// Latency Threshold to generate (<see cref="System.Diagnostics.Tracing.EventSource"/>) with Request diagnostics in distributed tracing.<br></br>
/// If it is not set, then by default it will generate (<see cref="System.Diagnostics.Tracing.EventSource"/>) events for query operations taking more than 500 ms and non-query operations taking more than 100 ms.
/// </summary>
public TimeSpan? DiagnosticsLatencyThreshold { get; set; }
public TimeSpan? DiagnosticsLatencyThreshold
{
    get => this.diagnosticsLatencyThreshold;
    set
    {
        // The threshold and EnableDiagnosticsTraceForAllRequests are mutually exclusive,
        // but clearing the threshold (assigning null) must always be allowed — a null
        // threshold is the one state that is compatible with trace-all mode. Only reject
        // a non-null threshold while trace-all is enabled; this mirrors the symmetric
        // check in the EnableDiagnosticsTraceForAllRequests setter (value && threshold != null).
        if (value != null && this.EnableDiagnosticsTraceForAllRequests)
        {
            throw new ArgumentException("EnableDiagnosticsTraceForAllRequests can not be true along with DiagnosticsLatencyThreshold.");
        }

        this.diagnosticsLatencyThreshold = value;
    }
}

/// <summary>
/// Set this flag to true if you want to generate (<see cref="System.Diagnostics.Tracing.EventSource"/>) events containing the request diagnostics string for all operations.
/// If this flag is true, the <see cref="DiagnosticsLatencyThreshold"/> value is not honoured when generating diagnostic traces.
/// </summary>
public bool EnableDiagnosticsTraceForAllRequests
{
    get
    {
        return this.enableDiagnosticsTraceForAllRequests;
    }

    set
    {
        // Trace-all mode and an explicit latency threshold are mutually exclusive:
        // enabling the flag while a threshold is configured is rejected up front.
        bool conflictsWithThreshold = value && this.DiagnosticsLatencyThreshold.HasValue;
        if (conflictsWithThreshold)
        {
            throw new ArgumentException("EnableDiagnosticsTraceForAllRequests can not be true along with DiagnosticsLatencyThreshold.");
        }

        this.enableDiagnosticsTraceForAllRequests = value;
    }
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1602,7 +1602,7 @@
throttleClient.ClientOptions.EnableDistributedTracing = true;
throttleClient.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

ItemRequestOptions requestOptions = new ItemRequestOptions();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@
throttleClient.ClientOptions.EnableDistributedTracing = true;
throttleClient.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

ItemRequestOptions requestOptions = new ItemRequestOptions();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,7 @@ public sealed class EndToEndTraceWriterBaselineTests : BaselineTests<EndToEndTra

private static readonly TimeSpan delayTime = TimeSpan.FromSeconds(2);
private static readonly RequestHandler requestHandler = new RequestHandlerSleepHelper(delayTime);

private const double DiagnosticsLatencyThresholdValue = .0001; // Very Very Small Value

[ClassInitialize()]
public static async Task ClassInitAsync(TestContext context)
{
Expand All @@ -58,17 +57,17 @@ public static async Task ClassInitAsync(TestContext context)

client.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

bulkClient.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

miscCosmosClient.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

EndToEndTraceWriterBaselineTests.database = await client.CreateDatabaseAsync(
Expand Down Expand Up @@ -985,7 +984,7 @@ public async Task PointOperationsExceptionsAsync()
throttleClient.ClientOptions.EnableDistributedTracing = true;
throttleClient.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

ItemRequestOptions requestOptions = new ItemRequestOptions();
Expand Down Expand Up @@ -1272,7 +1271,7 @@ public async Task BulkOperationsAsync()
throttleClient.ClientOptions.EnableDistributedTracing = true;
throttleClient.ClientOptions.DistributedTracingOptions = new DistributedTracingOptions()
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(DiagnosticsLatencyThresholdValue)
EnableDiagnosticsTraceForAllRequests = true
};

ItemRequestOptions requestOptions = new ItemRequestOptions();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,7 @@ static FeedIteratorInternal feedCreator(DocumentServiceLease lease, string conti
Mock<ContainerInternal> mockedMonitoredContainer = new Mock<ContainerInternal>(MockBehavior.Strict);
mockedMonitoredContainer.Setup(c => c.GetCachedRIDAsync(It.IsAny<bool>(), It.IsAny<ITrace>(), It.IsAny<CancellationToken>())).ReturnsAsync(monitoredContainerRid);
mockedMonitoredContainer.Setup(c => c.ClientContext).Returns(mockedContext.Object);
mockedMonitoredContainer.Setup(c => c.Database.Id).Returns("databaseId");
mockedMonitoredContainer.Setup(c => c.Id).Returns("containerId");

Mock<FeedIteratorInternal> leaseFeedIterator = new Mock<FeedIteratorInternal>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,25 @@ public void VerifyCosmosConfigurationPropertiesGetUpdated()
Assert.AreEqual(portReuseMode, policy.PortReuseMode);
Assert.IsTrue(policy.EnableTcpConnectionEndpointRediscovery);
CollectionAssert.AreEqual(preferredLocations.ToArray(), policy.PreferredLocations.ToArray());

// Verify DiagnosticsLatencyThreshold
cosmosClientBuilder = new CosmosClientBuilder(
accountEndpoint: endpoint,
authKeyOrResourceToken: key);
var exception = Assert.ThrowsException<ArgumentException>(() => cosmosClientBuilder.WithConnectionModeDirect(
idleTcpConnectionTimeout,
openTcpConnectionTimeout,
maxRequestsPerTcpConnection,
maxTcpConnectionsPerEndpoint,
portReuseMode,
enableTcpConnectionEndpointRediscovery)
.WithApplicationPreferredRegions(preferredLocations)
.WithDistributingTracing(new DistributedTracingOptions
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(100),
EnableDiagnosticsTraceForAllRequests = true
}));
Assert.AreEqual("EnableDiagnosticsTraceForAllRequests can not be true along with DiagnosticsLatencyThreshold.", exception.Message);
}

[TestMethod]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ public void CheckReturnFalseOnSuccessAndLowerLatencyThanConfiguredConfig()

DistributedTracingOptions distributedTracingOptions = new DistributedTracingOptions
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(20)
DiagnosticsLatencyThreshold = this.rootTrace.Duration.Add(TimeSpan.FromSeconds(1))
};

OpenTelemetryAttributes response = new OpenTelemetryAttributes
Expand All @@ -63,7 +63,7 @@ public void CheckReturnTrueOnFailedStatusCode()

DistributedTracingOptions distributedTracingOptions = new DistributedTracingOptions
{
DiagnosticsLatencyThreshold = TimeSpan.FromMilliseconds(20)
DiagnosticsLatencyThreshold = this.rootTrace.Duration.Add(TimeSpan.FromSeconds(1))
};

OpenTelemetryAttributes response = new OpenTelemetryAttributes
Expand Down