Add support for OTLP exported metrics and tracing #1489

Open · wants to merge 15 commits into main
17 changes: 15 additions & 2 deletions app/backend/app.py
@@ -561,8 +561,7 @@ def create_app():
app = Quart(__name__)
app.register_blueprint(bp)

if os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"):
configure_azure_monitor()
def instrument_app():
# This tracks HTTP requests made by aiohttp:
AioHttpClientInstrumentor().instrument()
# This tracks HTTP requests made by httpx:
@@ -572,6 +571,20 @@ def create_app():
# This middleware tracks app route requests:
app.asgi_app = OpenTelemetryMiddleware(app.asgi_app) # type: ignore[assignment]

if os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"):
configure_azure_monitor()
instrument_app()
elif os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT"):
from otlp_tracing import configure_oltp_grpc_tracing

configure_oltp_grpc_tracing(
service_name=os.getenv("OTEL_SERVICE_NAME", "azure-search-openai-demo"),
endpoint=os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT"),
insecure=os.getenv("OTEL_EXPORTER_OTLP_TRACES_INSECURE", "true").lower() == "true",
api_key=os.getenv("OTEL_EXPORTER_OTLP_TRACES_API_KEY"),
)
instrument_app()

# Level should be one of https://docs.python.org/3/library/logging.html#logging-levels
default_level = "INFO" # In development, log more verbosely
if os.getenv("WEBSITE_HOSTNAME"): # In production, don't log as heavily
52 changes: 52 additions & 0 deletions app/backend/otlp_tracing.py
@@ -0,0 +1,52 @@
import logging

from opentelemetry import metrics, trace

# Logging (Experimental)
from opentelemetry._logs import set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import (
OTLPLogExporter,
)
from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor


def configure_oltp_grpc_tracing(
service_name: str = "azure-search-openai-demo", endpoint=None, insecure=True, api_key=None
):
# Service name is required for most backends
resource = Resource(attributes={SERVICE_NAME: service_name})

if api_key:
headers = {"x-otlp-api-key": api_key}
else:
headers = None

# Configure Tracing
traceProvider = TracerProvider(resource=resource)
processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint, insecure=insecure, headers=headers))
traceProvider.add_span_processor(processor)
trace.set_tracer_provider(traceProvider)

# Configure Metrics
reader = PeriodicExportingMetricReader(OTLPMetricExporter(endpoint=endpoint, insecure=insecure, headers=headers))
meterProvider = MeterProvider(resource=resource, metric_readers=[reader])
metrics.set_meter_provider(meterProvider)

# Configure Logging
logger_provider = LoggerProvider(resource=resource)
set_logger_provider(logger_provider)

exporter = OTLPLogExporter(endpoint=endpoint, insecure=insecure, headers=headers)
logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter))
handler = LoggingHandler(level=logging.NOTSET, logger_provider=logger_provider)

# Attach OTLP handler to root logger
logging.getLogger().addHandler(handler)
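For context, a minimal usage sketch (not part of the PR) showing how the new module might be exercised once the exporter packages are installed. The endpoint value is an assumption for a local OTLP gRPC collector; in the PR itself, the call happens inside `create_app()` in `app/backend/app.py`.

```python
import logging

from opentelemetry import metrics, trace

from otlp_tracing import configure_oltp_grpc_tracing

# Assumed local collector endpoint; adjust for your environment.
configure_oltp_grpc_tracing(
    service_name="azure-search-openai-demo",
    endpoint="http://localhost:4317",
    insecure=True,
)

tracer = trace.get_tracer(__name__)
meter = metrics.get_meter(__name__)
request_counter = meter.create_counter("example.requests")

# Spans, metrics, and log records all flow through the globally registered providers.
with tracer.start_as_current_span("example-operation"):
    request_counter.add(1)
    logging.getLogger(__name__).info("hello from inside a span")
```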
3 changes: 3 additions & 0 deletions app/backend/requirements.in
@@ -16,6 +16,9 @@ opentelemetry-instrumentation-httpx
opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiohttp-client
opentelemetry-instrumentation-openai
opentelemetry-exporter-otlp-proto-grpc
opentelemetry-exporter-otlp-proto-http

msal
azure-keyvault-secrets
cryptography
110 changes: 67 additions & 43 deletions app/backend/requirements.txt
@@ -18,7 +18,7 @@ anyio==4.3.0
# via
# httpx
# openai
asgiref==3.7.2
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
attrs==23.2.0
# via aiohttp
@@ -89,7 +89,10 @@ cryptography==42.0.5
# pyjwt
# python-jose
deprecated==1.2.14
# via opentelemetry-api
# via
# opentelemetry-api
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
distro==1.9.0
# via openai
ecdsa==0.18.0
@@ -102,6 +105,12 @@ frozenlist==1.4.1
# via
# aiohttp
# aiosignal
googleapis-common-protos==1.63.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
grpcio==1.62.1
# via opentelemetry-exporter-otlp-proto-grpc
h11==0.14.0
# via
# httpcore
@@ -114,7 +123,7 @@ h2==4.1.0
# hypercorn
hpack==4.0.0
# via h2
httpcore==1.0.4
httpcore==1.0.5
# via httpx
httpx[http2]==0.27.0
# via
@@ -131,8 +140,10 @@ idna==3.7
# httpx
# requests
# yarl
importlib-metadata==6.11.0
# via opentelemetry-api
importlib-metadata==7.0.0
# via
# opentelemetry-api
# opentelemetry-instrumentation-flask
isodate==0.6.1
# via
# azure-ai-documentintelligence
@@ -174,7 +185,7 @@ microsoft-kiota-serialization-json==1.1.0
# via msgraph-sdk
microsoft-kiota-serialization-text==1.0.0
# via msgraph-sdk
msal==1.27.0
msal==1.28.0
# via
# -r requirements.in
# azure-identity
@@ -198,15 +209,12 @@ numpy==1.26.4
# pandas-stubs
oauthlib==3.2.2
# via requests-oauthlib
openai[datalib]==1.13.3
openai[datalib]==1.16.1
# via -r requirements.in
opentelemetry-api==1.23.0
opentelemetry-api==1.24.0
# via
# azure-core-tracing-opentelemetry
# azure-monitor-opentelemetry-exporter
# microsoft-kiota-abstractions
# microsoft-kiota-authentication-azure
# microsoft-kiota-http
# opentelemetry-instrumentation
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-asgi
@@ -222,7 +230,15 @@ opentelemetry-api==1.23.0
# opentelemetry-instrumentation-urllib3
# opentelemetry-instrumentation-wsgi
# opentelemetry-sdk
opentelemetry-instrumentation==0.44b0
opentelemetry-exporter-otlp-proto-common==1.24.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.24.0
# via -r requirements.in
opentelemetry-exporter-otlp-proto-http==1.24.0
# via -r requirements.in
opentelemetry-instrumentation==0.45b0
# via
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-asgi
@@ -237,48 +253,50 @@ opentelemetry-instrumentation==0.44b0
# opentelemetry-instrumentation-urllib
# opentelemetry-instrumentation-urllib3
# opentelemetry-instrumentation-wsgi
opentelemetry-instrumentation-aiohttp-client==0.44b0
opentelemetry-instrumentation-aiohttp-client==0.45b0
# via -r requirements.in
opentelemetry-instrumentation-asgi==0.44b0
opentelemetry-instrumentation-asgi==0.45b0
# via
# -r requirements.in
# opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-dbapi==0.44b0
opentelemetry-instrumentation-dbapi==0.45b0
# via opentelemetry-instrumentation-psycopg2
opentelemetry-instrumentation-django==0.44b0
opentelemetry-instrumentation-django==0.45b0
# via azure-monitor-opentelemetry
opentelemetry-instrumentation-fastapi==0.44b0
opentelemetry-instrumentation-fastapi==0.45b0
# via azure-monitor-opentelemetry
opentelemetry-instrumentation-flask==0.44b0
opentelemetry-instrumentation-flask==0.45b0
# via azure-monitor-opentelemetry
opentelemetry-instrumentation-httpx==0.44b0
opentelemetry-instrumentation-httpx==0.45b0
# via -r requirements.in
opentelemetry-instrumentation-openai==0.13.1
opentelemetry-instrumentation-openai==0.15.9
# via -r requirements.in
opentelemetry-instrumentation-psycopg2==0.44b0
opentelemetry-instrumentation-psycopg2==0.45b0
# via azure-monitor-opentelemetry
opentelemetry-instrumentation-requests==0.44b0
opentelemetry-instrumentation-requests==0.45b0
# via
# -r requirements.in
# azure-monitor-opentelemetry
opentelemetry-instrumentation-urllib==0.44b0
opentelemetry-instrumentation-urllib==0.45b0
# via azure-monitor-opentelemetry
opentelemetry-instrumentation-urllib3==0.44b0
opentelemetry-instrumentation-urllib3==0.45b0
# via azure-monitor-opentelemetry
opentelemetry-instrumentation-wsgi==0.44b0
opentelemetry-instrumentation-wsgi==0.45b0
# via
# opentelemetry-instrumentation-django
# opentelemetry-instrumentation-flask
opentelemetry-proto==1.24.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-resource-detector-azure==0.1.3
# via azure-monitor-opentelemetry
opentelemetry-sdk==1.23.0
opentelemetry-sdk==1.24.0
# via
# azure-monitor-opentelemetry-exporter
# microsoft-kiota-abstractions
# microsoft-kiota-authentication-azure
# microsoft-kiota-http
# opentelemetry-resource-detector-azure
opentelemetry-semantic-conventions==0.44b0
opentelemetry-semantic-conventions==0.45b0
# via
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-asgi
@@ -287,14 +305,15 @@
# opentelemetry-instrumentation-fastapi
# opentelemetry-instrumentation-flask
# opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-openai
# opentelemetry-instrumentation-requests
# opentelemetry-instrumentation-urllib
# opentelemetry-instrumentation-urllib3
# opentelemetry-instrumentation-wsgi
# opentelemetry-sdk
opentelemetry-semantic-conventions-ai==0.0.20
opentelemetry-semantic-conventions-ai==0.1.1
# via opentelemetry-instrumentation-openai
opentelemetry-util-http==0.44b0
opentelemetry-util-http==0.45b0
# via
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-asgi
@@ -306,13 +325,13 @@
# opentelemetry-instrumentation-urllib
# opentelemetry-instrumentation-urllib3
# opentelemetry-instrumentation-wsgi
packaging==23.2
packaging==24.0
# via
# msal-extensions
# opentelemetry-instrumentation-flask
pandas==2.2.1
# via openai
pandas-stubs==2.2.0.240218
pandas-stubs==2.2.1.240316
# via openai
pendulum==3.0.0
# via microsoft-kiota-serialization-json
@@ -322,13 +341,17 @@
# via msal-extensions
priority==2.0.0
# via hypercorn
pyasn1==0.5.1
protobuf==4.25.3
# via
# googleapis-common-protos
# opentelemetry-proto
pyasn1==0.6.0
# via
# python-jose
# rsa
pycparser==2.21
pycparser==2.22
# via cffi
pydantic==2.6.3
pydantic==2.6.4
# via openai
pydantic-core==2.16.3
# via pydantic
@@ -352,7 +375,7 @@
# via -r requirements.in
pytz==2024.1
# via pandas
quart==0.19.4
quart==0.19.5
# via
# -r requirements.in
# quart-cors
@@ -365,9 +388,10 @@
# azure-core
# msal
# msrest
# opentelemetry-exporter-otlp-proto-http
# requests-oauthlib
# tiktoken
requests-oauthlib==1.3.1
requests-oauthlib==2.0.0
# via msrest
rsa==4.9
# via python-jose
@@ -398,7 +422,7 @@
# via -r requirements.in
types-html5lib==1.1.11.20240228
# via types-beautifulsoup4
types-pillow==10.2.0.20240213
types-pillow==10.2.0.20240331
# via -r requirements.in
types-pyasn1==0.6.0.20240402
# via types-python-jose
@@ -423,9 +447,9 @@
# pendulum
urllib3==2.2.1
# via requests
uvicorn==0.27.1
uvicorn==0.29.0
# via -r requirements.in
werkzeug==3.0.1
werkzeug==3.0.2
# via
# flask
# quart
Expand All @@ -440,7 +464,7 @@ wsproto==1.2.0
# via hypercorn
yarl==1.9.4
# via aiohttp
zipp==3.17.0
zipp==3.18.1
# via importlib-metadata

# The following packages are considered to be unsafe in a requirements file:
4 changes: 4 additions & 0 deletions docs/localdev.md
@@ -65,3 +65,7 @@ If you're running inside a dev container, use this local URL instead:
```shell
azd env set OPENAI_BASE_URL http://host.docker.internal:8080/v1
```

## (Optional) Running with OpenTelemetry tracing

To run the service with OpenTelemetry tracing, you can use the local Aspire Dashboard; see [OpenTelemetry support](opentelemetry.md) for details on starting the dashboard.
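The backend decides which telemetry path to use from environment variables (see `app/backend/app.py` in this PR): if `APPLICATIONINSIGHTS_CONNECTION_STRING` is set, telemetry goes to Azure Monitor; otherwise, if `OTEL_EXPORTER_OTLP_ENDPOINT` is set, it is exported over OTLP gRPC. As a sketch, assuming the collector listens on the usual local gRPC port (adjust the endpoint for your setup):

```shell
# Assumed local OTLP gRPC endpoint; adjust to wherever your collector listens
azd env set OTEL_EXPORTER_OTLP_ENDPOINT http://localhost:4317
# Optional overrides; these defaults match app/backend/app.py
azd env set OTEL_SERVICE_NAME azure-search-openai-demo
azd env set OTEL_EXPORTER_OTLP_TRACES_INSECURE true
```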