Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions code/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
azure_package_log_level = getattr(
logging, PACKAGE_LOGGING_LEVEL.upper(), logging.WARNING
)

for logger_name in AZURE_LOGGING_PACKAGES:
logging.getLogger(logger_name).setLevel(azure_package_log_level)

Expand All @@ -33,6 +34,24 @@
configure_azure_monitor()
HTTPXClientInstrumentor().instrument() # httpx is used by openai

# Register ConversationSpanProcessor to propagate conversation_id/user_id to all child spans
from opentelemetry import trace as otel_trace
from create_app import ConversationSpanProcessor

provider = otel_trace.get_tracer_provider()
if hasattr(provider, "add_span_processor"):
provider.add_span_processor(ConversationSpanProcessor())

# Suppress noisy Azure SDK loggers AFTER configure_azure_monitor()
# to prevent it from overriding our levels
_NOISY_AZURE_LOGGERS = [
"azure.core.pipeline.policies.http_logging_policy",
"azure.monitor.opentelemetry.exporter",
"azure.identity",
]
for logger_name in _NOISY_AZURE_LOGGERS:
logging.getLogger(logger_name).setLevel(logging.WARNING)

# pylint: disable=wrong-import-position
from create_app import create_app # noqa: E402

Expand Down
11 changes: 11 additions & 0 deletions code/backend/Admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
azure_package_log_level = getattr(
logging, PACKAGE_LOGGING_LEVEL.upper(), logging.WARNING
)

for logger_name in AZURE_LOGGING_PACKAGES:
logging.getLogger(logger_name).setLevel(azure_package_log_level)

Expand All @@ -35,6 +36,16 @@
if os.getenv("APPLICATIONINSIGHTS_ENABLED", "false").lower() == "true":
configure_azure_monitor()

# Suppress noisy Azure SDK loggers AFTER configure_azure_monitor()
# to prevent it from overriding our levels
_NOISY_AZURE_LOGGERS = [
"azure.core.pipeline.policies.http_logging_policy",
"azure.monitor.opentelemetry.exporter",
"azure.identity",
]
for logger_name in _NOISY_AZURE_LOGGERS:
logging.getLogger(logger_name).setLevel(logging.WARNING)

logger = logging.getLogger(__name__)
logger.debug("Starting admin app")

Expand Down
21 changes: 21 additions & 0 deletions code/backend/api/chat_history.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from backend.batch.utilities.helpers.config.config_helper import ConfigHelper
from backend.batch.utilities.helpers.env_helper import EnvHelper
from backend.batch.utilities.chat_history.database_factory import DatabaseFactory
from backend.batch.utilities.loggers.event_utils import track_event_if_configured

load_dotenv()
bp_chat_history_response = Blueprint("chat_history", __name__)
Expand Down Expand Up @@ -110,6 +111,11 @@ async def rename_conversation():
if not title or title.strip() == "":
return jsonify({"error": "A non-empty title is required"}), 400

track_event_if_configured("HistoryRenameRequested", {
"conversation_id": conversation_id,
"user_id": user_id,
})

# Initialize and connect to the database client
conversation_client = init_database_client()
if not conversation_client:
Expand Down Expand Up @@ -167,6 +173,11 @@ async def get_conversation():
if not conversation_id:
return jsonify({"error": "conversation_id is required"}), 400

track_event_if_configured("HistoryReadRequested", {
"conversation_id": conversation_id,
"user_id": user_id,
})

# Initialize and connect to the database client
conversation_client = init_database_client()
if not conversation_client:
Expand Down Expand Up @@ -246,6 +257,11 @@ async def delete_conversation():
400,
)

track_event_if_configured("HistoryDeleteRequested", {
"conversation_id": conversation_id,
"user_id": user_id,
})

# Initialize and connect to the database client
conversation_client = init_database_client()
if not conversation_client:
Expand Down Expand Up @@ -369,6 +385,11 @@ async def update_conversation():
if not conversation_id:
return jsonify({"error": "conversation_id is required"}), 400

track_event_if_configured("HistoryUpdateRequested", {
"conversation_id": conversation_id,
"user_id": user_id,
})

messages = request_json["messages"]
if not messages or len(messages) == 0:
return jsonify({"error": "Messages are required"}), 400
Expand Down
11 changes: 11 additions & 0 deletions code/backend/batch/function_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,23 @@
azure_package_log_level = getattr(
logging, PACKAGE_LOGGING_LEVEL.upper(), logging.WARNING
)

for logger_name in AZURE_LOGGING_PACKAGES:
logging.getLogger(logger_name).setLevel(azure_package_log_level)

if os.getenv("APPLICATIONINSIGHTS_ENABLED", "false").lower() == "true":
configure_azure_monitor()

# Suppress noisy Azure SDK loggers AFTER configure_azure_monitor()
# to prevent it from overriding our levels
_NOISY_AZURE_LOGGERS = [
"azure.core.pipeline.policies.http_logging_policy",
"azure.monitor.opentelemetry.exporter",
"azure.identity",
]
for logger_name in _NOISY_AZURE_LOGGERS:
logging.getLogger(logger_name).setLevel(logging.WARNING)

app = func.FunctionApp(
http_auth_level=func.AuthLevel.FUNCTION
) # change to ANONYMOUS for local debugging
Expand Down
32 changes: 32 additions & 0 deletions code/backend/batch/utilities/loggers/event_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"""
Utility for tracking custom events to Application Insights.
"""

import os
import logging

logger = logging.getLogger(__name__)


def track_event_if_configured(event_name: str, event_data: dict):
"""Track custom event to Application Insights if configured.

Args:
event_name: Name of the event to track
event_data: Dictionary of event properties
"""
if os.getenv("APPLICATIONINSIGHTS_ENABLED", "false").lower() == "true":
try:
from azure.monitor.events.extension import track_event

track_event(event_name, event_data)
except ImportError:
logger.warning(
"azure-monitor-events-extension not installed. Skipping track_event for %s",
event_name,
)
else:
logger.debug(
"Skipping track_event for %s: Application Insights is not enabled",
event_name,
)
93 changes: 93 additions & 0 deletions code/create_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
This module creates a Flask app that serves the web interface for the chatbot.
"""

import contextvars
import functools
import json
import logging
Expand Down Expand Up @@ -29,6 +30,26 @@
from backend.batch.utilities.helpers.azure_blob_storage_client import (
AzureBlobStorageClient,
)
from backend.batch.utilities.loggers.event_utils import track_event_if_configured
from backend.batch.utilities.chat_history.auth_utils import get_authenticated_user_details
from opentelemetry import trace
from opentelemetry.sdk.trace import SpanProcessor

# Request-scoped identifiers carried across async/await boundaries.
# Defaults of "" mean .get() never raises LookupError outside a request.
_conversation_id_var: contextvars.ContextVar[str] = contextvars.ContextVar("conversation_id", default="")
_user_id_var: contextvars.ContextVar[str] = contextvars.ContextVar("user_id", default="")


class ConversationSpanProcessor(SpanProcessor):
    """Attaches conversation_id and user_id to every span created during a request."""

    def on_start(self, span, parent_context=None):
        """Copy the current context-var values onto the newly started span.

        Only non-empty values are attached, so spans created outside a
        request (where the vars hold their "" defaults) get no attributes.
        Inherited SpanProcessor hooks (on_end, shutdown, force_flush) are
        intentionally left as base-class no-ops.
        """
        conversation_id = _conversation_id_var.get()
        user_id = _user_id_var.get()
        if conversation_id:
            span.set_attribute("conversation_id", conversation_id)
        if user_id:
            span.set_attribute("user_id", user_id)


ERROR_429_MESSAGE = "We're currently experiencing a high number of requests for the service you're trying to access. Please wait a moment and try again."
ERROR_GENERIC_MESSAGE = "An error occurred. Please try again. If the problem persists, please contact the site administrator."
Expand Down Expand Up @@ -413,6 +434,32 @@ def create_app():

logger.debug("Starting web app")

    @app.before_request
    def set_span_attributes():
        """Middleware to attach conversation_id and user_id to the current OpenTelemetry span and context vars."""
        # Only JSON POST bodies can carry a conversation_id; skip everything else.
        if request.method == "POST" and request.is_json:
            try:
                # silent=True returns None on malformed JSON instead of raising.
                body = request.get_json(silent=True) or {}
                conversation_id = body.get("conversation_id", "")
                authenticated_user = get_authenticated_user_details(request_headers=request.headers)
                user_id = authenticated_user.get("user_principal_id", "")
                # Stash the IDs in context vars so the span processor can
                # propagate them to child spans created later in the request.
                _conversation_id_var.set(conversation_id)
                _user_id_var.set(user_id)
                # Also tag the already-started request span directly, since
                # on_start will not fire again for it.
                span = trace.get_current_span()
                if span:
                    if conversation_id:
                        span.set_attribute("conversation_id", conversation_id)
                    if user_id:
                        span.set_attribute("user_id", user_id)
            except Exception:
                pass  # Don't let telemetry middleware break requests

    @app.teardown_request
    def clear_span_context(exc=None):
        """Clear conversation context vars after each request."""
        # Reset to the "" defaults so values never leak into a subsequent
        # request that happens to reuse this execution context.
        _conversation_id_var.set("")
        _user_id_var.set("")

@app.route("/", defaults={"path": "index.html"})
@app.route("/<path:path>")
def static_file(path):
Expand Down Expand Up @@ -558,13 +605,28 @@ def get_file(filename):
def conversation_azure_byod():
logger.info("Method conversation_azure_byod started")
try:
authenticated_user = get_authenticated_user_details(request_headers=request.headers)
user_id = authenticated_user.get("user_principal_id", "")
conversation_id = request.json.get("conversation_id", "")

track_event_if_configured("ConversationBYODRequestReceived", {
"conversation_id": conversation_id,
"user_id": user_id,
})

if should_use_data(env_helper, azure_search_helper):
return conversation_with_data(request, env_helper)
else:
return conversation_without_data(request, env_helper)
except APIStatusError as e:
error_message = str(e)
logger.exception("Exception in /api/conversation | %s", error_message)
track_event_if_configured("ConversationBYODError", {
"conversation_id": locals().get("conversation_id", ""),
"user_id": locals().get("user_id", ""),
"error": error_message,
"error_type": type(e).__name__,
})
response_json = e.response.json()
response_message = response_json.get("error", {}).get("message", "")
response_code = response_json.get("error", {}).get("code", "")
Expand All @@ -574,6 +636,12 @@ def conversation_azure_byod():
except Exception as e:
error_message = str(e)
logger.exception("Exception in /api/conversation | %s", error_message)
track_event_if_configured("ConversationBYODError", {
"conversation_id": locals().get("conversation_id", ""),
"user_id": locals().get("user_id", ""),
"error": error_message,
"error_type": type(e).__name__,
})
return jsonify({"error": ERROR_GENERIC_MESSAGE}), 500
finally:
logger.info("Method conversation_azure_byod ended")
Expand All @@ -583,8 +651,16 @@ async def conversation_custom():

try:
logger.info("Method conversation_custom started")
authenticated_user = get_authenticated_user_details(request_headers=request.headers)
user_id = authenticated_user.get("user_principal_id", "")
user_message = request.json["messages"][-1]["content"]
conversation_id = request.json["conversation_id"]

track_event_if_configured("ConversationCustomRequestReceived", {
"conversation_id": conversation_id,
"user_id": user_id,
})

user_assistant_messages = list(
filter(
lambda x: x["role"] in ("user", "assistant"),
Expand All @@ -599,6 +675,11 @@ async def conversation_custom():
orchestrator=get_orchestrator_config(),
)

track_event_if_configured("ConversationCustomSuccess", {
"conversation_id": conversation_id,
"user_id": user_id,
})

response_obj = {
"id": "response.id",
"model": env_helper.AZURE_OPENAI_MODEL,
Expand All @@ -612,6 +693,12 @@ async def conversation_custom():
except APIStatusError as e:
error_message = str(e)
logger.exception("Exception in /api/conversation | %s", error_message)
track_event_if_configured("ConversationCustomError", {
"conversation_id": locals().get("conversation_id", ""),
"user_id": locals().get("user_id", ""),
"error": error_message,
"error_type": type(e).__name__,
})
response_json = e.response.json()
response_message = response_json.get("error", {}).get("message", "")
response_code = response_json.get("error", {}).get("code", "")
Expand All @@ -621,6 +708,12 @@ async def conversation_custom():
except Exception as e:
error_message = str(e)
logger.exception("Exception in /api/conversation | %s", error_message)
track_event_if_configured("ConversationCustomError", {
"conversation_id": locals().get("conversation_id", ""),
"user_id": locals().get("user_id", ""),
"error": error_message,
"error_type": type(e).__name__,
})
return jsonify({"error": ERROR_GENERIC_MESSAGE}), 500
finally:
logger.info("Method conversation_custom ended")
Expand Down
3 changes: 3 additions & 0 deletions infra/main.bicep
Original file line number Diff line number Diff line change
Expand Up @@ -1293,6 +1293,7 @@ module web 'modules/app/web.bicep' = {
AZURE_CLIENT_ID: managedIdentityModule.outputs.clientId // Required so LangChain AzureSearch vector store authenticates with this user-assigned managed identity
APP_ENV: appEnvironment
AZURE_SEARCH_DIMENSIONS: azureSearchDimensions
APPLICATIONINSIGHTS_ENABLED: enableMonitoring ? 'true' : 'false'
},
databaseType == 'CosmosDB'
? {
Expand Down Expand Up @@ -1392,6 +1393,7 @@ module adminweb 'modules/app/adminweb.bicep' = {
MANAGED_IDENTITY_RESOURCE_ID: managedIdentityModule.outputs.resourceId
APP_ENV: appEnvironment
AZURE_SEARCH_DIMENSIONS: azureSearchDimensions
APPLICATIONINSIGHTS_ENABLED: enableMonitoring ? 'true' : 'false'
},
databaseType == 'CosmosDB'
? {
Expand Down Expand Up @@ -1494,6 +1496,7 @@ module function 'modules/app/function.bicep' = {
APP_ENV: appEnvironment
BACKEND_URL: backendUrl
AZURE_SEARCH_DIMENSIONS: azureSearchDimensions
APPLICATIONINSIGHTS_ENABLED: enableMonitoring ? 'true' : 'false'
},
databaseType == 'CosmosDB'
? {
Expand Down
1 change: 0 additions & 1 deletion infra/modules/core/monitor/monitoring.bicep
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,6 @@ module avmAppInsights 'br/public:avm/res/insights/component:0.6.0' = {
disableIpMasking: false
flowType: 'Bluefield'
workspaceResourceId: empty(workspaceResourceId) ? '' : workspaceResourceId
diagnosticSettings: empty(workspaceResourceId) ? null : [{ workspaceResourceId: workspaceResourceId }]
}
}

Expand Down
18 changes: 17 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading