@@ -76,6 +76,8 @@ def __init__(self):
7676
7777 async def initialize (self ):
7878 """Initialize Azure AI Foundry connection"""
79+ logger .info ("=== Starting Azure AI Foundry initialization ===" )
80+
7981 if not AZURE_AI_AVAILABLE :
8082 logger .warning ("Azure AI packages not available" )
8183 return False
@@ -86,29 +88,99 @@ async def initialize(self):
8688 model_deployment_name = os .getenv ('AZURE_AI_MODEL_DEPLOYMENT_NAME' , 'gpt-4-1106-preview' )
8789 app_service_url = os .getenv ('AZURE_APP_SERVICE_URL' , 'http://localhost:8000' )
8890
91+ logger .info ("=== Environment Variables ===" )
92+ logger .info (f" AZURE_AI_PROJECT_ENDPOINT: { project_endpoint } " )
93+ logger .info (f" AZURE_AI_MODEL_DEPLOYMENT_NAME: { model_deployment_name } " )
94+ logger .info (f" AZURE_APP_SERVICE_URL: { app_service_url } " )
95+
96+ # Also log other Azure-related env vars for debugging
97+ other_vars = [
98+ 'AZURE_OPENAI_ENDPOINT' ,
99+ 'AZURE_OPENAI_DEPLOYMENT_NAME' ,
100+ 'AZURE_TENANT_ID' ,
101+ 'AZURE_CLIENT_ID' ,
102+ 'AZURE_CLIENT_SECRET'
103+ ]
104+ for var in other_vars :
105+ value = os .getenv (var )
106+ if value :
107+ logger .info (f" { var } : { value [:50 ]} ..." )
108+ else :
109+ logger .info (f" { var } : Not set" )
110+
89111 if not project_endpoint :
90- logger .warning ("AZURE_AI_PROJECT_ENDPOINT not configured" )
112+ logger .error ("AZURE_AI_PROJECT_ENDPOINT not configured" )
91113 return False
92114
93- # Initialize Azure AI Project Client with managed identity
94- credential = DefaultAzureCredential ()
95- self .project_client = AIProjectClient (
96- endpoint = project_endpoint ,
97- credential = credential
98- )
115+ # Initialize Azure AI Project Client using connection string format
116+ logger .info ("=== Initializing Azure Credentials ===" )
117+ try :
118+ credential = DefaultAzureCredential ()
119+ logger .info ("DefaultAzureCredential created successfully" )
120+
121+ # Test credential by getting a token
122+ logger .info ("Testing credential by requesting token..." )
123+ token = credential .get_token ("https://cognitiveservices.azure.com/.default" )
124+ logger .info (f"Token acquired successfully, expires: { token .expires_on } " )
125+ except Exception as cred_error :
126+ logger .error (f"Credential initialization failed: { cred_error } " )
127+ return False
128+
129+ # Try the from_connection_string method first
130+ logger .info ("=== Creating AIProjectClient ===" )
131+ logger .info (f"Using connection string: { project_endpoint } " )
132+ try :
133+ self .project_client = AIProjectClient .from_connection_string (
134+ conn_str = project_endpoint ,
135+ credential = credential
136+ )
137+ logger .info ("✓ Successfully created AIProjectClient with connection string" )
138+ except Exception as conn_error :
139+ logger .error (f"✗ Connection string method failed: { conn_error } " )
140+ logger .info ("Trying fallback method with direct endpoint..." )
141+ # Fallback to direct endpoint initialization
142+ try :
143+ # Convert connection string to endpoint URL
144+ parts = project_endpoint .split (';' )
145+ if len (parts ) >= 4 :
146+ endpoint_url = f"https://{ parts [3 ]} .{ parts [0 ]} "
147+ logger .info (f"Converted to endpoint URL: { endpoint_url } " )
148+ self .project_client = AIProjectClient (
149+ endpoint = endpoint_url ,
150+ credential = credential
151+ )
152+ logger .info ("✓ Successfully created AIProjectClient with direct endpoint" )
153+ else :
154+ raise Exception ("Invalid connection string format" )
155+ except Exception as direct_error :
156+ logger .error (f"✗ Direct endpoint method also failed: { direct_error } " )
157+ return False
99158
100159 # Set MCP server URL to point to our own app
101160 self .mcp_server_url = f"{ app_service_url } /mcp/stream"
102161 self .model_deployment_name = model_deployment_name
103162
104- logger .info (f"Azure AI Foundry initialized with endpoint: { project_endpoint } " )
105- logger .info (f"MCP Server URL: { self .mcp_server_url } " )
163+ logger .info (f"Azure AI Foundry initialized successfully" )
164+ logger .info (f" Project client: { type (self .project_client )} " )
165+ logger .info (f" MCP Server URL: { self .mcp_server_url } " )
166+ logger .info (f" Model deployment: { self .model_deployment_name } " )
167+
168+ # Test the connection by trying to get project info
169+ try :
170+ logger .info ("Testing connection by checking project endpoint..." )
171+ endpoint_url = self .project_client .get_endpoint_url ()
172+ logger .info (f"Project endpoint URL: { endpoint_url } " )
173+ except Exception as test_error :
174+ logger .warning (f"Failed to test connection: { test_error } " )
106175
107176 self .initialized = True
108177 return True
109178
110179 except Exception as e :
111180 logger .error (f"Failed to initialize Azure AI Foundry: { e } " )
181+ logger .error (f"Exception type: { type (e )} " )
182+ import traceback
183+ logger .error (f"Full traceback: { traceback .format_exc ()} " )
112184 return False
113185
114186 async def create_agent (self ) -> Optional [str ]:
@@ -605,6 +677,124 @@ async def health():
605677 "ai_foundry_initialized" : ai_foundry_service .initialized
606678 }
607679
def _mask_env_value(name: str, value: str) -> str:
    """Mask obviously sensitive env values (names containing SECRET/KEY)."""
    if 'SECRET' in name or 'KEY' in name:
        return f"{value[:10]}...***"
    return value


def _collect_env_report() -> dict:
    """Return the Azure-related env vars as a dict, masking secrets.

    Unset (or empty) variables are reported as None so the caller can see
    exactly which configuration is missing.
    """
    env_vars = [
        'AZURE_AI_PROJECT_ENDPOINT',
        'AZURE_AI_MODEL_DEPLOYMENT_NAME',
        'AZURE_APP_SERVICE_URL',
        'AZURE_OPENAI_ENDPOINT',
        'AZURE_OPENAI_DEPLOYMENT_NAME',
        'AZURE_TENANT_ID',
        'AZURE_CLIENT_ID',
        'AZURE_CLIENT_SECRET',
    ]
    report = {}
    for var in env_vars:
        value = os.getenv(var)
        report[var] = _mask_env_value(var, value) if value else None
    return report


def _test_credential() -> dict:
    """Try to acquire a token with DefaultAzureCredential; report the outcome.

    The azure.identity import lives inside the try so that a missing package
    is reported in the response instead of crashing the debug endpoint.
    """
    import traceback
    try:
        from azure.identity import DefaultAzureCredential
        credential = DefaultAzureCredential()
        token = credential.get_token("https://cognitiveservices.azure.com/.default")
        return {
            "success": True,
            "expires_on": token.expires_on,
            "message": "Credential test successful",
        }
    except Exception as e:
        return {
            "success": False,
            "error": str(e),
            "traceback": traceback.format_exc(),
        }


def _connection_string_to_url(conn_str: str) -> str:
    """Convert a ';'-separated project connection string to an endpoint URL.

    Mirrors the fallback used during service initialization: presumably
    parts[0] is the regional host and parts[3] the project name — TODO
    confirm against the Azure AI Projects connection-string format.
    Raises on fewer than 4 segments.
    """
    parts = conn_str.split(';')
    if len(parts) < 4:
        raise Exception("Invalid connection string format")
    return f"https://{parts[3]}.{parts[0]}"


def _test_project_client() -> dict:
    """Attempt to construct an AIProjectClient and report which method worked.

    Tries from_connection_string first, then falls back to a direct-endpoint
    URL derived from the connection string. Never raises; all failures are
    folded into the returned dict.
    """
    import traceback
    if not AZURE_AI_AVAILABLE:
        return {"success": False, "error": "Azure AI packages not available"}
    try:
        project_endpoint = os.getenv('AZURE_AI_PROJECT_ENDPOINT')
        if not project_endpoint:
            return {
                "success": False,
                "error": "AZURE_AI_PROJECT_ENDPOINT not configured",
            }
        from azure.ai.projects import AIProjectClient
        from azure.identity import DefaultAzureCredential
        credential = DefaultAzureCredential()
        try:
            AIProjectClient.from_connection_string(
                conn_str=project_endpoint,
                credential=credential,
            )
            return {
                "success": True,
                "method": "connection_string",
                "message": "Project client created successfully",
            }
        except Exception as conn_error:
            try:
                endpoint_url = _connection_string_to_url(project_endpoint)
                AIProjectClient(
                    endpoint=endpoint_url,
                    credential=credential,
                )
                return {
                    "success": True,
                    "method": "direct_endpoint",
                    "endpoint_url": endpoint_url,
                    "message": "Project client created with fallback method",
                }
            except Exception as direct_error:
                return {
                    "success": False,
                    "connection_string_error": str(conn_error),
                    "direct_endpoint_error": str(direct_error),
                    "traceback": traceback.format_exc(),
                }
    except Exception as e:
        return {
            "success": False,
            "error": str(e),
            "traceback": traceback.format_exc(),
        }


@app.get("/debug/azure-ai-status")
async def azure_ai_debug_status():
    """Comprehensive Azure AI debugging endpoint.

    Reports package availability, service initialization state, the relevant
    environment variables (secrets masked), a live credential check, and the
    result of constructing an AIProjectClient — so deployment problems can be
    diagnosed from the running app.

    Fix over the previous version: azure.identity is no longer imported
    unconditionally at the top of the handler, so the endpoint still returns
    a useful status payload when the Azure packages are not installed.
    """
    return {
        "timestamp": datetime.now().isoformat(),
        "azure_ai_packages_available": AZURE_AI_AVAILABLE,
        "ai_foundry_service_initialized": ai_foundry_service.initialized,
        "environment_variables": _collect_env_report(),
        "credential_test": _test_credential(),
        "project_client_test": _test_project_client(),
        # Placeholder kept for response-shape compatibility; it was never
        # populated by the original handler either.
        "agent_test": {},
    }
797+
608798@app .get ("/tools" )
609799async def list_tools ():
610800 """REST endpoint to list available tools"""
@@ -765,6 +955,17 @@ async def send_chat_message(chat_data: ChatMessage):
765955 logger .error (f"Failed to send chat message: { e } " )
766956 raise HTTPException (status_code = 500 , detail = "Failed to process chat message" )
767957
@app.get("/api/chat/status")
async def chat_status():
    """Report the availability of the Azure AI Foundry chat service.

    Returns the service's initialization flag, whether the Azure AI
    packages imported, and the raw configuration values read from the
    environment (None for any that are unset).
    """
    env = os.getenv
    payload = {
        "available": ai_foundry_service.initialized,
        "azure_ai_available": AZURE_AI_AVAILABLE,
        "project_endpoint": env('AZURE_AI_PROJECT_ENDPOINT'),
        "model_deployment": env('AZURE_AI_MODEL_DEPLOYMENT_NAME'),
        "app_service_url": env('AZURE_APP_SERVICE_URL'),
    }
    return payload
968+
768969# MCP Streamable HTTP Endpoints
769970@app .get ("/mcp/stream" )
770971async def mcp_stream_info ():
0 commit comments