diff --git a/sdk/ai/azure-ai-projects/.env.template b/sdk/ai/azure-ai-projects/.env.template
index 11db8b598a50..190950a3cc5b 100644
--- a/sdk/ai/azure-ai-projects/.env.template
+++ b/sdk/ai/azure-ai-projects/.env.template
@@ -43,7 +43,9 @@ BROWSER_AUTOMATION_PROJECT_CONNECTION_ID=
 OPENAPI_PROJECT_CONNECTION_ID=
 AI_SEARCH_USER_INPUT=
 SHAREPOINT_USER_INPUT=
-
+FABRIC_SEARCH_USER_INPUT=
+BING_CUSTOM_USER_INPUT=
+A2A_USER_INPUT=
 
 #######################################################################
 #
diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py
index 7557d27629ac..f24b96604038 100644
--- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py
+++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py
@@ -25,6 +25,7 @@
     3) BING_CUSTOM_SEARCH_PROJECT_CONNECTION_ID - The Bing Custom Search project connection ID, as found in the "Connections" tab
        in your Microsoft Foundry project.
     4) BING_CUSTOM_SEARCH_INSTANCE_NAME - The Bing Custom Search instance name
+    5) BING_CUSTOM_USER_INPUT - (Optional) The question to ask. If not set, you will be prompted.
 """
 
 import os
@@ -72,7 +73,7 @@
         )
         print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")
 
-        user_input = input("Enter your question (e.g., 'Tell me more about foundry agent service'): \n")
+        user_input = os.environ.get("BING_CUSTOM_USER_INPUT") or input("Enter your question: \n")
 
         # Send initial request that will trigger the Bing Custom Search tool
         stream_response = openai_client.responses.create(
diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py
index 09ff276795df..526ea0073fc6 100644
--- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py
+++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py
@@ -23,6 +23,7 @@
        the "Models + endpoints" tab in your Microsoft Foundry project.
     3) FABRIC_PROJECT_CONNECTION_ID - The Fabric project connection ID, as found in the "Connections" tab
        in your Microsoft Foundry project.
+    4) FABRIC_SEARCH_USER_INPUT - (Optional) The question to ask. If not set, you will be prompted.
 """
 
 import os
@@ -65,7 +66,7 @@
         )
         print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")
 
-        user_input = input("Enter your question (e.g., 'Tell me about sales records'): \n")
+        user_input = os.environ.get("FABRIC_SEARCH_USER_INPUT") or input("Enter your question: \n")
 
         response = openai_client.responses.create(
             tool_choice="required",
diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py
index d2c9149c9cea..34922b999778 100644
--- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py
+++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py
@@ -24,8 +24,9 @@
        the "Models + endpoints" tab in your Microsoft Foundry project.
     3) A2A_PROJECT_CONNECTION_ID - The A2A project connection ID, as found in the "Connections" tab
        in your Microsoft Foundry project.
-    4) (Optional) A2A_ENDPOINT - If the connection is missing target i.e. if it is of "Custom keys" type, we need to set the A2A
+    4) A2A_ENDPOINT - (Optional) If the connection is missing target i.e. if it is of "Custom keys" type, we need to set the A2A
        endpoint on the tool.
+    5) A2A_USER_INPUT - (Optional) The question to ask. If not set, you will be prompted.
 """
 
 import os
@@ -66,7 +67,7 @@
         )
         print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")
 
-        user_input = input("Enter your question (e.g., 'What can the secondary agent do?'): \n")
+        user_input = os.environ.get("A2A_USER_INPUT") or input("Enter your question (e.g., 'What can the secondary agent do?'): \n")
 
         stream_response = openai_client.responses.create(
             stream=True,
diff --git a/sdk/ai/azure-ai-projects/tests/conftest.py b/sdk/ai/azure-ai-projects/tests/conftest.py
index 39deacea7ac1..c28a47fb1170 100644
--- a/sdk/ai/azure-ai-projects/tests/conftest.py
+++ b/sdk/ai/azure-ai-projects/tests/conftest.py
@@ -25,6 +25,7 @@
 if not load_dotenv(find_dotenv(), override=True):
     print("Did not find a .env file. Using default environment variable values for tests.")
 
+
 def pytest_collection_modifyitems(items):
     if os.environ.get("AZURE_TEST_RUN_LIVE") == "true":
         return
diff --git a/sdk/ai/azure-ai-projects/tests/test_base.py b/sdk/ai/azure-ai-projects/tests/test_base.py
index 188a1564d1ab..895708f87aba 100644
--- a/sdk/ai/azure-ai-projects/tests/test_base.py
+++ b/sdk/ai/azure-ai-projects/tests/test_base.py
@@ -64,6 +64,9 @@
     azure_resource_group="sanitized-resource-group",
     ai_search_user_input="What is Azure AI Projects?",
     sharepoint_user_input="What is SharePoint?",
+    fabric_user_input="List all customers!",
+    a2a_user_input="What can the secondary agent do?",
+    bing_custom_user_input="Tell me more about foundry agent service",
     memory_store_chat_model_deployment_name="gpt-4.1-mini",
     memory_store_embedding_model_deployment_name="text-embedding-ada-002",
 )
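
All three samples apply the same pattern: read the question from a *_USER_INPUT environment variable and fall back to an interactive prompt when it is unset, so automated test runs can skip the prompt. Below is a minimal standalone sketch of that pattern using only the standard library; the helper name get_user_input and the __main__ usage are illustrative, not part of the SDK, while A2A_USER_INPUT is one of the variables introduced in this change.

import os


def get_user_input(env_var: str, prompt: str) -> str:
    # Return the environment variable's value if it is set and non-empty;
    # otherwise ask interactively. `or` treats an empty string as "not set",
    # which is what lets non-interactive runs bypass the prompt.
    return os.environ.get(env_var) or input(prompt)


if __name__ == "__main__":
    # Example: export A2A_USER_INPUT="What can the secondary agent do?" to skip the prompt.
    question = get_user_input("A2A_USER_INPUT", "Enter your question: \n")
    print(f"Question: {question}")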