I have created a standalone foundry project (outside of a hub). I want to use the AI Agent service, and I want to use it with models already deployed on a connected resource. Is that possible? I have connected the deployed models to the project and can see them in the portal, but when using the SDK I get a Run error: {'code': 'invalid_engine_error', 'message': 'Failed to resolve model info for: gpt-4o'}.
What configuration is needed? I have verified that the models I am using are available in the project's region (currently Sweden Central). I have also set the environment variables OPENAI_API_VERSION, AZURE_OPENAI_ENDPOINT, and AZURE_OPENAI_API_KEY.
Here is my code snippet:
async with DefaultAzureCredential() as creds:
agents_client = AgentsClient(
endpoint=os.environ[PROJECT_ENDPOINT],
credential=creds,
)
async with agents_client:
agent = await agents_client.create_agent(
model=os.environ['GPT_4O_DEPLOYMENT_NAME'], name="my-agent", instructions="You are helpful agent"
)
print(f"Created agent, agent ID: {agent.id}")
thread = await agents_client.threads.create()
print(f"Created thread, thread ID: {thread.id}")
message = await agents_client.messages.create(
thread_id=thread.id, role="user", content="How to choose a washing machine"
)
print(f"Created message, message ID: {message.id}")
run = await agents_client.runs.create(thread_id=thread.id, agent_id=agent.id)
# Poll the run as long as run status is queued or in progress
while run.status in ["queued", "in_progress", "requires_action"]:
# Wait for a second
time.sleep(1)
run = await agents_client.runs.get(thread_id=thread.id, run_id=run.id)
print(f"Run status: {run.status}")
if run.status == "failed":
print(f"Run error: {run.last_error}")
await agents_client.delete_agent(agent.id)
print("Deleted agent")
messages = agents_client.messages.list(
thread_id=thread.id,
order=ListSortOrder.ASCENDING,
)
async for msg in messages:
last_part = msg.content[-1]
if isinstance(last_part, MessageTextContent):
print(f"{msg.role}: {last_part.text.value}")