Here is a working example of LangChain using Azure OpenAI's chat endpoint. There are a few ways to pass in the endpoint details. Please try it out; your code snippet does not seem to include these details, so it may be that the API key or endpoint is not set correctly.
import os
from langchain.chat_models import AzureChatOpenAI
from langchain.schema import HumanMessage
# Set up your Azure OpenAI credentials
os.environ["OPENAI_API_TYPE"] = "azure"
os.environ["OPENAI_API_KEY"] = "ce886d51axxxxxxxx"
os.environ["OPENAI_API_BASE"] = "https://aoai-xxxxxxxxxxx.openai.azure.com/"
os.environ["OPENAI_API_VERSION"] = "2023-12-01-preview" # Update if necessary
# Initialize the Azure OpenAI LLM with LangChain
llm = AzureChatOpenAI(
    deployment_name="gpt4o",  # Replace with your deployment name
    temperature=0.7,          # Adjust the temperature as needed
    max_tokens=150,           # Set the maximum number of tokens
)
msg = HumanMessage(content="Explain step by step. How old is the president of USA?")
response = llm(messages=[msg])
print(response.content)
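As an alternative to environment variables, the same details can be passed directly to the constructor. Below is a minimal sketch assuming the legacy langchain.chat_models import used above; note that the parameter names (openai_api_key, openai_api_base, openai_api_version, openai_api_type) differ in the newer langchain_openai package (api_key, azure_endpoint, api_version, azure_deployment).

from langchain.chat_models import AzureChatOpenAI
from langchain.schema import HumanMessage

# Pass the Azure endpoint details as constructor arguments instead of environment variables.
# The key, endpoint, and deployment name below are placeholders; replace them with your own.
llm = AzureChatOpenAI(
    openai_api_key="ce886d51axxxxxxxx",
    openai_api_base="https://aoai-xxxxxxxxxxx.openai.azure.com/",
    openai_api_version="2023-12-01-preview",
    openai_api_type="azure",
    deployment_name="gpt4o",
    temperature=0.7,
    max_tokens=150,
)

print(llm(messages=[HumanMessage(content="Explain step by step. How old is the president of USA?")]).content)

Either approach works; the constructor arguments take precedence over the environment variables, which makes this form handy when you need to talk to more than one Azure OpenAI resource in the same process.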