diff --git a/docs/core_docs/docs/integrations/llms/azure.mdx b/docs/core_docs/docs/integrations/llms/azure.mdx
index 6f29f04a97b2..eeb34320814d 100644
--- a/docs/core_docs/docs/integrations/llms/azure.mdx
+++ b/docs/core_docs/docs/integrations/llms/azure.mdx
@@ -6,56 +6,6 @@ LangChain.js supports integration with [Azure OpenAI](https://azure.microsoft.co
 
 You can learn more about Azure OpenAI and its difference with the OpenAI API on [this page](https://learn.microsoft.com/azure/ai-services/openai/overview). If you don't have an Azure account, you can [create a free account](https://azure.microsoft.com/free/) to get started.
 
-## Using the OpenAI SDK
-
-You can also use the `OpenAI` class to call OpenAI models hosted on Azure.
-
-For example, if your Azure instance is hosted under `https://{MY_INSTANCE_NAME}.openai.azure.com/openai/deployments/{DEPLOYMENT_NAME}`, you could initialize your instance like this:
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
-<IntegrationInstallTooltip></IntegrationInstallTooltip>
-
-```bash npm2yarn
-npm install @langchain/openai
-```
-
-```typescript
-import { OpenAI } from "@langchain/openai";
-
-const model = new OpenAI({
-  temperature: 0.9,
-  azureOpenAIApiKey: "YOUR-API-KEY",
-  azureOpenAIApiVersion: "YOUR-API-VERSION",
-  azureOpenAIApiInstanceName: "{MY_INSTANCE_NAME}",
-  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}",
-});
-const res = await model.invoke(
-  "What would be a good company name for a company that makes colorful socks?"
-);
-console.log({ res });
-```
-
-If your instance is hosted under a domain other than the default `openai.azure.com`, you'll need to use the alternate `AZURE_OPENAI_BASE_PATH` environment variable.
-For example, here's how you would connect to the domain `https://westeurope.api.microsoft.com/openai/deployments/{DEPLOYMENT_NAME}`:
-
-```typescript
-import { OpenAI } from "@langchain/openai";
-
-const model = new OpenAI({
-  temperature: 0.9,
-  azureOpenAIApiKey: "YOUR-API-KEY",
-  azureOpenAIApiVersion: "YOUR-API-VERSION",
-  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}",
-  azureOpenAIBasePath:
-    "https://westeurope.api.microsoft.com/openai/deployments", // In Node.js defaults to process.env.AZURE_OPENAI_BASE_PATH
-});
-const res = await model.invoke(
-  "What would be a good company name for a company that makes colorful socks?"
-);
-console.log({ res });
-```
-
 ## Using the Azure OpenAI SDK
 
 You'll first need to install the [`@langchain/azure-openai`](https://www.npmjs.com/package/@langchain/azure-openai) package:
@@ -87,7 +37,21 @@ const model = new AzureOpenAI({
   azureOpenAIEndpoint: "<your_endpoint>",
   azureOpenAIApiKey: "<your_key>",
   azureOpenAIApiDeploymentName: "<your_deployment_name",
-  modelName: "<your_model>",
+});
+```
+
+If you're using Azure Managed Identity, you can also pass the credentials directly to the constructor:
+
+```typescript
+import { DefaultAzureCredential } from "@azure/identity";
+import { AzureOpenAI } from "@langchain/azure-openai";
+
+const credentials = new DefaultAzureCredential();
+
+const model = new AzureOpenAI({
+  credentials,
+  azureOpenAIEndpoint: "<your_endpoint>",
+  azureOpenAIApiDeploymentName: "<your_deployment_name>",
 });
 ```
 
@@ -113,3 +77,59 @@ import CodeBlock from "@theme/CodeBlock";
 import LLMExample from "@examples/llms/azure_openai.ts";
 
 <CodeBlock language="text">{LLMExample}</CodeBlock>
+
+### Chat usage example
+
+import ChatExample from "@examples/llms/azure_openai-chat.ts";
+
+<CodeBlock language="text">{ChatExample}</CodeBlock>
+
+## Using the OpenAI SDK
+
+You can also use the `OpenAI` class to call OpenAI models hosted on Azure.
+
+For example, if your Azure instance is hosted under `https://{MY_INSTANCE_NAME}.openai.azure.com/openai/deployments/{DEPLOYMENT_NAME}`, you could initialize your instance like this:
+
+import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
+
+<IntegrationInstallTooltip></IntegrationInstallTooltip>
+
+```bash npm2yarn
+npm install @langchain/openai
+```
+
+```typescript
+import { OpenAI } from "@langchain/openai";
+
+const model = new OpenAI({
+  temperature: 0.9,
+  azureOpenAIApiKey: "YOUR-API-KEY",
+  azureOpenAIApiVersion: "YOUR-API-VERSION",
+  azureOpenAIApiInstanceName: "{MY_INSTANCE_NAME}",
+  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}",
+});
+const res = await model.invoke(
+  "What would be a good company name for a company that makes colorful socks?"
+);
+console.log({ res });
+```
+
+If your instance is hosted under a domain other than the default `openai.azure.com`, you'll need to use the alternate `AZURE_OPENAI_BASE_PATH` environment variable.
+For example, here's how you would connect to the domain `https://westeurope.api.microsoft.com/openai/deployments/{DEPLOYMENT_NAME}`:
+
+```typescript
+import { OpenAI } from "@langchain/openai";
+
+const model = new OpenAI({
+  temperature: 0.9,
+  azureOpenAIApiKey: "YOUR-API-KEY",
+  azureOpenAIApiVersion: "YOUR-API-VERSION",
+  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}",
+  azureOpenAIBasePath:
+    "https://westeurope.api.microsoft.com/openai/deployments", // In Node.js defaults to process.env.AZURE_OPENAI_BASE_PATH
+});
+const res = await model.invoke(
+  "What would be a good company name for a company that makes colorful socks?"
+);
+console.log({ res });
+```
diff --git a/docs/core_docs/docs/integrations/text_embedding/azure_openai.mdx b/docs/core_docs/docs/integrations/text_embedding/azure_openai.mdx
index fd8e062882d7..04de92f656b5 100644
--- a/docs/core_docs/docs/integrations/text_embedding/azure_openai.mdx
+++ b/docs/core_docs/docs/integrations/text_embedding/azure_openai.mdx
@@ -10,57 +10,14 @@ LangChain.js supports integration with [Azure OpenAI](https://azure.microsoft.co
 
 You can learn more about Azure OpenAI and its difference with the OpenAI API on [this page](https://learn.microsoft.com/azure/ai-services/openai/overview). If you don't have an Azure account, you can [create a free account](https://azure.microsoft.com/free/) to get started.
 
-## Using the OpenAI SDK
+## Using the Azure OpenAI SDK
 
-The `OpenAIEmbeddings` class can also use the OpenAI API on Azure to generate embeddings for a given text. By default it strips new line characters from the text, as recommended by OpenAI, but you can disable this by passing `stripNewLines: false` to the constructor.
-
-For example, if your Azure instance is hosted under `https://{MY_INSTANCE_NAME}.openai.azure.com/openai/deployments/{DEPLOYMENT_NAME}`, you
-could initialize your instance like this:
+You'll first need to install the [`@langchain/azure-openai`](https://www.npmjs.com/package/@langchain/azure-openai) package:
 
 import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
 
 <IntegrationInstallTooltip></IntegrationInstallTooltip>
 
-```bash npm2yarn
-npm install @langchain/openai
-```
-
-```typescript
-import { OpenAIEmbeddings } from "@langchain/openai";
-
-const embeddings = new OpenAIEmbeddings({
-  azureOpenAIApiKey: "YOUR-API-KEY", // In Node.js defaults to process.env.AZURE_OPENAI_API_KEY
-  azureOpenAIApiVersion: "YOUR-API-VERSION", // In Node.js defaults to process.env.AZURE_OPENAI_API_VERSION
-  azureOpenAIApiInstanceName: "{MY_INSTANCE_NAME}", // In Node.js defaults to process.env.AZURE_OPENAI_API_INSTANCE_NAME
-  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}", // In Node.js defaults to process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
-});
-```
-
-If you'd like to initialize using environment variable defaults, the `process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME`
-will be used first, then `process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME`. This can be useful if you're using these embeddings
-with another Azure OpenAI model.
-
-If your instance is hosted under a domain other than the default `openai.azure.com`, you'll need to use the alternate `AZURE_OPENAI_BASE_PATH` environment variable.
-For example, here's how you would connect to the domain `https://westeurope.api.microsoft.com/openai/deployments/{DEPLOYMENT_NAME}`:
-
-```typescript
-import { OpenAIEmbeddings } from "@langchain/openai";
-
-const embeddings = new OpenAIEmbeddings({
-  azureOpenAIApiKey: "YOUR-API-KEY",
-  azureOpenAIApiVersion: "YOUR-API-VERSION",
-  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}",
-  azureOpenAIBasePath:
-    "https://westeurope.api.microsoft.com/openai/deployments", // In Node.js defaults to process.env.AZURE_OPENAI_BASE_PATH
-});
-```
-
-## Using the Azure OpenAI SDK
-
-You'll first need to install the [`@langchain/azure-openai`](https://www.npmjs.com/package/@langchain/azure-openai) package:
-
-<IntegrationInstallTooltip></IntegrationInstallTooltip>
-
 ```bash npm2yarn
 npm install -S @langchain/azure-openai
 ```
@@ -86,7 +43,6 @@ const model = new AzureOpenAI({
   azureOpenAIEndpoint: "<your_endpoint>",
   azureOpenAIApiKey: "<your_key>",
   azureOpenAIApiDeploymentName: "<your_embedding_deployment_name",
-  modelName: "<your_model>",
 });
 ```
 
@@ -102,13 +58,55 @@ const model = new AzureOpenAI({
   credentials,
   azureOpenAIEndpoint: "<your_endpoint>",
   azureOpenAIApiDeploymentName: "<your_embedding_deployment_name",
-  modelName: "<your_model>",
 });
 ```
 
 ### Usage example
 
 import CodeBlock from "@theme/CodeBlock";
-import Example from "@examples/llms/azure_openai.ts";
+import Example from "@examples/embeddings/azure_openai.ts";
 
 <CodeBlock language="text">{Example}</CodeBlock>
+
+## Using the OpenAI SDK
+
+The `OpenAIEmbeddings` class can also use the OpenAI API on Azure to generate embeddings for a given text. By default it strips new line characters from the text, as recommended by OpenAI, but you can disable this by passing `stripNewLines: false` to the constructor.
+
+For example, if your Azure instance is hosted under `https://{MY_INSTANCE_NAME}.openai.azure.com/openai/deployments/{DEPLOYMENT_NAME}`, you
+could initialize your instance like this:
+
+<IntegrationInstallTooltip></IntegrationInstallTooltip>
+
+```bash npm2yarn
+npm install @langchain/openai
+```
+
+```typescript
+import { OpenAIEmbeddings } from "@langchain/openai";
+
+const embeddings = new OpenAIEmbeddings({
+  azureOpenAIApiKey: "YOUR-API-KEY", // In Node.js defaults to process.env.AZURE_OPENAI_API_KEY
+  azureOpenAIApiVersion: "YOUR-API-VERSION", // In Node.js defaults to process.env.AZURE_OPENAI_API_VERSION
+  azureOpenAIApiInstanceName: "{MY_INSTANCE_NAME}", // In Node.js defaults to process.env.AZURE_OPENAI_API_INSTANCE_NAME
+  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}", // In Node.js defaults to process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
+});
+```
+
+If you'd like to initialize using environment variable defaults, the `process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME`
+will be used first, then `process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME`. This can be useful if you're using these embeddings
+with another Azure OpenAI model.
+
+If your instance is hosted under a domain other than the default `openai.azure.com`, you'll need to use the alternate `AZURE_OPENAI_BASE_PATH` environment variable.
+For example, here's how you would connect to the domain `https://westeurope.api.microsoft.com/openai/deployments/{DEPLOYMENT_NAME}`:
+
+```typescript
+import { OpenAIEmbeddings } from "@langchain/openai";
+
+const embeddings = new OpenAIEmbeddings({
+  azureOpenAIApiKey: "YOUR-API-KEY",
+  azureOpenAIApiVersion: "YOUR-API-VERSION",
+  azureOpenAIApiDeploymentName: "{DEPLOYMENT_NAME}",
+  azureOpenAIBasePath:
+    "https://westeurope.api.microsoft.com/openai/deployments", // In Node.js defaults to process.env.AZURE_OPENAI_BASE_PATH
+});
+```
diff --git a/examples/src/llms/azure_openai-chat.ts b/examples/src/llms/azure_openai-chat.ts
new file mode 100644
index 000000000000..f98bdd605e64
--- /dev/null
+++ b/examples/src/llms/azure_openai-chat.ts
@@ -0,0 +1,18 @@
+import { AzureChatOpenAI } from "@langchain/azure-openai";
+
+export const run = async () => {
+  const model = new AzureChatOpenAI({
+    modelName: "gpt-4",
+    prefixMessages: [
+      {
+        role: "system",
+        content: "You are a helpful assistant that answers in pirate language",
+      },
+    ],
+    maxTokens: 50,
+  });
+  const res = await model.invoke(
+    "What would be a good company name for a company that makes colorful socks?"
+  );
+  console.log({ res });
+};
diff --git a/libs/langchain-azure-openai/README.md b/libs/langchain-azure-openai/README.md
index 44988dcf74d2..5c09cc02ea52 100644
--- a/libs/langchain-azure-openai/README.md
+++ b/libs/langchain-azure-openai/README.md
@@ -19,7 +19,7 @@ You can do so by adding appropriate fields to your project's `package.json` like
   "name": "your-project",
   "version": "0.0.0",
   "dependencies": {
-    "@langchain/openai": "^0.0.9",
+    "@langchain/azure-openai": "^0.0.4",
     "langchain": "0.0.207"
   },
   "resolutions": {
@@ -56,7 +56,6 @@ Then initialize the model and make the calls:
 import { AzureChatOpenAI } from "@langchain/azure-openai";
 
 const model = new AzureChatOpenAI({
-  modelName: "gpt-4-1106-preview",
   // Note that the following are optional, and will default to the values below
   // if not provided.
   azureOpenAIEndpoint: process.env.AZURE_OPENAI_API_ENDPOINT,
@@ -72,7 +71,6 @@ const response = await model.invoke(new HumanMessage("Hello world!"));
 import { AzureChatOpenAI } from "@langchain/azure-openai";
 
 const model = new AzureChatOpenAI({
-  modelName: "gpt-4-1106-preview",
   // Note that the following are optional, and will default to the values below
   // if not provided.
   azureOpenAIEndpoint: process.env.AZURE_OPENAI_API_ENDPOINT,
@@ -94,11 +92,41 @@ const embeddings = new AzureOpenAIEmbeddings({
   // if not provided.
   azureOpenAIEndpoint: process.env.AZURE_OPENAI_API_ENDPOINT,
   azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
-  azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
+  azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME,
 });
 const res = await embeddings.embedQuery("Hello world");
 ```
 
+## Using Azure managed identity
+
+If you're using [Azure Managed Identity](https://learn.microsoft.com/azure/ai-services/openai/how-to/managed-identity), you can also pass the credentials directly to the constructor:
+
+```typescript
+import { DefaultAzureCredential } from "@azure/identity";
+import { AzureOpenAI } from "@langchain/azure-openai";
+
+const credentials = new DefaultAzureCredential();
+
+const model = new AzureOpenAI({
+  credentials,
+  azureOpenAIEndpoint: process.env.AZURE_OPENAI_API_ENDPOINT,
+  azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
+});
+```
+
+## Compatibility with OpenAI API
+
+This library provides compatibility with the OpenAI API. You can use an API key from OpenAI's developer portal like in the example below:
+
+```typescript
+import { AzureOpenAI, OpenAIKeyCredential } from "@langchain/azure-openai";
+
+const model = new AzureOpenAI({
+  modelName: "gpt-3.5-turbo",
+  credentials: new OpenAIKeyCredential("<your_openai_api_key>"),
+});
+```
+
 ## Development
 
 To develop the Azure OpenAI package, you'll need to follow these instructions:
diff --git a/libs/langchain-azure-openai/src/chat_models.ts b/libs/langchain-azure-openai/src/chat_models.ts
index 6acda2300b23..ffa7c8030adb 100644
--- a/libs/langchain-azure-openai/src/chat_models.ts
+++ b/libs/langchain-azure-openai/src/chat_models.ts
@@ -266,15 +266,27 @@ export class AzureChatOpenAI
       (getEnvironmentVariable("AZURE_OPENAI_API_KEY") ||
         getEnvironmentVariable("OPENAI_API_KEY"));
 
+    const azureCredential =
+      fields?.credentials ??
+      (fields?.azureOpenAIApiKey ||
+      getEnvironmentVariable("AZURE_OPENAI_API_KEY")
+        ? new AzureKeyCredential(this.azureOpenAIApiKey ?? "")
+        : new OpenAIKeyCredential(this.azureOpenAIApiKey ?? ""));
+
+    const isOpenAIApiKey =
+      fields?.azureOpenAIApiKey ||
+      // eslint-disable-next-line no-instanceof/no-instanceof
+      azureCredential instanceof OpenAIKeyCredential;
+
     if (!this.azureOpenAIApiKey && !fields?.credentials) {
       throw new Error("Azure OpenAI API key not found");
     }
 
-    if (!this.azureOpenAIEndpoint) {
+    if (!this.azureOpenAIEndpoint && !isOpenAIApiKey) {
       throw new Error("Azure OpenAI Endpoint not found");
     }
 
-    if (!this.azureOpenAIApiDeploymentName) {
+    if (!this.azureOpenAIApiDeploymentName && !isOpenAIApiKey) {
       throw new Error("Azure OpenAI Deployment name not found");
     }
 
@@ -294,28 +306,25 @@ export class AzureChatOpenAI
 
     this.streaming = fields?.streaming ?? false;
 
-    const azureCredential =
-      fields?.credentials ??
-      (fields?.azureOpenAIApiKey ||
-      getEnvironmentVariable("AZURE_OPENAI_API_KEY")
-        ? new AzureKeyCredential(this.azureOpenAIApiKey ?? "")
-        : new OpenAIKeyCredential(this.azureOpenAIApiKey ?? ""));
+    const options = {
+      userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
+    };
 
-    if (isTokenCredential(azureCredential)) {
+    if (isOpenAIApiKey) {
+      this.client = new AzureOpenAIClient(
+        azureCredential as OpenAIKeyCredential
+      );
+    } else if (isTokenCredential(azureCredential)) {
       this.client = new AzureOpenAIClient(
         this.azureOpenAIEndpoint ?? "",
         azureCredential as TokenCredential,
-        {
-          userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
-        }
+        options
       );
     } else {
       this.client = new AzureOpenAIClient(
         this.azureOpenAIEndpoint ?? "",
         azureCredential as KeyCredential,
-        {
-          userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
-        }
+        options
       );
     }
   }
@@ -339,11 +348,11 @@ export class AzureChatOpenAI
     options: this["ParsedCallOptions"]
   ): Promise<EventStream<ChatCompletions>> {
     return this.caller.call(async () => {
-      if (!this.azureOpenAIApiDeploymentName) {
-        throw new Error("Azure OpenAI Deployment name not found");
-      }
+      const deploymentName =
+        this.azureOpenAIApiDeploymentName || this.modelName;
+
       const res = await this.client.streamChatCompletions(
-        this.azureOpenAIApiDeploymentName,
+        deploymentName,
         azureOpenAIMessages,
         {
           functions: options?.functions,
@@ -434,10 +443,7 @@ export class AzureChatOpenAI
     options: this["ParsedCallOptions"],
     runManager?: CallbackManagerForLLMRun
   ): Promise<ChatResult> {
-    if (!this.azureOpenAIApiDeploymentName) {
-      throw new Error("Azure OpenAI Deployment name not found");
-    }
-    const deploymentName = this.azureOpenAIApiDeploymentName;
+    const deploymentName = this.azureOpenAIApiDeploymentName || this.modelName;
     const tokenUsage: TokenUsage = {};
     const azureOpenAIMessages: ChatRequestMessage[] =
       this.formatMessages(messages);
diff --git a/libs/langchain-azure-openai/src/embeddings.ts b/libs/langchain-azure-openai/src/embeddings.ts
index e7ae702b4378..c62817f5baf4 100644
--- a/libs/langchain-azure-openai/src/embeddings.ts
+++ b/libs/langchain-azure-openai/src/embeddings.ts
@@ -23,8 +23,7 @@ export class AzureOpenAIEmbeddings
 
   batchSize = 512;
 
-  // TODO: Update to `false` on next minor release (see: https://github.com/langchain-ai/langchainjs/pull/3612)
-  stripNewLines = true;
+  stripNewLines = false;
 
   timeout?: number;
 
@@ -64,16 +63,28 @@ export class AzureOpenAIEmbeddings
       (getEnvironmentVariable("AZURE_OPENAI_API_KEY") ||
         getEnvironmentVariable("OPENAI_API_KEY"));
 
+    const azureCredential =
+      fields?.credentials ??
+      (fields?.azureOpenAIApiKey ||
+      getEnvironmentVariable("AZURE_OPENAI_API_KEY")
+        ? new AzureKeyCredential(this.azureOpenAIApiKey ?? "")
+        : new OpenAIKeyCredential(this.azureOpenAIApiKey ?? ""));
+
+    const isOpenAIApiKey =
+      fields?.azureOpenAIApiKey ||
+      // eslint-disable-next-line no-instanceof/no-instanceof
+      azureCredential instanceof OpenAIKeyCredential;
+
     if (!this.azureOpenAIApiKey && !fields?.credentials) {
       throw new Error("Azure OpenAI API key not found");
     }
 
-    if (!this.azureOpenAIApiDeploymentName) {
-      throw new Error("Azure OpenAI Completion Deployment name not found");
+    if (!this.azureOpenAIEndpoint && !isOpenAIApiKey) {
+      throw new Error("Azure OpenAI Endpoint not found");
     }
 
-    if (!this.azureOpenAIEndpoint) {
-      throw new Error("Azure OpenAI Endpoint not found");
+    if (!this.azureOpenAIApiDeploymentName && !isOpenAIApiKey) {
+      throw new Error("Azure OpenAI Deployment name not found");
     }
 
     this.modelName = fieldsWithDefaults?.modelName ?? this.modelName;
@@ -87,28 +98,25 @@ export class AzureOpenAIEmbeddings
 
     this.timeout = fieldsWithDefaults?.timeout;
 
-    const azureCredential =
-      fields?.credentials ??
-      (fields?.azureOpenAIApiKey ||
-      getEnvironmentVariable("AZURE_OPENAI_API_KEY")
-        ? new AzureKeyCredential(this.azureOpenAIApiKey ?? "")
-        : new OpenAIKeyCredential(this.azureOpenAIApiKey ?? ""));
+    const options = {
+      userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
+    };
 
-    if (isTokenCredential(azureCredential)) {
+    if (isOpenAIApiKey) {
+      this.client = new AzureOpenAIClient(
+        azureCredential as OpenAIKeyCredential
+      );
+    } else if (isTokenCredential(azureCredential)) {
       this.client = new AzureOpenAIClient(
         this.azureOpenAIEndpoint ?? "",
         azureCredential as TokenCredential,
-        {
-          userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
-        }
+        options
       );
     } else {
       this.client = new AzureOpenAIClient(
         this.azureOpenAIEndpoint ?? "",
         azureCredential as KeyCredential,
-        {
-          userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
-        }
+        options
       );
     }
   }
@@ -133,22 +141,16 @@ export class AzureOpenAIEmbeddings
   }
 
   private async getEmbeddings(input: string[]) {
-    if (!this.azureOpenAIApiDeploymentName) {
-      throw new Error("Azure OpenAI Deployment name not found");
-    }
+    const deploymentName = this.azureOpenAIApiDeploymentName || this.modelName;
 
     const res = await this.caller.call(() =>
-      this.client.getEmbeddings(
-        this.azureOpenAIApiDeploymentName ?? "",
-        input,
-        {
-          user: this.user,
-          model: this.modelName,
-          requestOptions: {
-            timeout: this.timeout,
-          },
-        }
-      )
+      this.client.getEmbeddings(deploymentName, input, {
+        user: this.user,
+        model: this.modelName,
+        requestOptions: {
+          timeout: this.timeout,
+        },
+      })
     );
 
     return res.data[0].embedding;
diff --git a/libs/langchain-azure-openai/src/llms.ts b/libs/langchain-azure-openai/src/llms.ts
index e735b54161ed..05e8e0b255a5 100644
--- a/libs/langchain-azure-openai/src/llms.ts
+++ b/libs/langchain-azure-openai/src/llms.ts
@@ -133,15 +133,27 @@ export class AzureOpenAI<
       (getEnvironmentVariable("AZURE_OPENAI_API_KEY") ||
         getEnvironmentVariable("OPENAI_API_KEY"));
 
+    const azureCredential =
+      fields?.credentials ??
+      (fields?.azureOpenAIApiKey ||
+      getEnvironmentVariable("AZURE_OPENAI_API_KEY")
+        ? new AzureKeyCredential(this.azureOpenAIApiKey ?? "")
+        : new OpenAIKeyCredential(this.azureOpenAIApiKey ?? ""));
+
+    const isOpenAIApiKey =
+      fields?.azureOpenAIApiKey ||
+      // eslint-disable-next-line no-instanceof/no-instanceof
+      azureCredential instanceof OpenAIKeyCredential;
+
     if (!this.azureOpenAIApiKey && !fields?.credentials) {
       throw new Error("Azure OpenAI API key not found");
     }
 
-    if (!this.azureOpenAIEndpoint) {
+    if (!this.azureOpenAIEndpoint && !isOpenAIApiKey) {
       throw new Error("Azure OpenAI Endpoint not found");
     }
 
-    if (!this.azureOpenAIApiDeploymentName) {
+    if (!this.azureOpenAIApiDeploymentName && !isOpenAIApiKey) {
       throw new Error("Azure OpenAI Deployment name not found");
     }
 
@@ -166,28 +178,25 @@ export class AzureOpenAI<
       throw new Error("Cannot stream results when bestOf > 1");
     }
 
-    const azureCredential =
-      fields?.credentials ??
-      (fields?.azureOpenAIApiKey ||
-      getEnvironmentVariable("AZURE_OPENAI_API_KEY")
-        ? new AzureKeyCredential(this.azureOpenAIApiKey ?? "")
-        : new OpenAIKeyCredential(this.azureOpenAIApiKey ?? ""));
+    const options = {
+      userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
+    };
 
-    if (isTokenCredential(azureCredential)) {
+    if (isOpenAIApiKey) {
+      this.client = new AzureOpenAIClient(
+        azureCredential as OpenAIKeyCredential
+      );
+    } else if (isTokenCredential(azureCredential)) {
       this.client = new AzureOpenAIClient(
         this.azureOpenAIEndpoint ?? "",
         azureCredential as TokenCredential,
-        {
-          userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
-        }
+        options
       );
     } else {
       this.client = new AzureOpenAIClient(
         this.azureOpenAIEndpoint ?? "",
         azureCredential as KeyCredential,
-        {
-          userAgentOptions: { userAgentPrefix: USER_AGENT_PREFIX },
-        }
+        options
       );
     }
   }
@@ -197,11 +206,7 @@ export class AzureOpenAI<
     options: this["ParsedCallOptions"],
     runManager?: CallbackManagerForLLMRun
   ): AsyncGenerator<GenerationChunk> {
-    if (!this.azureOpenAIApiDeploymentName) {
-      throw new Error("Azure OpenAI Completion Deployment name not found");
-    }
-
-    const deploymentName = this.azureOpenAIApiDeploymentName;
+    const deploymentName = this.azureOpenAIApiDeploymentName || this.modelName;
 
     const stream = await this.caller.call(() =>
       this.client.streamCompletions(deploymentName, [input], {
@@ -251,11 +256,7 @@ export class AzureOpenAI<
     options: this["ParsedCallOptions"],
     runManager?: CallbackManagerForLLMRun
   ): Promise<LLMResult> {
-    if (!this.azureOpenAIApiDeploymentName) {
-      throw new Error("Azure OpenAI Completion Deployment name not found");
-    }
-
-    const deploymentName = this.azureOpenAIApiDeploymentName;
+    const deploymentName = this.azureOpenAIApiDeploymentName || this.modelName;
 
     if (this.maxTokens === -1) {
       if (prompts.length !== 1) {
diff --git a/libs/langchain-azure-openai/src/tests/chat_models.int.test.ts b/libs/langchain-azure-openai/src/tests/chat_models.int.test.ts
index 2a4d7e01d717..e5c48c1e13fc 100644
--- a/libs/langchain-azure-openai/src/tests/chat_models.int.test.ts
+++ b/libs/langchain-azure-openai/src/tests/chat_models.int.test.ts
@@ -16,6 +16,8 @@ import {
 import { CallbackManager } from "@langchain/core/callbacks/manager";
 import { NewTokenIndices } from "@langchain/core/callbacks/base";
 import { InMemoryCache } from "@langchain/core/caches";
+import { getEnvironmentVariable } from "@langchain/core/utils/env";
+import { OpenAIKeyCredential } from "@azure/openai";
 import { AzureChatOpenAI } from "../chat_models.js";
 
 test("Test ChatOpenAI", async () => {
@@ -790,3 +792,19 @@ test("Test ChatOpenAI token usage reporting for streaming calls", async () => {
     expect(streamingTokenUsed).toEqual(nonStreamingTokenUsed);
   }
 });
+
+test("Test ChatOpenAI with OpenAI API key credentials", async () => {
+  const openAiKey: string = getEnvironmentVariable("OPENAI_API_KEY") ?? "";
+  const credentials = new OpenAIKeyCredential(openAiKey);
+
+  const chat = new AzureChatOpenAI({
+    modelName: "gpt-3.5-turbo",
+    maxTokens: 5,
+    credentials,
+    azureOpenAIEndpoint: "",
+    azureOpenAIApiDeploymentName: "",
+  });
+  const message = new HumanMessage("Hello!");
+  const res = await chat.invoke([["system", "Say hi"], message]);
+  console.log(res);
+});
diff --git a/libs/langchain-azure-openai/src/tests/embeddings.int.test.ts b/libs/langchain-azure-openai/src/tests/embeddings.int.test.ts
index 49d230cec189..90babbcc8410 100644
--- a/libs/langchain-azure-openai/src/tests/embeddings.int.test.ts
+++ b/libs/langchain-azure-openai/src/tests/embeddings.int.test.ts
@@ -1,6 +1,7 @@
 import { test, expect } from "@jest/globals";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
 import { ClientSecretCredential, TokenCredential } from "@azure/identity";
+import { OpenAIKeyCredential } from "@azure/openai";
 import { AzureOpenAIEmbeddings } from "../embeddings.js";
 
 test("Test OpenAIEmbeddings.embedQuery", async () => {
@@ -68,3 +69,16 @@ test("Test OpenAIEmbeddings.embedQuery with TokenCredentials", async () => {
   const res = await embeddings.embedQuery("Hello world");
   expect(typeof res[0]).toBe("number");
 });
+
+test("Test OpenAIEmbeddings.embedQuery with OpenAI API key credentials", async () => {
+  const openAiKey: string = getEnvironmentVariable("OPENAI_API_KEY") ?? "";
+  const credentials = new OpenAIKeyCredential(openAiKey);
+
+  const embeddings = new AzureOpenAIEmbeddings({
+    credentials,
+    azureOpenAIEndpoint: "",
+    azureOpenAIApiDeploymentName: "",
+  });
+  const res = await embeddings.embedQuery("Hello world");
+  expect(typeof res[0]).toBe("number");
+});
diff --git a/libs/langchain-azure-openai/src/tests/llms.int.test.ts b/libs/langchain-azure-openai/src/tests/llms.int.test.ts
index 6a03c9c5152d..58bcbc80677e 100644
--- a/libs/langchain-azure-openai/src/tests/llms.int.test.ts
+++ b/libs/langchain-azure-openai/src/tests/llms.int.test.ts
@@ -6,6 +6,7 @@ import { NewTokenIndices } from "@langchain/core/callbacks/base";
 import { ClientSecretCredential } from "@azure/identity";
 import { TokenCredential } from "@azure/core-auth";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
+import { OpenAIKeyCredential } from "@azure/openai";
 import { AzureOpenAI } from "../llms.js";
 
 test("Test OpenAI", async () => {
@@ -325,3 +326,18 @@ test("Test OpenAI with Token credentials ", async () => {
   const res = await model.invoke("Print hello world");
   console.log({ res });
 });
+
+test("Test OpenAI with OpenAI API key credentials ", async () => {
+  const openAiKey: string = getEnvironmentVariable("OPENAI_API_KEY") ?? "";
+  const credentials = new OpenAIKeyCredential(openAiKey);
+
+  const model = new AzureOpenAI({
+    maxTokens: 5,
+    modelName: "davinci-002",
+    credentials,
+    azureOpenAIEndpoint: "",
+    azureOpenAIApiDeploymentName: "",
+  });
+  const res = await model.invoke("Print hello world");
+  console.log({ res });
+});