Commit

fix: Fix Incorrect Tweet ID Parameter Passed to sendTweet Function (#2430)

* correct id

* skip action if model is not supported

* warn

* update warn
tcm390 authored Jan 17, 2025
1 parent a44fa94 commit f4e54ca
Showing 2 changed files with 33 additions and 17 deletions.
packages/client-twitter/src/search.ts — 2 changes: 1 addition & 1 deletion

@@ -284,7 +284,7 @@ export class TwitterSearchClient
                 response,
                 message.roomId,
                 this.twitterUsername,
-                tweetId
+                selectedTweet.id
             );
             return memories;
         };
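For context, a rough reconstruction of the corrected call site follows. Only the final argument changed in this commit; the first argument and the exact sendTweet signature are assumptions inferred from the visible arguments, not shown in the diff.

    // Sketch of the corrected call site (signature assumed from the visible arguments).
    // Passing selectedTweet.id ensures the reply is threaded under the tweet that was
    // actually selected for engagement, rather than whatever `tweetId` referred to.
    const memories = await sendTweet(
        this.client,           // Twitter client wrapper (assumed)
        response,              // generated reply content
        message.roomId,
        this.twitterUsername,
        selectedTweet.id       // previously: tweetId
    );
    return memories;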
packages/plugin-node/src/services/image.ts — 48 changes: 32 additions & 16 deletions
@@ -189,7 +189,6 @@ class OpenAIImageProvider implements ImageProvider
     }
 }
 
-
 class GroqImageProvider implements ImageProvider {
     constructor(private runtime: IAgentRuntime) {}
 
@@ -233,7 +232,6 @@ class GroqImageProvider implements ImageProvider
     }
 }
 
-
 class GoogleImageProvider implements ImageProvider {
     constructor(private runtime: IAgentRuntime) {}
 
@@ -299,11 +297,18 @@ export class ImageDescriptionService
         this.runtime = runtime;
     }
 
-    private async initializeProvider(): Promise<void> {
+    private async initializeProvider(): Promise<boolean> {
         if (!this.runtime) {
             throw new Error("Runtime is required for image recognition");
         }
 
+        const availableModels = [
+            ModelProviderName.LLAMALOCAL,
+            ModelProviderName.GOOGLE,
+            ModelProviderName.OPENAI,
+            ModelProviderName.GROQ,
+        ].join(", ");
+
         const model = models[this.runtime?.character?.modelProvider];
 
         if (this.runtime.imageVisionModelProvider) {
@@ -326,15 +331,17 @@ export class ImageDescriptionService
                 this.provider = new OpenAIImageProvider(this.runtime);
                 elizaLogger.debug("Using openai for vision model");
             } else if (
-                this.runtime.imageVisionModelProvider ===
-                ModelProviderName.GROQ
+                this.runtime.imageVisionModelProvider === ModelProviderName.GROQ
             ) {
                 this.provider = new GroqImageProvider(this.runtime);
                 elizaLogger.debug("Using Groq for vision model");
             } else {
-                elizaLogger.error(
-                    `Unsupported image vision model provider: ${this.runtime.imageVisionModelProvider}`
+                elizaLogger.warn(
+                    `Unsupported image vision model provider: ${this.runtime.imageVisionModelProvider}. ` +
+                        `Please use one of the following: ${availableModels}. ` +
+                        `Update the 'imageVisionModelProvider' field in the character file.`
                 );
+                return false;
             }
         } else if (model === models[ModelProviderName.LLAMALOCAL]) {
             this.provider = new LocalImageProvider();
@@ -350,8 +357,15 @@ export class ImageDescriptionService
             this.provider = new OpenAIImageProvider(this.runtime);
         }
 
-        await this.provider.initialize();
-        this.initialized = true;
+        try {
+            await this.provider.initialize();
+        } catch (error) {
+            elizaLogger.error(
+                `Failed to initialize the image vision model provider: ${this.runtime.imageVisionModelProvider}`
+            );
+            return false;
+        }
+        return true;
     }
 
     private async loadImageData(
@@ -416,15 +430,17 @@ export class ImageDescriptionService
         imageUrl: string
     ): Promise<{ title: string; description: string }> {
         if (!this.initialized) {
-            await this.initializeProvider();
+            this.initialized = await this.initializeProvider();
         }
 
-        try {
-            const { data, mimeType } = await this.loadImageData(imageUrl);
-            return await this.provider!.describeImage(data, mimeType);
-        } catch (error) {
-            elizaLogger.error("Error in describeImage:", error);
-            throw error;
+        if (this.initialized) {
+            try {
+                const { data, mimeType } = await this.loadImageData(imageUrl);
+                return await this.provider!.describeImage(data, mimeType);
+            } catch (error) {
+                elizaLogger.error("Error in describeImage:", error);
+                throw error;
+            }
         }
     }
 }
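Because initializeProvider now reports failure instead of throwing, describeImage can resolve without a value when no supported vision provider is available. A minimal, hypothetical call-site sketch; how the service instance is obtained is not shown in this diff and is assumed here.

    // Hypothetical caller; imageDescriptionService is an ImageDescriptionService instance.
    const result = await imageDescriptionService.describeImage(imageUrl);
    if (!result) {
        // No supported vision provider could be initialized: skip the image-dependent action.
        elizaLogger.warn("Image description unavailable; skipping action");
    } else {
        elizaLogger.debug(`${result.title}: ${result.description}`);
    }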
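The new warning directs users to the character file. Below is a hedged sketch of setting a supported vision provider there; the exact Character fields and the core package name are assumptions, and only the values listed in availableModels are accepted.

    // Sketch of a character definition fragment; field names assumed from the warning text,
    // package name may differ by release.
    import { ModelProviderName, type Character } from "@elizaos/core";

    export const exampleCharacter: Partial<Character> = {
        name: "ExampleAgent",
        modelProvider: ModelProviderName.OPENAI,
        // Must be one of: LLAMALOCAL, GOOGLE, OPENAI, GROQ
        imageVisionModelProvider: ModelProviderName.OPENAI,
    };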
