Skip to content

Commit

Permalink
Merge branch 'develop' into feat/view-chat-history-with-agent-in-clie…
Browse files Browse the repository at this point in the history
…nt-UI
  • Loading branch information
wtfsayo authored Jan 15, 2025
2 parents fb180a7 + 0067a07 commit b24c35f
Show file tree
Hide file tree
Showing 2 changed files with 100 additions and 3 deletions.
7 changes: 5 additions & 2 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,11 @@ USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI/1536, leave blank for local

# Eternal AI's Decentralized Inference API
ETERNALAI_URL=
ETERNALAI_MODEL= # Default: "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16"
ETERNALAI_CHAIN_ID=45762 #Default: "45762"
ETERNALAI_MODEL= # Default: "NousResearch/Hermes-3-Llama-3.1-70B-FP8"
ETERNALAI_CHAIN_ID=8453 # Default: "8453"
ETERNALAI_RPC_URL= # Ex: https://mainnet.base.org/
ETERNALAI_AGENT_CONTRACT_ADDRESS= # Ex: 0xAed016e060e2fFE3092916b1650Fc558D62e1CCC
ETERNALAI_AGENT_ID= # Ex: 1711
ETERNALAI_API_KEY=
ETERNALAI_LOG=false # Default: false

Expand Down
96 changes: 95 additions & 1 deletion packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ import {
} from "./types.ts";
import { fal } from "@fal-ai/client";
import { tavily } from "@tavily/core";
import BigNumber from "bignumber.js";
import {createPublicClient, http} from "viem";

type Tool = CoreTool<any, any>;
type StepResult = AIStepResult<any>;
Expand Down Expand Up @@ -164,6 +166,85 @@ async function truncateTiktoken(
}
}

/**
* Get OnChain EternalAI System Prompt
* @returns System Prompt
*/
/**
 * Get OnChain EternalAI System Prompt.
 *
 * Reads the agent's system prompt from the contract configured via the
 * ETERNALAI_AGENT_ID / ETERNALAI_RPC_URL / ETERNALAI_AGENT_CONTRACT_ADDRESS
 * settings, then resolves it (e.g. ipfs:// URIs) via fetchEternalAISystemPrompt.
 *
 * @param runtime agent runtime used to look up the EternalAI settings
 * @returns the resolved system prompt, or undefined when the settings are
 *          missing or the on-chain read fails
 */
async function getOnChainEternalAISystemPrompt(runtime: IAgentRuntime): Promise<string | undefined> {
    const agentId = runtime.getSetting("ETERNALAI_AGENT_ID");
    const providerUrl = runtime.getSetting("ETERNALAI_RPC_URL");
    const contractAddress = runtime.getSetting("ETERNALAI_AGENT_CONTRACT_ADDRESS");
    if (agentId && providerUrl && contractAddress) {
        // Minimal ABI for the single view function we call.
        const contractABI = [
            {
                inputs: [{ internalType: "uint256", name: "_agentId", type: "uint256" }],
                name: "getAgentSystemPrompt",
                outputs: [{ internalType: "bytes[]", name: "", type: "bytes[]" }],
                stateMutability: "view",
                type: "function",
            },
        ];

        const publicClient = createPublicClient({
            transport: http(providerUrl),
        });

        try {
            const validAddress: `0x${string}` = contractAddress as `0x${string}`;
            // viem encodes uint256 arguments from native bigint values;
            // a BigNumber instance is not a valid ABI input and would be
            // mis-serialized, so convert the setting with BigInt().
            const result = await publicClient.readContract({
                address: validAddress,
                abi: contractABI,
                functionName: "getAgentSystemPrompt",
                args: [BigInt(agentId)],
            });
            if (result) {
                elizaLogger.info('on-chain system-prompt response', result[0]);
                // The contract returns hex-encoded bytes; strip the 0x
                // prefix and decode to UTF-8 text.
                const value = result[0].toString().replace("0x", "");
                const content = Buffer.from(value, 'hex').toString('utf-8');
                elizaLogger.info('on-chain system-prompt', content);
                return await fetchEternalAISystemPrompt(runtime, content);
            }
            return undefined;
        } catch (error) {
            // Log once with context; fall through to the undefined return.
            elizaLogger.error('err', error);
        }
    }
    return undefined;
}

/**
* Fetch EternalAI System Prompt
* @returns System Prompt
*/
/**
 * Fetch EternalAI System Prompt.
 *
 * When the content contains an ipfs:// URI, resolves it via the Lighthouse
 * gateway, falling back to the EternalAI CDN; otherwise the content is
 * returned verbatim.
 *
 * @param runtime agent runtime (unused here; kept for signature compatibility)
 * @param content raw system-prompt content, possibly an ipfs:// URI
 * @returns the resolved system prompt text
 * @throws Error when the content is an ipfs:// URI and both gateways fail
 */
async function fetchEternalAISystemPrompt(runtime: IAgentRuntime, content: string): Promise<string> {
    const IPFS = "ipfs://";
    if (!content.includes(IPFS)) {
        // Plain inline prompt — nothing to resolve.
        return content;
    }

    // Primary gateway: Lighthouse.
    const lightHouse = content.replace(IPFS, "https://gateway.lighthouse.storage/ipfs/");
    elizaLogger.info("fetch lightHouse", lightHouse);
    const responseLH = await fetch(lightHouse, {
        method: "GET",
    });
    elizaLogger.info("fetch lightHouse resp", responseLH);
    if (responseLH.ok) {
        return await responseLH.text();
    }

    // Fallback gateway: EternalAI CDN.
    const gcs = content.replace(IPFS, "https://cdn.eternalai.org/upload/");
    elizaLogger.info("fetch gcs", gcs);
    const responseGCS = await fetch(gcs, {
        method: "GET",
    });
    elizaLogger.info("fetch lightHouse gcs", responseGCS);
    if (responseGCS.ok) {
        return await responseGCS.text();
    }
    throw new Error("invalid on-chain system prompt");
}

/**
* Gets the Cloudflare Gateway base URL for a specific provider if enabled
* @param runtime The runtime environment
Expand Down Expand Up @@ -485,10 +566,23 @@ export async function generateText({
},
});

let system_prompt = runtime.character.system ?? settings.SYSTEM_PROMPT ?? undefined;
try {
const on_chain_system_prompt = await getOnChainEternalAISystemPrompt(runtime);
if (!on_chain_system_prompt) {
elizaLogger.error(new Error("invalid on_chain_system_prompt"))
} else {
system_prompt = on_chain_system_prompt
elizaLogger.info("new on-chain system prompt", system_prompt)
}
} catch (e) {
elizaLogger.error(e)
}

const { text: openaiResponse } = await aiGenerateText({
model: openai.languageModel(model),
prompt: context,
system: runtime.character.system ?? settings.SYSTEM_PROMPT ?? undefined,
system: system_prompt,
temperature: temperature,
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
Expand Down

0 comments on commit b24c35f

Please sign in to comment.