Adds gpt-4-turbo (vision) e2e test
kgrofelnik committed Apr 18, 2024
1 parent fda5155 commit 5df982f
Showing 4 changed files with 122 additions and 1 deletion.
.github/workflows/e2e_tests.yml (6 additions, 0 deletions)
@@ -70,6 +70,7 @@ jobs:
BEARLY_API_KEY: ${{ secrets.BEARLY_API_KEY }}
PINATA_GATEWAY_TOKEN: ${{ secrets.PINATA_GATEWAY_TOKEN }}
PINATA_API_JWT: ${{ secrets.PINATA_API_JWT }}
IMAGE_URL: "https://picsum.photos/200/300"
- name: "Test 1 - OpenAI gpt-4-turbo-preview"
run:
cd contracts && npx hardhat openai --contract-address ${{ env.TEST_CONTRACT_ADDRESS }} --model gpt-4-turbo-preview --message "Who is the president of USA?" --network ${{ env.NETWORK }}
@@ -120,6 +121,11 @@ jobs:
cd contracts && npx hardhat query_knowledge_base --contract-address ${{ env.TEST_CONTRACT_ADDRESS }} --cid QmdCgbMawRVE6Kc1joZmhgDo2mSZFgRgWvBCqUvJV9JwkF --query "What is the oracle smart contract address?" --network ${{ env.NETWORK }}
env:
PRIVATE_KEY_LOCALHOST: ${{ secrets.PRIVATE_KEY }}
- name: "Test 11 - OpenAI gpt-4-turbo vision"
run:
cd contracts && npx hardhat openai_vision --contract-address ${{ env.TEST_CONTRACT_ADDRESS }} --model gpt-4-turbo --message "What is on this image?" --image-url ${{ env.IMAGE_URL }} --network ${{ env.NETWORK }}
env:
PRIVATE_KEY_LOCALHOST: ${{ secrets.PRIVATE_KEY }}
- name: Display Oracle Output
if: always() # Ensures this step runs even if a previous step fails
run: cat oracles/oracle_output.txt
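
The new "Test 11" step drives the openai_vision Hardhat task from the shell. For local debugging, a roughly equivalent programmatic call from a Hardhat script could look like the sketch below; the script itself and the fallback environment-variable handling are assumptions for illustration, not part of this commit.

import hre from "hardhat";

async function main() {
  // Mirrors "Test 11 - OpenAI gpt-4-turbo vision" from the workflow step above.
  await hre.run("openai_vision", {
    contractAddress: process.env.TEST_CONTRACT_ADDRESS ?? "",
    model: "gpt-4-turbo",
    message: "What is on this image?",
    imageUrl: process.env.IMAGE_URL ?? "https://picsum.photos/200/300",
  });
}

main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
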
contracts/contracts/Test.sol (49 additions, 0 deletions)
@@ -9,6 +9,7 @@ contract Test {
address private owner;
address public oracleAddress;
string public llmMessage;
IOracle.Message visionMessage;
string public lastResponse;
string public lastError;
uint private callsCount;
@@ -83,6 +84,48 @@ contract Test {
return currentId;
}

function callOpenAiVisionLLM(string memory model, string memory message, string memory imageUrl) public returns (uint i) {
uint currentId = callsCount;
callsCount = currentId + 1;

lastResponse = "";
lastError = "";

visionMessage = IOracle.Message({
role: "user",
content: new IOracle.Content[](2)
});
visionMessage.content[0] = IOracle.Content({
contentType: "text",
value: message
});
visionMessage.content[1] = IOracle.Content({
contentType: "image_url",
value: imageUrl
});

IOracle(oracleAddress).createOpenAiLlmCall(
currentId,
IOracle.OpenAiRequest({
model: model,
frequencyPenalty : 21, // > 20 for null
logitBias : "", // empty str for null
maxTokens : 1000, // 0 for null
presencePenalty : 21, // > 20 for null
responseFormat : "{\"type\":\"text\"}",
seed : 0, // null
stop : "", // null
temperature : 10, // Example temperature (scaled up, 10 means 1.0), > 20 means null
topP : 101, // Percentage 0-100, > 100 means null
tools : "",
toolChoice : "", // "none" or "auto"
user : "" // null
})
);

return currentId;
}

function callGroqLLM(string memory model, string memory message) public returns (uint i) {
uint currentId = callsCount;
callsCount = currentId + 1;
@@ -123,6 +166,12 @@ contract Test {
return roles;
}

function getMessageHistory(uint /*chatId*/) public view returns (IOracle.Message[] memory) {
IOracle.Message[] memory messages = new IOracle.Message[](1);
messages[0] = visionMessage;
return messages;
}

function queryKnowledgeBase(string memory cid, string memory query) public returns (uint i) {
uint currentId = callsCount;
callsCount = currentId + 1;
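
The callOpenAiVisionLLM function added above builds a single "user" message with two content parts (a "text" part carrying the prompt and an "image_url" part carrying the image URL), stores it in visionMessage, and exposes it to the oracle through getMessageHistory. Below is a hedged ethers v6 sketch for reading that stored message back off-chain, assuming IOracle.Message is (role, content) and IOracle.Content is (contentType, value) as the hunk suggests; RPC_URL and TEST_CONTRACT_ADDRESS are placeholder environment variables, not values from this commit.

import { ethers } from "ethers";

// Minimal ABI fragment for the view function added in this commit (struct layout assumed).
const abi = [
  "function getMessageHistory(uint256) view returns (tuple(string role, tuple(string contentType, string value)[] content)[])",
];

async function main() {
  const provider = new ethers.JsonRpcProvider(process.env.RPC_URL);
  const test = new ethers.Contract(process.env.TEST_CONTRACT_ADDRESS ?? "", abi, provider);
  const [message] = await test.getMessageHistory(0);
  // After callOpenAiVisionLLM this should print: user [["text", <prompt>], ["image_url", <url>]]
  console.log(
    message.role,
    message.content.map((c: { contentType: string; value: string }) => [c.contentType, c.value]),
  );
}

main().catch(console.error);
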
contracts/tasks/e2e.ts (28 additions, 1 deletion)
@@ -43,6 +43,14 @@ task("e2e", "Runs all e2e tests")
"Who is the president of USA?",
hre,
)
testResults["OpenAI gpt-4-turbo"] = result.error || "✅";
result = await runOpenAiVision(
contractAddress,
"gpt-4-turbo",
"What is on this image",
"https://picsum.photos/200/300",
hre,
)
testResults["Groq llama2-70b-4096"] = result.error || "✅";
result = await runGroq(
contractAddress,
@@ -145,7 +153,6 @@ async function runTaskWithTimeout(
}
}


async function runOpenAi(
contractAddress: string,
model: string,
@@ -164,6 +171,26 @@
return result;
}

async function runOpenAiVision(
contractAddress: string,
model: string,
message: string,
imageUrl: string,
hre: HardhatRuntimeEnvironment,
) {
let result = await runTaskWithTimeout(
"openai_vision",
{
contractAddress,
model,
message,
imageUrl
},
hre,
)
return result;
}

async function runGroq(
contractAddress: string,
model: string,
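
runOpenAiVision follows the same pattern as runOpenAi: it simply forwards its arguments to runTaskWithTimeout, whose body lies outside this diff. As a rough idea of what such a wrapper does, the sketch below races hre.run against a timer; it is an assumption-level illustration, not the repository's actual implementation, and the 2-minute default timeout in particular is invented.

import { HardhatRuntimeEnvironment } from "hardhat/types";

async function runTaskWithTimeoutSketch(
  taskName: string,
  taskArgs: Record<string, string>,
  hre: HardhatRuntimeEnvironment,
  timeoutMs: number = 120_000,
): Promise<{ response?: string; error?: string }> {
  const timeout = new Promise<{ error: string }>((resolve) =>
    setTimeout(() => resolve({ error: `${taskName} timed out after ${timeoutMs} ms` }), timeoutMs),
  );
  // hre.run resolves with whatever the task action returns (here, the checkResult output).
  return Promise.race([hre.run(taskName, taskArgs), timeout]);
}
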
contracts/tasks/functions.ts (39 additions, 0 deletions)
@@ -21,6 +21,21 @@ task("openai", "Calls the OpenAI LLM")
return checkResult(response);
});

task("openai_vision", "Calls the OpenAI LLM")
.addParam("contractAddress", "The address of the Test contract")
.addParam("model", "The model to use")
.addParam("imageUrl", "The image URL to send to the model")
.addParam("message", "The message to send to the model")
.setAction(async (taskArgs, hre) => {
const contractAddress = taskArgs.contractAddress;
const model = taskArgs.model;
const message = taskArgs.message;
const imageUrl = taskArgs.imageUrl;
const contract = await getContract("Test", contractAddress, hre);
const response = await queryOpenAiVisionLLM(contract, model, message, imageUrl, hre);
return checkResult(response);
});

task("groq", "Calls the Groq LLM")
.addParam("contractAddress", "The address of the Test contract")
.addParam("model", "The model to use")
@@ -141,6 +156,30 @@ async function queryOpenAiLLM(
return { response: "", error: "Call failed" };
}

async function queryOpenAiVisionLLM(
contract: Contract,
model: string,
message: string,
image_url: string,
hre: HardhatRuntimeEnvironment
): Promise<FunctionResponse> {
try {
const txResponse = await contract.callOpenAiVisionLLM(model, message, image_url);
await txResponse.wait();
let response = await contract.lastResponse();
let error = await contract.lastError();
while (response.length === 0 && error.length === 0) {
await new Promise((resolve) => setTimeout(resolve, 1000));
response = await contract.lastResponse();
error = await contract.lastError();
}
return { response: response, error: error };
} catch (error) {
console.error(`Error calling contract function: ${error}`);
}
return { response: "", error: "Call failed" };
}

async function queryGroqLLM(
contract: Contract,
model: string,
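
queryOpenAiVisionLLM mirrors the existing queryOpenAiLLM: send the transaction, then poll lastResponse and lastError once per second until one of them is non-empty. As written, the loop waits indefinitely if the oracle never answers; a bounded variant is sketched below, where the 120-attempt cap and the helper name are assumptions for illustration rather than part of this commit.

import { Contract } from "ethers";

// Polls the Test contract until the oracle has written a response or an error,
// giving up after maxAttempts seconds instead of looping forever.
async function pollForAnswer(
  contract: Contract,
  maxAttempts: number = 120,
): Promise<{ response: string; error: string }> {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const response: string = await contract.lastResponse();
    const error: string = await contract.lastError();
    if (response.length > 0 || error.length > 0) {
      return { response, error };
    }
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
  return { response: "", error: "Timed out waiting for oracle response" };
}
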
