diff --git a/examples/chatgpt-apikey-confidential-store/MyGpt.sol b/examples/chatgpt-apikey-confidential-store/MyGpt.sol
new file mode 100644
index 0000000..cbe94a8
--- /dev/null
+++ b/examples/chatgpt-apikey-confidential-store/MyGpt.sol
@@ -0,0 +1,136 @@
+// SPDX-License-Identifier: UNLICENSED
+pragma solidity ^0.8.8;
+
+import "suave-std/Suapp.sol";
+import "suave-std/Context.sol";
+import "suave-std/suavelib/Suave.sol";
+import "solady/src/utils/JSONParserLib.sol";
+
+// A Suapp that keeps each user's OpenAI API key in the SUAVE confidential store
+// and lets them query ChatGPT without the key ever appearing onchain.
+contract Chat is Suapp {
+    using JSONParserLib for *;
+
+    struct DataItem {
+        Suave.DataId id;
+        bool used;
+    }
+
+    enum Role {
+        User,
+        System
+    }
+
+    struct Message {
+        Role role;
+        string content;
+    }
+
+    // confidential-store record registered by each sender
+    mapping(address => DataItem) apiKeys;
+    // name of the field under which the key is stored inside the data record
+    string public API_KEY = "API_KEY";
+
+    event Response(string messages);
+    event UpdateKey(address sender);
+
+    error NoKeyExists(address sender);
+
+    // Onchain callback: records the confidential-store id of msg.sender's API key.
+    function updateKeyOnchain(Suave.DataId _apiKeyRecord) public {
+        emit UpdateKey(msg.sender);
+        apiKeys[msg.sender] = DataItem({id: _apiKeyRecord, used: true});
+    }
+
+    // Offchain: reads the API key from the confidential inputs, stores it in the
+    // confidential store, and returns the updateKeyOnchain callback.
+    function registerKeyOffchain() public returns (bytes memory) {
+        bytes memory keyData = Context.confidentialInputs();
+
+        address[] memory peekers = new address[](1);
+        peekers[0] = address(this);
+
+        Suave.DataRecord memory record = Suave.newDataRecord(0, peekers, peekers, "api_key");
+        Suave.confidentialStore(record.id, API_KEY, keyData);
+
+        return abi.encodeWithSelector(this.updateKeyOnchain.selector, record.id);
+    }
+
+    // Empty onchain callback that only emits the offchain logs.
+    function onchain() public emitOffchainLogs {}
+
+    // Offchain: retrieves the caller's API key from the confidential store and
+    // forwards the prompt to ChatGPT. Reverts if the caller never registered a key.
+    function ask(string calldata prompt, string calldata model, string calldata temperature)
+        external
+        returns (bytes memory)
+    {
+        if (!apiKeys[msg.sender].used) {
+            revert NoKeyExists(msg.sender);
+        }
+        bytes memory keyData = Suave.confidentialRetrieve(apiKeys[msg.sender].id, API_KEY);
+        string memory apiKey = bytesToString(keyData);
+
+        Message[] memory messages = new Message[](1);
+        messages[0] = Message(Role.User, prompt);
+
+        string memory data = complete(messages, model, temperature, apiKey);
+
+        emit Response(data);
+
+        return abi.encodeWithSelector(this.onchain.selector);
+    }
+
+    function bytesToString(bytes memory data) internal pure returns (string memory) {
+        // bytes and string share the same memory layout, so a direct cast is enough
+        return string(data);
+    }
+
+    // Builds the chat-completions request body by string concatenation, e.g.
+    //   {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "<prompt>"}], "temperature": 0.7}
+    // Note that the prompt is not JSON-escaped, so quotes or newlines in it will
+    // produce an invalid body.
+    function complete(Message[] memory messages, string memory model, string memory temperature, string memory key)
+        public
+        returns (string memory)
+    {
+        bytes memory body;
+        body = abi.encodePacked('{"model": "', model);
+        body = abi.encodePacked(body, '", "messages": [');
+        for (uint256 i = 0; i < messages.length; i++) {
+            body = abi.encodePacked(
+                body,
+                '{"role": "',
+                messages[i].role == Role.User ? "user" : "system",
+                '", "content": "',
+                messages[i].content,
+                '"}'
+            );
+            if (i < messages.length - 1) {
+                body = abi.encodePacked(body, ",");
+            }
+        }
+        body = abi.encodePacked(body, '], "temperature": ');
+        body = abi.encodePacked(body, temperature);
+        body = abi.encodePacked(body, "}");
+
+        return doGptRequest(body, key);
+    }
+
+    // Sends the request to the OpenAI API and extracts choices[0].message.content
+    // from the JSON response.
+    function doGptRequest(bytes memory body, string memory apiKey) private returns (string memory) {
+        Suave.HttpRequest memory request;
+        request.method = "POST";
+        request.url = "https://api.openai.com/v1/chat/completions";
+        request.headers = new string[](2);
+        request.headers[0] = string.concat("Authorization: Bearer ", apiKey);
+        request.headers[1] = "Content-Type: application/json";
+        request.body = body;
+
+        bytes memory output = Suave.doHTTPRequest(request);
+
+        JSONParserLib.Item memory item = string(output).parse();
+        string memory result = trimQuotes(item.at('"choices"').at(0).at('"message"').at('"content"').value());
+
+        return result;
+    }
+
+    // Strips the surrounding double quotes that JSONParserLib keeps on string values.
+    function trimQuotes(string memory input) private pure returns (string memory) {
+        bytes memory inputBytes = bytes(input);
+        require(
+            inputBytes.length >= 2 && inputBytes[0] == '"' && inputBytes[inputBytes.length - 1] == '"', "Invalid input"
+        );
+
+        bytes memory result = new bytes(inputBytes.length - 2);
+
+        for (uint256 i = 1; i < inputBytes.length - 1; i++) {
+            result[i - 1] = inputBytes[i];
+        }
+
+        return string(result);
+    }
+}
\ No newline at end of file
diff --git a/examples/chatgpt-apikey-confidential-store/README.md b/examples/chatgpt-apikey-confidential-store/README.md
new file mode 100644
index 0000000..abd2091
--- /dev/null
+++ b/examples/chatgpt-apikey-confidential-store/README.md
@@ -0,0 +1,17 @@
+# Example Suapp that chats with ChatGPT using an API key kept in the confidential store
+
+This example shows how a Suapp can interact with ChatGPT while the API key is stored in the SUAVE confidential store. Every user registers their own API key and chats through the contract; a request fails if the sender has not stored a key yet.
+
+## How to use
+
+Run `Suave` in development mode:
+
+```
+$ suave --suave.dev
+```
+
+Execute the deployment script:
+
+```
+$ go run main.go
+```
diff --git a/examples/chatgpt-apikey-confidential-store/main.go b/examples/chatgpt-apikey-confidential-store/main.go
new file mode 100644
index 0000000..46dce30
--- /dev/null
+++ b/examples/chatgpt-apikey-confidential-store/main.go
@@ -0,0 +1,89 @@
+package main
+
+import (
+	"bufio"
+	"fmt"
+	"os"
+	"strconv"
+	"strings"
+
+	"github.com/flashbots/suapp-examples/framework"
+)
+
+func main() {
+	fr := framework.New()
+
+	chat := fr.Suave.DeployContract("MyGpt.sol/Chat.json")
+	fmt.Println("deploy chat complete!")
+
+	// read all interactive input through a single scanner so buffered and
+	// unbuffered reads from stdin do not interleave
+	stdin := bufio.NewScanner(os.Stdin)
+	readLine := func(prompt string) string {
+		fmt.Println(prompt)
+		if !stdin.Scan() {
+			return ""
+		}
+		return strings.TrimSpace(stdin.Text())
+	}
+
+	updateKey := func() {
+		apiKey := readLine("please input your chatgpt api-key:")
+		fmt.Printf("your key is: %s\n", apiKey)
+		// the key is sent as a confidential input and ends up in the confidential store
+		receipt := chat.SendConfidentialRequest("registerKeyOffchain", []interface{}{}, []byte(apiKey))
+		if len(receipt.Logs) >= 1 {
+			fmt.Printf("%s\n", receipt.Logs[0].Data)
+		}
+		fmt.Println("your key has been put in the suave confidential store")
+	}
+
+	sendQuestion := func() {
+		question := readLine("please input your question:")
+		fmt.Printf("your question is: %s\n", question)
+
+		fmt.Println("1: gpt-3.5-turbo")
+		fmt.Println("2: gpt-4o")
+		model, err := strconv.Atoi(readLine("please choose a model, 1 or 2:"))
+		if err != nil {
+			fmt.Println("error:", err)
+			return
+		}
+
+		gptModel := "gpt-3.5-turbo"
+		switch model {
+		case 1:
+			gptModel = "gpt-3.5-turbo"
+		case 2:
+			gptModel = "gpt-4o"
+		default:
+			fmt.Println("invalid option, defaulting to gpt-3.5-turbo")
+		}
+
+		receipt := chat.SendConfidentialRequest("ask", []interface{}{question, gptModel, "0.7"}, nil)
+		if len(receipt.Logs) >= 1 {
+			fmt.Printf("%s\n", receipt.Logs[0].Data)
+		}
+	}
+
+	hasKey := false
+	for {
+		if !hasKey {
+			updateKey()
+			hasKey = true
+		}
+		fmt.Println("1: ask a question")
+		fmt.Println("2: update api key")
+		option, err := strconv.Atoi(readLine("please choose an option, 1 or 2:"))
+		if err != nil {
+			fmt.Println("error:", err)
+			break
+		}
+		switch option {
+		case 1:
+			sendQuestion()
+		case 2:
+			updateKey()
+		default:
+			fmt.Println("invalid option")
+		}
+	}
+
+	fmt.Println("finished")
+}