From 9aac76fc9b73e75e70df8b8c2cf8a8c7856a71ca Mon Sep 17 00:00:00 2001 From: Scharief Date: Tue, 23 Jan 2024 17:07:15 +0100 Subject: [PATCH 1/7] clean branch - fb-public-node-mainnet updates --- .../gcp-nitro-node/scripts/startup_script.tpl | 137 +++-------- infrastructure/nitro-node-wrapper/README.md | 19 ++ .../arb1.config.tpl copy.json | 132 +++++++++++ .../xai-goerli.config.tpl.json | 129 ++++++++++ .../xai-mainnet.config.tpl.json | 129 ++++++++++ .../configs/xai-mainnet.config.json | 78 ++++++ .../nitro-node-wrapper/docker-compose.yml | 50 ++++ .../nitro-node-wrapper/publisher/Dockerfile | 15 ++ .../nitro-node-wrapper/publisher/README.md | 10 + .../nitro-node-wrapper/publisher/index.mjs | 222 ++++++++++++++++++ .../nitro-node-wrapper/publisher/package.json | 19 ++ infrastructure/nitro-node-wrapper/sample.env | 12 + 12 files changed, 842 insertions(+), 110 deletions(-) create mode 100644 infrastructure/nitro-node-wrapper/README.md create mode 100644 infrastructure/nitro-node-wrapper/config-templates/arb1.config.tpl copy.json create mode 100644 infrastructure/nitro-node-wrapper/config-templates/xai-goerli.config.tpl.json create mode 100644 infrastructure/nitro-node-wrapper/config-templates/xai-mainnet.config.tpl.json create mode 100644 infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json create mode 100644 infrastructure/nitro-node-wrapper/docker-compose.yml create mode 100644 infrastructure/nitro-node-wrapper/publisher/Dockerfile create mode 100644 infrastructure/nitro-node-wrapper/publisher/README.md create mode 100644 infrastructure/nitro-node-wrapper/publisher/index.mjs create mode 100644 infrastructure/nitro-node-wrapper/publisher/package.json create mode 100644 infrastructure/nitro-node-wrapper/sample.env diff --git a/infrastructure/gcp-nitro-node/scripts/startup_script.tpl b/infrastructure/gcp-nitro-node/scripts/startup_script.tpl index 1ccabdb2e..8900fbc1d 100644 --- a/infrastructure/gcp-nitro-node/scripts/startup_script.tpl +++ b/infrastructure/gcp-nitro-node/scripts/startup_script.tpl @@ -11,7 +11,7 @@ then fi # Add the user to the docker group -sudo usermod -aG docker ${USER} +sudo usermod -aG docker $(whoami) # Modify useradd defaults to add new users to the docker group if grep -q "^GROUP=" /etc/default/useradd; then @@ -20,121 +20,38 @@ else echo "GROUP=docker" | sudo tee -a /etc/default/useradd fi -# Check if the docker image is present -if ! docker image inspect offchainlabs/nitro-node:v2.1.0-72ccc0c &> /dev/null -then - # Pull the docker image - docker pull offchainlabs/nitro-node:v2.1.0-72ccc0c -fi - -# Check if the directory exists -if [ ! 
-d "/opt/arbitrum" ]; then - # Create the directory - sudo mkdir -p /opt/arbitrum -fi +# Create the node data directories +sudo mkdir -p /opt/arbitrum +sudo mkdir -p /opt/xai +sudo mkdir -p /opt/node/build # Change the owner of the directory to the current user and group sudo chown -R $(whoami):$(id -gn) /opt/arbitrum +sudo chown -R $(whoami):$(id -gn) /opt/xai +sudo chown -R $(whoami):$(id -gn) /opt/node/build # Change the permissions of the directory so that any user can edit files in it sudo chmod -R a+rwX /opt/arbitrum - -# Check if the docker container is running -if docker ps | grep -q 'offchainlabs/nitro-node:v2.1.0-72ccc0c'; then - # Stop the docker container - docker stop $(docker ps -q --filter ancestor=offchainlabs/nitro-node:v2.1.0-72ccc0c) +sudo chmod -R a+rwX /opt/xai +sudo chmod -R a+rwX /opt/node/build + +# Stop all running containers +docker stop --time=300 $(docker ps -aq) +docker rm $(docker ps -aq) + +cd /opt/node/build +if [ -f "/opt/node/build/docker-compose.yml" ]; then + # If we already have a docker-compsoe we will try to remove all containers and delete the outdated versions + docker compose down + sudo rm -f docker-compose.yml + sudo rm -f xai-mainnet.config.json fi -# Create a JSON file with the configuration parameters -cat > /opt/arbitrum/config.json </xai-public-node-publisher:latest .` +- `docker tag /xai-public-node-publisher:latest /xai-public-node-publisher:latest` +- `docker push /xai-public-node-publisher:latest` \ No newline at end of file diff --git a/infrastructure/nitro-node-wrapper/publisher/index.mjs b/infrastructure/nitro-node-wrapper/publisher/index.mjs new file mode 100644 index 000000000..760e8cf39 --- /dev/null +++ b/infrastructure/nitro-node-wrapper/publisher/index.mjs @@ -0,0 +1,222 @@ +import Docker from "dockerode"; +import fs from 'fs'; +import { Writable } from 'stream'; +import { concat, keccak256 } from "ethers"; +import { Bucket, Storage } from "@google-cloud/storage"; + +// create an instance of the bucket +const storage = new Storage({ + projectId: process.env.PROJECT_ID, + credentials: { + client_email: process.env.SERVICE_ACCOUNT_EMAIL, + private_key: process.env.SERVICE_ACCOUNT_PRIVATE_KEY.replace(/\\n/g, '\n'), + } +}); + +const CLOUD_STORAGE_BUCKET = new Bucket(storage, process.env.BUCKET_NAME) +const DOCKER_CONTAINER_NAME = 'xai-public-node'; + + +// const CONFIG_TEMPLATE = 'xai-goerli.config.tpl.json'; +// const CONFIG_TEMPLATE = 'xai-mainnet.config.tpl.json'; + +// const PATH_TO_NODE_CONFIG_DIR = `${process.cwd()}/arbitrum`; +// const PATH_TO_NODE_CONFIG = path.join(PATH_TO_NODE_CONFIG_DIR, "config.json"); + +// //TODO we should remove this and have the actual config in the github or in the docker image +// const createNodeConfig = () => { +// // configure a config for the docker container +// const config = JSON.parse(fs.readFileSync(path.join("./templates", CONFIG_TEMPLATE))); +// config.chain["info-json"] = JSON.stringify(config.chain["info-json"]); + +// // Check if the directory exists, if not create it +// if (!fs.existsSync(PATH_TO_NODE_CONFIG_DIR)) { +// fs.mkdirSync(PATH_TO_NODE_CONFIG_DIR, { recursive: true }); +// } +// fs.writeFileSync(PATH_TO_NODE_CONFIG, JSON.stringify(config, null, 2)); +// console.log("Config created"); +// } + + +function _tryParseJSONObject(jsonString) { + try { + var o = JSON.parse(jsonString); + if (o && typeof o === "object") { + return o; + } + } catch (e) { } + return null; +}; + +//Helper function for the assertion state object, which currently is not a valid JSON object +function 
_parseStateObject(stateStringObj) { + let parsed = _tryParseJSONObject(stateStringObj); + if (parsed == null) { + // Adding double quotes around the keys + stateStringObj = stateStringObj.replace(/(\w+):/g, '"$1":'); + // Adding double quotes around the hex values + stateStringObj = stateStringObj.replace(/(0x[a-fA-F0-9]+)/g, '"$1"'); + // Replace space with commas + stateStringObj = stateStringObj.trim().replace(/ /g, ','); + return _tryParseJSONObject(stateStringObj) + } +} + + +const onNewAssertion = async (json) => { + const state = _parseStateObject(json.state); + + if (state === null) { + console.error("Failed to parse state object", json.state); + return; + } + + console.log("Has new assertion"); + + // Concatenate the blockHash and sendRoot + const concatenatedHashes = concat([state.BlockHash, state.SendRoot]); + + // Create the confirm hash by keccak256 + const confirmHash = keccak256(concatenatedHashes); + + // create a JSON object that will get saved to the bucket + // TODO save object to google bucket storage! + const jsonSave = { + assertion: json.assertion, + blockHash: state.BlockHash, + sendRoot: state.SendRoot, + confirmHash, + } + + //TODO retry if upload failed / send error notification + const link = await uploadToBucket(jsonSave); + if (link) { + console.log("Posting new confirmed assertion", jsonSave, link); + } +} + +const onJSONLog = (json) => { + if (json.hasOwnProperty('err')) { + console.error("Node container had error:", json); + //TODO this could be an error that stops the container, we should handle it appropriately - maybe check the error and restart the whole script + } else { + + console.log("New container JSON log", json); + // if there is an assertion and a state field then this means the validator has found a stateRoot we should process + if (json.hasOwnProperty('assertion') && json.hasOwnProperty('state')) { + onNewAssertion(json); + } + } +} + +const getAssertionsFromContainerLogs = async (container, since) => { + //TODO this is not working correctly, its not using the since param and returns the fist 50 logs since container start + const logs = await container.logs({ stdout: true, stderr: true, timestamps: true, since }); + // TODO get assertions from log +} + +const dockerContainerOutputHandler = new Writable({ + async write(chunk, encoding, callback) { + const out = chunk.toString('utf8'); + + // Split the output by new lines + const potentialJsons = out.split(/\n/); + + for (const potentialJson of potentialJsons) { + const json = _tryParseJSONObject(potentialJson); + + if (json !== null) { + + onJSONLog(json); + + } else if (potentialJson.trim() !== '') { + console.info(potentialJson); + } + } + + callback(); + } +}); + +const setupDockerContainer = async () => { + + const docker = new Docker(); + let container; + + try { + const containers = await docker.listContainers({ all: true }); + for (const containerInfo of containers) { + if (containerInfo.Names.includes("/" + DOCKER_CONTAINER_NAME)) { + container = docker.getContainer(containerInfo.Id); + break; + } + } + + } catch (error) { + console.error("Error listing docker container", error); + throw new Error("Error listing container"); + } + + if (!container) { + throw new Error("Did not find container " + DOCKER_CONTAINER_NAME); + } + + try { + //TODO get timestamp from last log + const lastContainerLog = Date.now() - (60 * 60 * 1000); + const assertionsFromLogs = await getAssertionsFromContainerLogs(container, lastContainerLog); + //TODO Post missed assertions + } catch (error) { + 
console.error("Failed to load recent container logs - continue with current stream", error); + } + + // TODO handle errors in here ? + // Attach the stdout and stderr to custom streams + container.attach({ stream: true, stdout: true, stderr: true }, function (err, stream) { + // Dockerode may demultiplex attach streams for you :) + container.modem.demuxStream(stream, dockerContainerOutputHandler, dockerContainerOutputHandler); + }); + + console.log('Publisher started successfully, listening on container logs'); +} + + +const uploadToBucket = async (jsonSave) => { + + const fileName = `${jsonSave.blockHash}.json`; + const localPath = `./${fileName}`; + const destinationPath = `assertions/${fileName}`; + + const exists = await CLOUD_STORAGE_BUCKET.file(destinationPath).exists(); + if (exists.length && exists[0] == true) { + console.info("Skipping duplicate assertion", jsonSave.blockHash); + return null; + } + + fs.writeFileSync(localPath, JSON.stringify(jsonSave, null, 2)); + + try { + const result = await CLOUD_STORAGE_BUCKET.upload(localPath, { + destination: destinationPath, + public: true, + metadata: { + contentType: "application/plain", //application/csv for excel or csv file upload + } + }); + fs.unlinkSync(fileName); + return result[0].metadata.mediaLink; + + } catch (error) { + console.error("Error posting to bucket", error); + throw new Error(error.message); + } +} + +const main = () => { + setupDockerContainer() + .catch(err => { + console.error("Error on runtime", err); + //TODO send notification here, container should be restart5ed by docker + }) +} +main(); \ No newline at end of file diff --git a/infrastructure/nitro-node-wrapper/publisher/package.json b/infrastructure/nitro-node-wrapper/publisher/package.json new file mode 100644 index 000000000..874e52b1a --- /dev/null +++ b/infrastructure/nitro-node-wrapper/publisher/package.json @@ -0,0 +1,19 @@ +{ + "name": "@sentry/nitro-node-wrapper", + "version": "1.0.0", + "description": "", + "main": "index.mjs", + "type": "module", + "scripts": { + "start": "zx index.mjs" + }, + "dependencies": { + "@google-cloud/storage": "7.7.0", + "dockerode": "4.0.0", + "ethers": "6.8.1", + "zx": "7.2.3" + }, + "keywords": [], + "author": "", + "license": "UNLICENSED" +} diff --git a/infrastructure/nitro-node-wrapper/sample.env b/infrastructure/nitro-node-wrapper/sample.env new file mode 100644 index 000000000..0c7c36375 --- /dev/null +++ b/infrastructure/nitro-node-wrapper/sample.env @@ -0,0 +1,12 @@ +# Arb1 Node Settings +# The Etehreum mainnet RPC to sync the ARB1 node with (prob > 100K requests to sync) +ETH_RPC_URL= + +# GBUCKET Project ID +PROJECT_ID= +# GBUCKET Account Email +SERVICE_ACCOUNT_EMAIL= +# GBUCKET PRIVATE KEY +SERVICE_ACCOUNT_PRIVATE_KEY= +# GBUCKET BUCKET NAME +BUCKET_NAME= \ No newline at end of file From 2d85af4b462daa2d33881c16b39971ce68536069 Mon Sep 17 00:00:00 2001 From: Scharief Date: Thu, 25 Jan 2024 14:04:07 +0100 Subject: [PATCH 2/7] update terraform deploy add variables for startup --- .../gcp-nitro-node/.terraform.lock.hcl | 2 ++ infrastructure/gcp-nitro-node/main.tf | 34 ++++++++++++------- .../gcp-nitro-node/scripts/startup_script.tpl | 4 +++ .../gcp-nitro-node/terraform.tfvars.example | 3 +- infrastructure/gcp-nitro-node/variables.tf | 16 ++++++--- .../configs/xai-mainnet.config.json | 2 +- 6 files changed, 43 insertions(+), 18 deletions(-) diff --git a/infrastructure/gcp-nitro-node/.terraform.lock.hcl b/infrastructure/gcp-nitro-node/.terraform.lock.hcl index 0eb2b7318..a8208b834 100644 --- 
a/infrastructure/gcp-nitro-node/.terraform.lock.hcl +++ b/infrastructure/gcp-nitro-node/.terraform.lock.hcl @@ -5,6 +5,7 @@ provider "registry.terraform.io/hashicorp/google" { version = "5.0.0" hashes = [ "h1:J6n7/Su0fc+cfDVI43fch8w60vjUJLuzIzQHGmBS4Eo=", + "h1:OaPOtGfFqiBlg7Rwhzo1bjYsIUe9EPG2EA5V3CUGzH4=", "zh:28ca14826a86d703d1b30474f46d8e4383ddced96e42e8064544bdb1cd0e2200", "zh:2cd61ecdfd4b6d787861ccfa593c24a84bc34445c13e3e0ca433fd60128dc1a4", "zh:5ad2e73b0eaa349b3dea4deec648cbfb5ba7e8c5353ffe30d5ad2158dea8396a", @@ -24,6 +25,7 @@ provider "registry.terraform.io/hashicorp/local" { version = "2.4.0" hashes = [ "h1:ZUEYUmm2t4vxwzxy1BvN1wL6SDWrDxfH7pxtzX8c6d0=", + "h1:sfyLOvWTFeT1dlRURlE2HpIUOOX9SB16Kgi6KPp0Oxs=", "zh:53604cd29cb92538668fe09565c739358dc53ca56f9f11312b9d7de81e48fab9", "zh:66a46e9c508716a1c98efbf793092f03d50049fa4a83cd6b2251e9a06aca2acf", "zh:70a6f6a852dd83768d0778ce9817d81d4b3f073fab8fa570bff92dcb0824f732", diff --git a/infrastructure/gcp-nitro-node/main.tf b/infrastructure/gcp-nitro-node/main.tf index b8b48822b..296f14bac 100644 --- a/infrastructure/gcp-nitro-node/main.tf +++ b/infrastructure/gcp-nitro-node/main.tf @@ -11,10 +11,6 @@ provider "google" { zone = var.gcp_zone } -locals { - startup_script = file("${path.module}/scripts/startup_script.tpl") -} - resource "google_compute_address" "default" { name = "node-static-ip" } @@ -29,12 +25,26 @@ resource "google_service_account_key" "bucket_updater_key" { service_account_id = google_service_account.bucket_updater.name } +locals { + startup_script = templatefile("${path.module}/scripts/startup_script.tpl", { + gcp_project_id = var.gcp_project_id, + bucket_name = var.bucket_name, + service_account_email = google_service_account.bucket_updater.email, + service_account_api_key = google_service_account_key.bucket_updater_key.private_key, + eth_rpc_url = var.eth_rpc_url + }) +} + + resource "google_storage_bucket_iam_member" "bucket_updater" { bucket = google_storage_bucket.public_bucket.name role = "roles/storage.objectAdmin" member = "serviceAccount:${google_service_account.bucket_updater.email}" } + +// n1-standard-4 = 4 vCPUs and 15 GB RAM +// size=2000 = 2000 GB resource "google_compute_instance" "default" { name = "arbitrum-full-node" machine_type = "n1-standard-4" @@ -51,7 +61,7 @@ resource "google_compute_instance" "default" { metadata_startup_script = local.startup_script - metadata = { + metadata = { service_account_key = google_service_account_key.bucket_updater_key.private_key } @@ -67,16 +77,16 @@ resource "google_storage_bucket" "public_bucket" { # allow public access to the sitemap bucket data "google_iam_policy" "viewer" { - binding { - role = "roles/storage.objectViewer" - members = [ - "allUsers", - ] - } + binding { + role = "roles/storage.objectViewer" + members = [ + "allUsers", + ] + } } locals { - + markdown = <<-EOF # OUTPUT Service Account Key: `${google_service_account_key.bucket_updater_key.private_key}` diff --git a/infrastructure/gcp-nitro-node/scripts/startup_script.tpl b/infrastructure/gcp-nitro-node/scripts/startup_script.tpl index 8900fbc1d..fd368327c 100644 --- a/infrastructure/gcp-nitro-node/scripts/startup_script.tpl +++ b/infrastructure/gcp-nitro-node/scripts/startup_script.tpl @@ -52,6 +52,10 @@ curl -o docker-compose.yml https://storage.googleapis.com/xai-sentry-public-node curl -o xai-mainnet.config.json https://storage.googleapis.com/xai-sentry-public-node/node-config/xai-mainnet.config.json # EPORT ENV VARS +EXPORT ETH_RPC_URL=${eth_rpc_url} +EXPORT PROJECT_ID=${gcp_project_id} +EXPORT 
SERVICE_ACCOUNT_EMAIL=${service_account_email} +EXPORT SERVICE_ACCOUNT_PRIVATE_KEY=${service_account_api_key} EXPORT BUCKET_NAME=${bucket_name} docker compose up -d \ No newline at end of file diff --git a/infrastructure/gcp-nitro-node/terraform.tfvars.example b/infrastructure/gcp-nitro-node/terraform.tfvars.example index 64270f636..6b93c7bee 100644 --- a/infrastructure/gcp-nitro-node/terraform.tfvars.example +++ b/infrastructure/gcp-nitro-node/terraform.tfvars.example @@ -1,4 +1,5 @@ gcp_project_id = "your-gcp-project-id" gcp_region = "asia-east1" gcp_zone = "asia-east1-a" -parent_chain_rpc_url = "your-ethereum-rpc-url" +bucket_name = "bucket-name" +eth_rpc_url = "https://eth.public-rpc.com" \ No newline at end of file diff --git a/infrastructure/gcp-nitro-node/variables.tf b/infrastructure/gcp-nitro-node/variables.tf index 0d66d016a..9f9e14c02 100644 --- a/infrastructure/gcp-nitro-node/variables.tf +++ b/infrastructure/gcp-nitro-node/variables.tf @@ -13,12 +13,20 @@ variable "gcp_zone" { type = string } -variable "parent_chain_rpc_url" { - description = "The URL of the parent chain RPC node" - type = string -} +//In config.json - will be the local arb1 node once synced +# variable "parent_chain_rpc_url" { +# description = "The URL of the parent chain RPC node" +# type = string +# } variable "bucket_name" { description = "The name of the bucket to be used to store challenge information in for the public node. This needs to be unique across all of GCP." type = string } + +variable "eth_rpc_url" { + description = "The ETH RPC for the Arb1 node to sync with" + type = string +} + +# ADD ALL NEEDED ENV VARS IN HERE diff --git a/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json b/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json index ece890c0e..a1bbe0dfd 100644 --- a/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json +++ b/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json @@ -49,7 +49,7 @@ "execution": { "forwarding-target": "https://xai-chain.net/rpc", "caching": { - "archive": false + "archive": true } }, "parent-chain": { From 7e795a304449f25deeb954b99fcbc2cf75994ed7 Mon Sep 17 00:00:00 2001 From: Scharief Date: Sun, 28 Jan 2024 16:27:58 +0100 Subject: [PATCH 3/7] implement node software assertion log updates --- .../nitro-node-wrapper/docker-compose.yml | 2 +- infrastructure/nitro-node-wrapper/index.mjs | 210 ------------------ .../nitro-node-wrapper/package.json | 19 -- .../nitro-node-wrapper/publisher/index.mjs | 107 +++------ .../nitro-node-wrapper/publisher/package.json | 1 + infrastructure/nitro-node-wrapper/sample.env | 12 - 6 files changed, 32 insertions(+), 319 deletions(-) delete mode 100644 infrastructure/nitro-node-wrapper/index.mjs delete mode 100644 infrastructure/nitro-node-wrapper/package.json delete mode 100644 infrastructure/nitro-node-wrapper/sample.env diff --git a/infrastructure/nitro-node-wrapper/docker-compose.yml b/infrastructure/nitro-node-wrapper/docker-compose.yml index e5272f865..bb0fd1d77 100644 --- a/infrastructure/nitro-node-wrapper/docker-compose.yml +++ b/infrastructure/nitro-node-wrapper/docker-compose.yml @@ -20,7 +20,7 @@ services: - 8.8.4.4 xai-node: - image: offchainlabs/nitro-node:v2.2.2-8f33fea + image: offchainlabs/nitro-node:v2.2.3-beta.1-3f67f04 container_name: xai-public-node command: - --conf.file=/home/user/node-config/config.json diff --git a/infrastructure/nitro-node-wrapper/index.mjs b/infrastructure/nitro-node-wrapper/index.mjs deleted file mode 100644 index 09a636bd8..000000000 
--- a/infrastructure/nitro-node-wrapper/index.mjs +++ /dev/null @@ -1,210 +0,0 @@ -import Docker from "dockerode"; -import fs from 'fs'; -import { Writable } from 'stream'; -import ethers from "ethers"; -import {Bucket, Storage} from "@google-cloud/storage"; - -// create an instance of the bucket -storage = new Storage({ - projectId: process.env.PROJECT_ID, - credentials: { - client_email: process.env.SERVICE_ACCOUNT_EMAIL, - private_key: process.env.SERVICE_ACCOUNT_PRIVATE_KEY.replace(/\\n/g, '\n'), - } -}); -storage.Bucket(process.env.BUCKET_NAME) - -// configure a config for the docker container -const config = { - "chain": { - "id": 47279324479, - "info-json": "[{\"chain-id\":47279324479,\"parent-chain-id\":421613,\"chain-name\":\"Xai Orbit Testnet\",\"chain-config\":{\"chainId\":47279324479,\"homesteadBlock\":0,\"daoForkBlock\":null,\"daoForkSupport\":true,\"eip150Block\":0,\"eip150Hash\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"eip155Block\":0,\"eip158Block\":0,\"byzantiumBlock\":0,\"constantinopleBlock\":0,\"petersburgBlock\":0,\"istanbulBlock\":0,\"muirGlacierBlock\":0,\"berlinBlock\":0,\"londonBlock\":0,\"clique\":{\"period\":0,\"epoch\":0},\"arbitrum\":{\"EnableArbOS\":true,\"AllowDebugPrecompiles\":false,\"DataAvailabilityCommittee\":true,\"InitialArbOSVersion\":10,\"InitialChainOwner\":\"0x9eA24417Ee7722e7EFaf87d2D68d4bC6b41231F7\",\"GenesisBlockNum\":0}},\"rollup\":{\"bridge\":\"0xf958e56d431eA78C7444Cf6A6184Af732Ae6a8A3\",\"inbox\":\"0x8b842ad88AAffD63d52EC54f6428fb7ff83060a8\",\"sequencer-inbox\":\"0x5fD0cCc5D31748A44b43cf8DFBFA0FAA32665464\",\"rollup\":\"0x082742561295f6e1b43c4f5d1e2d52d7FfE082f1\",\"validator-utils\":\"0x32E35dBDCC3558F6A0f7c3af6B13380FC3E6ce97\",\"validator-wallet-creator\":\"0x237aed14AEA28032B12A389590875Ed8d1D12811\",\"deployed-at\":32449497}}]", - "name": "Xai Orbit Testnet" - }, - "conf": { - "env-prefix": "NITRO" - }, - "http": { - "addr": "0.0.0.0", - "api": [ - "eth", - "net", - "web3", - "arb", - "debug" - ], - "corsdomain": "*", - "port": 8547, - "rpcprefix": "/rpc", - "vhosts": "*" - }, - "log-type": "json", - "metrics": true, - "metrics-server": { - "addr": "0.0.0.0", - "port": 6070 - }, - "node": { - "caching": { - "archive": true - }, - "data-availability": { - "enable": true, - "parent-chain-node-url": "https://icy-thrilling-frog.arbitrum-goerli.quiknode.pro/4d27f3253823ff8ec0afbabc49cbe924bfc9acdb/", - "request-timeout": "5s", - "rest-aggregator": { - "enable": true, - "urls": [ - "https://testnet-das-mirror.xai-chain.net/" - ], - "wait-before-try-next": "2s" - }, - "sequencer-inbox-address": "0x5fD0cCc5D31748A44b43cf8DFBFA0FAA32665464" - }, - "feed": { - "input": { - "reconnect-initial-backoff": "50ms", - "reconnect-maximum-backoff": "800ms", - "timeout": "10s", - "url": "wss://testnet.xai-chain.net/feed" - } - }, - "forwarding-target": "https://testnet.xai-chain.net/rpc/", - "rpc": { - "tx-fee-cap": 0 - }, - "tx-lookup-limit": 0 - }, - "parent-chain": { - "connection": { - "url": "https://icy-thrilling-frog.arbitrum-goerli.quiknode.pro/4d27f3253823ff8ec0afbabc49cbe924bfc9acdb/" - }, - "id": 421613 - }, - "persistent": { - "chain": "/home/user/data/" - }, - "pprof": true, - "pprof-cfg": { - "addr": "0.0.0.0", - "port": "6071" - }, - "ws": { - "addr": "0.0.0.0", - "api": [ - "net", - "web3", - "eth", - "arb" - ], - "port": 8548, - "rpcprefix": "/ws", - "origins": "*" - } -}; - -// Check if the directory exists, if not create it -if (!fs.existsSync('./arbitrum/')) { - fs.mkdirSync('./arbitrum/', { recursive: 
true }); -} -fs.writeFileSync('./arbitrum/config.json', JSON.stringify(config, null, 2)); - -// Create an instance of Docker runtime -const docker = new Docker({ socketPath: '/var/run/docker.sock' }); - -// Pull the docker container for the nitro node -const image = await docker.pull('offchainlabs/nitro-node:v2.1.0-72ccc0c'); - -// Stop all running containers of the specified image -const containers = await docker.listContainers(); -for (const containerInfo of containers) { - if (containerInfo.Image === 'offchainlabs/nitro-node:v2.1.0-72ccc0c') { - await docker.getContainer(containerInfo.Id).stop(); - } -} - -// Create the docker container -const container = await docker.createContainer({ - Image: 'offchainlabs/nitro-node:v2.1.0-72ccc0c', - Cmd: ['--conf.file', '/home/user/.arbitrum/config.json', '--metrics', '--ws.port=8548', '--ws.addr=0.0.0.0', '--ws.origins=*'], - Tty: false, - ExposedPorts: { - '8547/tcp': {}, - '8548/tcp': {}, - '9642/tcp': {} - }, - HostConfig: { - Binds: [ - `${process.cwd()}/arbitrum:/home/user/.arbitrum`, - `${process.cwd()}/data:/home/user/data:delegated` - ], - PortBindings: { - '8547/tcp': [{ HostPort: '8547' }], - '8548/tcp': [{ HostPort: '8548' }], - '9642/tcp': [{ HostPort: '9642' }] - } - } -}); - -function isJsonString(str) { - try { - JSON.parse(str); - } catch (e) { - return false; - } - return true; -} - -const stdOut = new Writable({ - async write(chunk, encoding, callback) { - const out = chunk.toString('utf8'); - - // Split the output by new lines - const potentialJsons = out.split(/\n/); - - for (const potentialJson of potentialJsons) { - if (potentialJson.trim() !== '' && isJsonString(potentialJson)) { - const json = JSON.parse(potentialJson); - if (json.hasOwnProperty('err')) { - console.error(json); - } else { - console.log(json); - - // if there is an assertion and a state field then this means the validator has found a stateRoot we should process - if (json.hasOwnProperty('assertion') && json.hasOwnProperty('state') && isJsonString(json.state)) { - const state = JSON.parse(json.state); - - // Concatenate the blockHash and sendRoot - const concatenatedHashes = ethers.hexConcat([state.BlockHash, state.SendRoot]); - - // Create the confirm hash by keccak256 - const confirmHash = ethers.keccak256(concatenatedHashes); - - // create a JSON object that will get saved to the bucket - const jsonSave = { - assertion: json.assertion, - blockHash: state.BlockHash, - sendRoot: state.SendRoot, - confirmHash, - } - } - } - } else if (potentialJson.trim() !== '') { - console.info(potentialJson); - } - } - - callback(); - } -}); - -// Attach the stdout and stderr to custom streams -container.attach({stream: true, stdout: true, stderr: true}, function (err, stream) { - // Dockerode may demultiplex attach streams for you :) - container.modem.demuxStream(stream, stdOut, stdOut); -}); - -// Start the docker container -await container.start(); - -console.log('Docker container started successfully'); diff --git a/infrastructure/nitro-node-wrapper/package.json b/infrastructure/nitro-node-wrapper/package.json deleted file mode 100644 index 874e52b1a..000000000 --- a/infrastructure/nitro-node-wrapper/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "@sentry/nitro-node-wrapper", - "version": "1.0.0", - "description": "", - "main": "index.mjs", - "type": "module", - "scripts": { - "start": "zx index.mjs" - }, - "dependencies": { - "@google-cloud/storage": "7.7.0", - "dockerode": "4.0.0", - "ethers": "6.8.1", - "zx": "7.2.3" - }, - "keywords": [], - "author": "", 
- "license": "UNLICENSED" -} diff --git a/infrastructure/nitro-node-wrapper/publisher/index.mjs b/infrastructure/nitro-node-wrapper/publisher/index.mjs index 760e8cf39..52d28e13f 100644 --- a/infrastructure/nitro-node-wrapper/publisher/index.mjs +++ b/infrastructure/nitro-node-wrapper/publisher/index.mjs @@ -1,7 +1,8 @@ import Docker from "dockerode"; +import axios from 'axios'; import fs from 'fs'; import { Writable } from 'stream'; -import { concat, keccak256 } from "ethers"; +import { concat, keccak256, ethers } from "ethers"; import { Bucket, Storage } from "@google-cloud/storage"; // create an instance of the bucket @@ -16,28 +17,6 @@ const storage = new Storage({ const CLOUD_STORAGE_BUCKET = new Bucket(storage, process.env.BUCKET_NAME) const DOCKER_CONTAINER_NAME = 'xai-public-node'; - -// const CONFIG_TEMPLATE = 'xai-goerli.config.tpl.json'; -// const CONFIG_TEMPLATE = 'xai-mainnet.config.tpl.json'; - -// const PATH_TO_NODE_CONFIG_DIR = `${process.cwd()}/arbitrum`; -// const PATH_TO_NODE_CONFIG = path.join(PATH_TO_NODE_CONFIG_DIR, "config.json"); - -// //TODO we should remove this and have the actual config in the github or in the docker image -// const createNodeConfig = () => { -// // configure a config for the docker container -// const config = JSON.parse(fs.readFileSync(path.join("./templates", CONFIG_TEMPLATE))); -// config.chain["info-json"] = JSON.stringify(config.chain["info-json"]); - -// // Check if the directory exists, if not create it -// if (!fs.existsSync(PATH_TO_NODE_CONFIG_DIR)) { -// fs.mkdirSync(PATH_TO_NODE_CONFIG_DIR, { recursive: true }); -// } -// fs.writeFileSync(PATH_TO_NODE_CONFIG, JSON.stringify(config, null, 2)); -// console.log("Config created"); -// } - - function _tryParseJSONObject(jsonString) { try { var o = JSON.parse(jsonString); @@ -48,43 +27,36 @@ function _tryParseJSONObject(jsonString) { return null; }; -//Helper function for the assertion state object, which currently is not a valid JSON object -function _parseStateObject(stateStringObj) { - let parsed = _tryParseJSONObject(stateStringObj); - if (parsed == null) { - // Adding double quotes around the keys - stateStringObj = stateStringObj.replace(/(\w+):/g, '"$1":'); - // Adding double quotes around the hex values - stateStringObj = stateStringObj.replace(/(0x[a-fA-F0-9]+)/g, '"$1"'); - // Replace space with commas - stateStringObj = stateStringObj.trim().replace(/ /g, ','); - return _tryParseJSONObject(stateStringObj) +const _getSendRoot = async (blockHash) => { + try { + const response = await axios.post(`http://${DOCKER_CONTAINER_NAME}:8547/rpc`, { + jsonrpc: '2.0', + method: 'eth_getBlockByHash', + params: [blockHash, false], // Set to false if you don't need full transaction details + id: 1 + }); + + return response.data.result.sendRoot; + } catch (error) { + console.error('Error in loading block info to get sendRoot', error); + throw new Error('Error in loading block info to get sendRoot') } -} +} const onNewAssertion = async (json) => { - const state = _parseStateObject(json.state); - - if (state === null) { - console.error("Failed to parse state object", json.state); - return; - } - console.log("Has new assertion"); + const sendRoot = await _getSendRoot(json.blockHash); - // Concatenate the blockHash and sendRoot - const concatenatedHashes = concat([state.BlockHash, state.SendRoot]); - - // Create the confirm hash by keccak256 + // Create the confirm hash + const concatenatedHashes = concat([json.blockHash, sendRoot]); const confirmHash = keccak256(concatenatedHashes); // create a 
JSON object that will get saved to the bucket - // TODO save object to google bucket storage! const jsonSave = { - assertion: json.assertion, - blockHash: state.BlockHash, - sendRoot: state.SendRoot, + assertion: json.node, + blockHash: json.blockHash, + sendRoot: sendRoot, confirmHash, } @@ -103,18 +75,12 @@ const onJSONLog = (json) => { console.log("New container JSON log", json); // if there is an assertion and a state field then this means the validator has found a stateRoot we should process - if (json.hasOwnProperty('assertion') && json.hasOwnProperty('state')) { + if (json.hasOwnProperty('blockHash') && json.hasOwnProperty('msg') && json['msg'] == "found correct assertion") { onNewAssertion(json); } } } -const getAssertionsFromContainerLogs = async (container, since) => { - //TODO this is not working correctly, its not using the since param and returns the fist 50 logs since container start - const logs = await container.logs({ stdout: true, stderr: true, timestamps: true, since }); - // TODO get assertions from log -} - const dockerContainerOutputHandler = new Writable({ async write(chunk, encoding, callback) { const out = chunk.toString('utf8'); @@ -138,7 +104,7 @@ const dockerContainerOutputHandler = new Writable({ } }); -const setupDockerContainer = async () => { +const setupDockerContainerListener = async () => { const docker = new Docker(); let container; @@ -161,16 +127,7 @@ const setupDockerContainer = async () => { throw new Error("Did not find container " + DOCKER_CONTAINER_NAME); } - try { - //TODO get timestamp from last log - const lastContainerLog = Date.now() - (60 * 60 * 1000); - const assertionsFromLogs = await getAssertionsFromContainerLogs(container, lastContainerLog); - //TODO Post missed assertions - } catch (error) { - console.error("Failed to load recent container logs - continue with current stream", error); - } - - // TODO handle errors in here ? 
+ // TODO handle errors in here // Attach the stdout and stderr to custom streams container.attach({ stream: true, stdout: true, stderr: true }, function (err, stream) { // Dockerode may demultiplex attach streams for you :) @@ -200,7 +157,7 @@ const uploadToBucket = async (jsonSave) => { destination: destinationPath, public: true, metadata: { - contentType: "application/plain", //application/csv for excel or csv file upload + contentType: "application/plain", } }); fs.unlinkSync(fileName); @@ -212,11 +169,7 @@ const uploadToBucket = async (jsonSave) => { } } -const main = () => { - setupDockerContainer() - .catch(err => { - console.error("Error on runtime", err); - //TODO send notification here, container should be restart5ed by docker - }) -} -main(); \ No newline at end of file +setupDockerContainerListener() + .catch(err => { + console.error("Error on runtime", err); + }) \ No newline at end of file diff --git a/infrastructure/nitro-node-wrapper/publisher/package.json b/infrastructure/nitro-node-wrapper/publisher/package.json index 874e52b1a..a028d1937 100644 --- a/infrastructure/nitro-node-wrapper/publisher/package.json +++ b/infrastructure/nitro-node-wrapper/publisher/package.json @@ -11,6 +11,7 @@ "@google-cloud/storage": "7.7.0", "dockerode": "4.0.0", "ethers": "6.8.1", + "axios": "^1.6.7", "zx": "7.2.3" }, "keywords": [], diff --git a/infrastructure/nitro-node-wrapper/sample.env b/infrastructure/nitro-node-wrapper/sample.env deleted file mode 100644 index 0c7c36375..000000000 --- a/infrastructure/nitro-node-wrapper/sample.env +++ /dev/null @@ -1,12 +0,0 @@ -# Arb1 Node Settings -# The Etehreum mainnet RPC to sync the ARB1 node with (prob > 100K requests to sync) -ETH_RPC_URL= - -# GBUCKET Project ID -PROJECT_ID= -# GBUCKET Account Email -SERVICE_ACCOUNT_EMAIL= -# GBUCKET PRIVATE KEY -SERVICE_ACCOUNT_PRIVATE_KEY= -# GBUCKET BUCKET NAME -BUCKET_NAME= \ No newline at end of file From 4efe5f9679004907f57531d902a4e19951efaaab Mon Sep 17 00:00:00 2001 From: Scharief Date: Sun, 28 Jan 2024 23:13:09 +0100 Subject: [PATCH 4/7] udpate deployment scripts and config, fix docker compose permission issue by using volumes --- infrastructure/gcp-nitro-node/main.tf | 25 ++++++++- .../gcp-nitro-node/scripts/startup_script.tpl | 55 +++++++++---------- .../configs/xai-mainnet.config.json | 5 +- .../nitro-node-wrapper/docker-compose.yml | 14 +++-- 4 files changed, 59 insertions(+), 40 deletions(-) diff --git a/infrastructure/gcp-nitro-node/main.tf b/infrastructure/gcp-nitro-node/main.tf index 296f14bac..771d2e00c 100644 --- a/infrastructure/gcp-nitro-node/main.tf +++ b/infrastructure/gcp-nitro-node/main.tf @@ -15,6 +15,10 @@ resource "google_compute_address" "default" { name = "node-static-ip" } +resource "google_compute_address" "static_ip" { + name = "my-static-ip" +} + resource "google_service_account" "bucket_updater" { account_id = "bucket-updater" display_name = "Bucket Updater Service Account" @@ -42,12 +46,26 @@ resource "google_storage_bucket_iam_member" "bucket_updater" { member = "serviceAccount:${google_service_account.bucket_updater.email}" } +resource "google_compute_firewall" "outbound_access" { + name = "allow-outbound" + network = "default" + + allow { + protocol = "tcp" + ports = ["80", "443"] + } + + direction = "EGRESS" + destination_ranges = ["0.0.0.0/0"] +} // n1-standard-4 = 4 vCPUs and 15 GB RAM // size=2000 = 2000 GB resource "google_compute_instance" "default" { - name = "arbitrum-full-node" - machine_type = "n1-standard-4" + name = "arbitrum-full-node" + machine_type = 
"n1-standard-4" + deletion_protection = true + boot_disk { initialize_params { image = "ubuntu-os-cloud/ubuntu-2204-lts" @@ -57,6 +75,9 @@ resource "google_compute_instance" "default" { } network_interface { network = "default" + access_config { + nat_ip = google_compute_address.default.address + } } metadata_startup_script = local.startup_script diff --git a/infrastructure/gcp-nitro-node/scripts/startup_script.tpl b/infrastructure/gcp-nitro-node/scripts/startup_script.tpl index fd368327c..4d5d63170 100644 --- a/infrastructure/gcp-nitro-node/scripts/startup_script.tpl +++ b/infrastructure/gcp-nitro-node/scripts/startup_script.tpl @@ -1,17 +1,23 @@ #!/bin/bash +if ! id "tfadmin" &>/dev/null; then + sudo useradd -m -s /bin/bash tfadmin + # Set password or SSH keys as appropriate +fi + # Check if docker is installed if ! command -v docker &> /dev/null then - # Update the package lists for upgrades and new package installations - sudo apt-get update && - - # Install Docker - sudo apt-get install docker.io -y + sudo apt-get update + sudo apt-get install apt-transport-https ca-certificates curl software-properties-common -y + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + sudo apt-get update + sudo apt-get install docker-ce -y fi # Add the user to the docker group -sudo usermod -aG docker $(whoami) +sudo usermod -aG docker tfadmin # Modify useradd defaults to add new users to the docker group if grep -q "^GROUP=" /etc/default/useradd; then @@ -21,27 +27,13 @@ else fi # Create the node data directories -sudo mkdir -p /opt/arbitrum -sudo mkdir -p /opt/xai -sudo mkdir -p /opt/node/build - -# Change the owner of the directory to the current user and group -sudo chown -R $(whoami):$(id -gn) /opt/arbitrum -sudo chown -R $(whoami):$(id -gn) /opt/xai -sudo chown -R $(whoami):$(id -gn) /opt/node/build - -# Change the permissions of the directory so that any user can edit files in it -sudo chmod -R a+rwX /opt/arbitrum -sudo chmod -R a+rwX /opt/xai -sudo chmod -R a+rwX /opt/node/build +sudo mkdir -p /opt/public-node/build -# Stop all running containers -docker stop --time=300 $(docker ps -aq) -docker rm $(docker ps -aq) +cd /opt/public-node/build -cd /opt/node/build -if [ -f "/opt/node/build/docker-compose.yml" ]; then +if [ -f "/opt/public-node/build/docker-compose.yml" ]; then # If we already have a docker-compsoe we will try to remove all containers and delete the outdated versions + docker compose stop -t 300 docker compose down sudo rm -f docker-compose.yml sudo rm -f xai-mainnet.config.json @@ -51,11 +43,14 @@ fi curl -o docker-compose.yml https://storage.googleapis.com/xai-sentry-public-node/node-config/docker-compose.yml curl -o xai-mainnet.config.json https://storage.googleapis.com/xai-sentry-public-node/node-config/xai-mainnet.config.json +# Change the owner of the directory to the current user and group +sudo chown -R tfadmin:tfadmin /opt/public-node + # EPORT ENV VARS -EXPORT ETH_RPC_URL=${eth_rpc_url} -EXPORT PROJECT_ID=${gcp_project_id} -EXPORT SERVICE_ACCOUNT_EMAIL=${service_account_email} -EXPORT SERVICE_ACCOUNT_PRIVATE_KEY=${service_account_api_key} -EXPORT BUCKET_NAME=${bucket_name} +export ETH_RPC_URL=${eth_rpc_url} +export PROJECT_ID=${gcp_project_id} +export 
SERVICE_ACCOUNT_EMAIL=${service_account_email} +export SERVICE_ACCOUNT_PRIVATE_KEY=${service_account_api_key} +export BUCKET_NAME=${bucket_name} -docker compose up -d \ No newline at end of file +sudo -u tfadmin docker compose up -d \ No newline at end of file diff --git a/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json b/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json index a1bbe0dfd..bc1ae38ac 100644 --- a/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json +++ b/infrastructure/nitro-node-wrapper/configs/xai-mainnet.config.json @@ -17,7 +17,6 @@ "debug" ], "corsdomain": "*", - "port": 8547, "rpcprefix": "/rpc", "vhosts": "*" }, @@ -31,7 +30,7 @@ "data-availability": { "enable": true, "sequencer-inbox-address": "0x995a9d3ca121D48d21087eDE20bc8acb2398c8B1", - "parent-chain-node-url": "https://tame-alpha-violet.arbitrum-mainnet.quiknode.pro/d55a31b32f04c82b0e1bcb77f1fc6dcf53147f2a/", + "parent-chain-node-url": "http://arb1-nitro-node:8547", "rest-aggregator": { "enable": true, "online-url-list": "https://xai-chain.net/das-servers" @@ -54,7 +53,7 @@ }, "parent-chain": { "connection": { - "url": "https://tame-alpha-violet.arbitrum-mainnet.quiknode.pro/d55a31b32f04c82b0e1bcb77f1fc6dcf53147f2a/" + "url": "http://arb1-nitro-node:8547" }, "id": 42161 }, diff --git a/infrastructure/nitro-node-wrapper/docker-compose.yml b/infrastructure/nitro-node-wrapper/docker-compose.yml index bb0fd1d77..9fdb1a0d9 100644 --- a/infrastructure/nitro-node-wrapper/docker-compose.yml +++ b/infrastructure/nitro-node-wrapper/docker-compose.yml @@ -13,7 +13,7 @@ services: --http.addr=0.0.0.0 --http.vhosts=* volumes: - - /opt/arbitrum:/home/user/.arbitrum + - arb1-data:/home/user/.arbitrum restart: unless-stopped dns: - 8.8.8.8 @@ -29,14 +29,14 @@ services: - --ws.addr=0.0.0.0 - --ws.origins=* volumes: - - /opt/node/build/xai-mainnet.config.json:/home/user/node-config/config.json - - /opt/xai:/home/user/.arbitrum + - /opt/public-node/build/xai-mainnet.config.json:/home/user/node-config/config.json + - xai-data:/home/user/.arbitrum depends_on: - nitro-node restart: unless-stopped publisher: - image: cryptitaustria/xai-public-node-publisher:latest + image: xaidevelopment/xai-node:latest container_name: js-publisher environment: - PROJECT_ID=${PROJECT_ID} @@ -47,4 +47,8 @@ services: - /var/run/docker.sock:/var/run/docker.sock depends_on: - xai-node - restart: unless-stopped \ No newline at end of file + restart: unless-stopped + +volumes: + arb1-data: {} + xai-data: {} \ No newline at end of file From b55337628d9b132ae24caf5bbd2ce8ffbcfc208c Mon Sep 17 00:00:00 2001 From: Scharief Date: Thu, 1 Feb 2024 12:35:27 +0100 Subject: [PATCH 5/7] update save assertion by confirmHash --- infrastructure/nitro-node-wrapper/publisher/README.md | 7 +++---- infrastructure/nitro-node-wrapper/publisher/index.mjs | 6 +++--- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/infrastructure/nitro-node-wrapper/publisher/README.md b/infrastructure/nitro-node-wrapper/publisher/README.md index 85b5ccd67..833dd0ff0 100644 --- a/infrastructure/nitro-node-wrapper/publisher/README.md +++ b/infrastructure/nitro-node-wrapper/publisher/README.md @@ -4,7 +4,6 @@ A Javascript runtime that reads from a local XAI Node and publishes assertions t ### Create & Publish the image -- `docker login` -- `docker build -t /xai-public-node-publisher:latest .` -- `docker tag /xai-public-node-publisher:latest /xai-public-node-publisher:latest` -- `docker push /xai-public-node-publisher:latest` \ No newline at 
end of file +- `docker build -t xaidevelopment/xai-node:latest .` +- `docker tag xaidevelopment/xai-node:latest xaidevelopment/xai-node:latest` +- `docker push xaidevelopment/xai-node:latest` \ No newline at end of file diff --git a/infrastructure/nitro-node-wrapper/publisher/index.mjs b/infrastructure/nitro-node-wrapper/publisher/index.mjs index 52d28e13f..7f5b33e9b 100644 --- a/infrastructure/nitro-node-wrapper/publisher/index.mjs +++ b/infrastructure/nitro-node-wrapper/publisher/index.mjs @@ -2,7 +2,7 @@ import Docker from "dockerode"; import axios from 'axios'; import fs from 'fs'; import { Writable } from 'stream'; -import { concat, keccak256, ethers } from "ethers"; +import { concat, keccak256 } from "ethers"; import { Bucket, Storage } from "@google-cloud/storage"; // create an instance of the bucket @@ -140,7 +140,7 @@ const setupDockerContainerListener = async () => { const uploadToBucket = async (jsonSave) => { - const fileName = `${jsonSave.blockHash}.json`; + const fileName = `${jsonSave.confirmHash}.json`; const localPath = `./${fileName}`; const destinationPath = `assertions/${fileName}`; @@ -160,7 +160,7 @@ const uploadToBucket = async (jsonSave) => { contentType: "application/plain", } }); - fs.unlinkSync(fileName); + fs.unlinkSync(localPath); return result[0].metadata.mediaLink; } catch (error) { From 0ffaef401eae79d0480ca4b0d3767da677fdaa30 Mon Sep 17 00:00:00 2001 From: CryptIT Austria Date: Mon, 4 Mar 2024 22:42:49 +0100 Subject: [PATCH 6/7] update xai node image for blockchain update --- infrastructure/nitro-node-wrapper/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/nitro-node-wrapper/docker-compose.yml b/infrastructure/nitro-node-wrapper/docker-compose.yml index 9fdb1a0d9..c98c5c338 100644 --- a/infrastructure/nitro-node-wrapper/docker-compose.yml +++ b/infrastructure/nitro-node-wrapper/docker-compose.yml @@ -20,7 +20,7 @@ services: - 8.8.4.4 xai-node: - image: offchainlabs/nitro-node:v2.2.3-beta.1-3f67f04 + image: offchainlabs/nitro-node:v2.3.0-3e14543 container_name: xai-public-node command: - --conf.file=/home/user/node-config/config.json From 10fe23eafa2bbc0bbc98aebd8bf9b3d7cc9f3fc2 Mon Sep 17 00:00:00 2001 From: CryptIT Austria Date: Mon, 11 Mar 2024 18:46:19 +0100 Subject: [PATCH 7/7] update node software for march 14 update --- infrastructure/nitro-node-wrapper/docker-compose.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/infrastructure/nitro-node-wrapper/docker-compose.yml b/infrastructure/nitro-node-wrapper/docker-compose.yml index c98c5c338..ef56326a3 100644 --- a/infrastructure/nitro-node-wrapper/docker-compose.yml +++ b/infrastructure/nitro-node-wrapper/docker-compose.yml @@ -3,10 +3,11 @@ version: '3' services: nitro-node: container_name: arb1-nitro-node - image: offchainlabs/nitro-node:v2.2.2-8f33fea + image: offchainlabs/nitro-node:v2.3.1-26fad6f command: --init.url="https://snapshot.arbitrum.foundation/arb1/nitro-pruned.tar" --parent-chain.connection.url=${ETH_RPC_URL} + --parent-chain.blob-client.beacon-url=${ETH_RPC_URL} --chain.id=42161 --http.api=net,web3,eth --http.corsdomain=* @@ -20,7 +21,7 @@ services: - 8.8.4.4 xai-node: - image: offchainlabs/nitro-node:v2.3.0-3e14543 + image: offchainlabs/nitro-node:v2.3.1-26fad6f container_name: xai-public-node command: - --conf.file=/home/user/node-config/config.json
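
---

The last two patches only bump the `offchainlabs/nitro-node` image tags and add the `--parent-chain.blob-client.beacon-url` flag, so a VM already provisioned by the earlier startup script has to pick up the republished compose file and recreate its containers. Below is a minimal rollover sketch, not part of the patch series itself, assuming the updated `docker-compose.yml` and `xai-mainnet.config.json` have been pushed to the same bucket path the startup script pulls from, and that the environment variables the startup script exports (`ETH_RPC_URL`, `PROJECT_ID`, `SERVICE_ACCOUNT_EMAIL`, `SERVICE_ACCOUNT_PRIVATE_KEY`, `BUCKET_NAME`) are set in the current shell:

```bash
#!/bin/bash
set -euo pipefail

# Same build directory the startup script uses for the compose stack
cd /opt/public-node/build

# Gracefully stop the running stack; the long timeout gives the nitro nodes time to flush state
docker compose stop -t 300
docker compose down

# Fetch the updated compose file and chain config from the public bucket
curl -o docker-compose.yml https://storage.googleapis.com/xai-sentry-public-node/node-config/docker-compose.yml
curl -o xai-mainnet.config.json https://storage.googleapis.com/xai-sentry-public-node/node-config/xai-mainnet.config.json

# Pull the new images and recreate the containers.
# ETH_RPC_URL and the other variables referenced by docker-compose.yml must be
# exported in this shell (see startup_script.tpl for the full list).
docker compose pull
docker compose up -d

# Follow the XAI node container logs to confirm it comes back up on the new image
docker logs -f xai-public-node
```

The steps mirror what `startup_script.tpl` already does on first boot (stop, re-download, `docker compose up -d`), so running them manually keeps an existing instance consistent with a freshly provisioned one; the named volumes `arb1-data` and `xai-data` introduced in patch 4 preserve chain state across the container recreation.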