Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Optimization merge into dev #137

Open
wants to merge 5 commits into
base: dev
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions Dockerfile.local
Original file line number Diff line number Diff line change
@@ -3,13 +3,23 @@ FROM docker.io/library/golang:1.20-alpine3.17 AS builder

RUN apk --no-cache add build-base linux-headers git bash ca-certificates libstdc++

# install rust env
# NOTE(review): build-base is already installed by the preceding apk line in
# this stage — the duplicate here is harmless but redundant.
RUN apk add --no-cache curl build-base
# Non-interactive rustup install; toolchain lands under /root/.cargo.
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Nightly is required below for the poseidon_goldilocks crate build.
RUN rustup install nightly

WORKDIR /app
# Copy module files first so `go mod download` is cached independently of
# source changes.
ADD go.mod go.mod
ADD go.sum go.sum

RUN go mod download
ADD . .

# Build the Rust Poseidon-Goldilocks static/shared library in place so the
# cgo package in smt/pkg/hash can link against it, then drop intermediates.
WORKDIR /app/smt/pkg/hash/poseidon_goldilocks
RUN cargo +nightly build --release && cp ./target/release/libposeidon_goldilocks.* . && cargo clean
WORKDIR /app

RUN --mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/tmp/go-build \
--mount=type=cache,target=/go/pkg/mod \
102 changes: 77 additions & 25 deletions smt/pkg/blockinfo/block_info.go
Original file line number Diff line number Diff line change
@@ -4,14 +4,15 @@ import (
"context"
"encoding/hex"
"math/big"
"runtime"
"sync"

ethTypes "github.com/ledgerwatch/erigon/core/types"

"github.com/gateway-fm/cdk-erigon-lib/common"
"github.com/ledgerwatch/erigon/smt/pkg/smt"
"github.com/ledgerwatch/erigon/smt/pkg/utils"
zktx "github.com/ledgerwatch/erigon/zk/tx"

"github.com/gateway-fm/cdk-erigon-lib/common"
"github.com/ledgerwatch/log/v3"
)

@@ -50,34 +51,39 @@ func BuildBlockInfoTree(
"l1BlockHash", l1BlockHash.String(),
)
var logIndex int64 = 0
for i, txInfo := range *transactionInfos {
receipt := txInfo.Receipt
t := txInfo.Tx
jobs := make(chan TxParserJob, len(*transactionInfos))
results := make(chan interface{}, len(*transactionInfos))
var wg sync.WaitGroup

l2TxHash, err := zktx.ComputeL2TxHash(
t.GetChainID().ToBig(),
t.GetValue(),
t.GetPrice(),
t.GetNonce(),
t.GetGas(),
t.GetTo(),
txInfo.Signer,
t.GetData(),
)
if err != nil {
return nil, err
for workerId := 1; workerId <= runtime.NumCPU(); workerId++ {
wg.Add(1)
go txParserWorker(infoTree, blockNumber, jobs, results, &wg)
}

for i, txInfo := range *transactionInfos {
jobs <- TxParserJob{
txIdx: i,
txInfo: txInfo,
logIndex: logIndex,
}

log.Trace("info-tree-tx", "block", blockNumber, "idx", i, "hash", l2TxHash.String())
logIndex += int64(len(txInfo.Receipt.Logs))
}
close(jobs)

wg.Wait()
close(results)

genKeys, genVals, err := infoTree.GenerateBlockTxKeysVals(&l2TxHash, i, receipt, logIndex, receipt.CumulativeGasUsed, txInfo.EffectiveGasPrice)
if err != nil {
return nil, err
}
keys = append(keys, genKeys...)
vals = append(vals, genVals...)
for data := range results {
switch data.(type) {
case ParsedKeyVals:
txKeysVals := data.(ParsedKeyVals)
keys = append(keys, txKeysVals.keys...)
vals = append(vals, txKeysVals.vals...)

logIndex += int64(len(receipt.Logs))
default:
log.Error("not supported data type")
}
}

key, val, err := generateBlockGasUsed(blockGasUsed)
@@ -358,3 +364,49 @@ func (b *BlockInfoTree) GenerateBlockTxKeysVals(

return keys, vals, nil
}

// TxParserJob is the unit of work handed to txParserWorker: one executed
// transaction together with the positional data needed to generate its
// block-info-tree entries.
type TxParserJob struct {
	txIdx    int            // position of the tx within the block
	txInfo   ExecutedTxInfo // executed tx plus receipt/signer/effective gas price
	logIndex int64          // cumulative log index at the start of this tx
}

// ParsedKeyVals is the per-transaction result produced by txParserWorker:
// the SMT node keys and their corresponding values for one transaction.
type ParsedKeyVals struct {
	keys []*utils.NodeKey
	vals []*utils.NodeValue8
}

// txParserWorker consumes TxParserJob items from jobs, computes each
// transaction's L2 hash and its block-info-tree key/value pairs, and sends
// the result on results as a ParsedKeyVals.
//
// The caller must close jobs once all work is queued, and wg.Wait before
// closing results. On a per-transaction failure the worker logs the error
// and skips that transaction — previously it logged and carried on, which
// pushed a zero-value l2TxHash / nil keys+vals into results and silently
// corrupted the tree.
//
// NOTE(review): errors are still not propagated to the caller, and results
// arrive in nondeterministic worker order; fixing either requires changing
// BuildBlockInfoTree's result handling as well — confirm whether key/val
// ordering matters to tree construction.
func txParserWorker(infoTree *BlockInfoTree, blockNumber uint64, jobs <-chan TxParserJob, results chan<- interface{}, wg *sync.WaitGroup) {
	defer wg.Done()

	for job := range jobs {
		receipt := job.txInfo.Receipt
		t := job.txInfo.Tx

		l2TxHash, err := zktx.ComputeL2TxHash(
			t.GetChainID().ToBig(),
			t.GetValue(),
			t.GetPrice(),
			t.GetNonce(),
			t.GetGas(),
			t.GetTo(),
			job.txInfo.Signer,
			t.GetData(),
		)
		if err != nil {
			// Skip this tx: proceeding would feed a zero-value hash into
			// GenerateBlockTxKeysVals below.
			log.Error("Fail to ComputeL2TxHash", "TxHash", t.Hash(), "error", err)
			continue
		}

		log.Trace("info-tree-tx", "block", blockNumber, "idx", job.txIdx, "hash", l2TxHash.String())

		genKeys, genVals, err := infoTree.GenerateBlockTxKeysVals(&l2TxHash, job.txIdx, receipt, job.logIndex, receipt.CumulativeGasUsed, job.txInfo.EffectiveGasPrice)
		if err != nil {
			// Same rationale: never publish nil keys/vals for a failed tx.
			log.Error("Fail to GenerateBlockTxKeysVals", "TxHash", t.Hash(), "error", err)
			continue
		}

		results <- ParsedKeyVals{
			keys: genKeys,
			vals: genVals,
		}
	}
}
48 changes: 48 additions & 0 deletions smt/pkg/hash/poseidon_godilocks_hash.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
package hash

/*
#cgo LDFLAGS: -L./poseidon_goldilocks -lposeidon_goldilocks
#include "./poseidon_goldilocks/poseidon_goldilocks.h"
*/
import "C"
import (
"unsafe"
)

// cBufferToGoSlice copies up to four 64-bit words out of the given
// (Rust-allocated) buffer into a Go-owned fixed-size array. A nil or empty
// buffer yields the zero array; a buffer longer than four words is
// truncated by copy's min-length semantics.
func cBufferToGoSlice(cBuffer *C.Buffer) [4]uint64 {
	var out [4]uint64
	if cBuffer == nil || cBuffer.len == 0 {
		return out
	}

	// View the C memory as a Go slice without copying, then copy the
	// words we keep so the result no longer aliases foreign memory.
	src := unsafe.Slice((*uint64)(cBuffer.data), cBuffer.len)
	copy(out[:], src)

	return out
}

// goSliceToCBuffer wraps a Go slice's backing array in a C.Buffer so it can
// be passed across the cgo boundary without copying. An empty slice maps to
// the zero-value Buffer. The Buffer is only valid while the Go slice is
// alive and for the duration of the C call it is passed to.
func goSliceToCBuffer(in []uint64) C.Buffer {
	var buf C.Buffer
	if len(in) > 0 {
		buf.data = (*C.uint64_t)(unsafe.Pointer(&in[0]))
		buf.len = C.size_t(len(in))
	}
	return buf
}

func Hash(in [8]uint64, capacity [4]uint64) ([4]uint64, error) {
input := make([]uint64, 0, len(in)+len(capacity))
input = append(input, in[:]...)
input = append(input, capacity[:]...)

cInput := goSliceToCBuffer(input[:])
cHashRst := C.rustPoseidongoldHash(cInput)
rst := cBufferToGoSlice(&cHashRst)

C.free_buf(cHashRst)

return rst, nil
}
93 changes: 93 additions & 0 deletions smt/pkg/hash/poseidon_godilocks_hash_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
package hash

import (
"github.com/stretchr/testify/assert"
"math/rand"
"testing"
"time"
)

// TestPoseidonHashFunc checks the cgo-backed Hash against fixed test
// vectors: each case pairs an 8-word input and 4-word capacity with the
// expected 4-word digest.
// NOTE(review): the expected digests are presumably generated from the
// Rust poseidon_goldilocks reference implementation — confirm their
// provenance so the vectors can be regenerated if the crate is updated.
func TestPoseidonHashFunc(t *testing.T) {
	tests := []struct {
		input    [8]uint64 // hash input words
		capacity [4]uint64 // capacity/state words (all-zero in these vectors)
		result   [4]uint64 // expected digest
	}{
		{
			input:    [8]uint64{5577006791947779410, 8674665223082153551, 15352856648520921629, 13260572831089785859, 3916589616287113937, 6334824724549167320, 9828766684487745566, 10667007354186551956},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{7986352640330579808, 16698976638447200418, 14099060853601989680, 1806029100513259151},
		},
		{
			input:    [8]uint64{894385949183117216, 11998794077335055257, 4751997750760398084, 7504504064263669287, 11199607447739267382, 3510942875414458836, 12156940908066221323, 4324745483838182873},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{4912038160490892692, 4103686885524875147, 10541378107520313959, 17279065624757782690},
		},
		{
			input:    [8]uint64{11833901312327420776, 11926759511765359899, 6263450610539110790, 11239168150708129139, 1874068156324778273, 3328451335138149956, 14486903973548550719, 7955079406183515637},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{7890399244011379224, 3893786354640587971, 13560196409483468805, 2948784903943663078},
		},
		{
			input:    [8]uint64{11926873763676642186, 2740103009342231109, 6941261091797652072, 1905388747193831650, 17204678798284737396, 15649472107743074779, 4831389563158288344, 261049867304784443},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{14700294369684741534, 14895735373969203815, 16434826207003907392, 17867437290801947189},
		},
		{
			input:    [8]uint64{10683692646452562431, 5600924393587988459, 18218388313430417611, 9956202364908137547, 5486140987150761883, 9768663798983814715, 6382800227808658932, 2781055864473387780},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{17625602373883346164, 18149609703473926001, 2824810185344003270, 9975387089464755098},
		},
		{
			input:    [8]uint64{10821471013040158923, 4990765271833742716, 14242321332569825828, 11792151447964398879, 13126262220165910460, 14117161486975057715, 2338498362660772719, 2601737961087659062},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{2691796281516679790, 14584425213549820217, 14318483276994184927, 17940735015359233298},
		},
		{
			input:    [8]uint64{7273596521315663110, 3337066551442961397, 17344948852394588913, 11963748953446345529, 8249030965139585917, 898860202204764712, 9010467728050264449, 9908585559158765387},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{8150678766998536923, 14721645535435562842, 2012097115710913290, 6143064933387483688},
		},
		{
			input:    [8]uint64{11273630029763932141, 15505210698284655633, 2227583514184312746, 12096659438561119542, 8603989663476771718, 6842348953158377901, 7388428680384065704, 6735196588112087610},
			capacity: [4]uint64{0, 0, 0, 0},
			result:   [4]uint64{9286804905202849659, 7450030268744143082, 2587697138684996149, 603260420412321806},
		},
	}

	for _, test := range tests {
		rst, err := Hash(test.input, test.capacity)
		assert.Nil(t, err, "fail to calculate poseidon hash")

		// [4]uint64 is comparable, so a direct != suffices here.
		if rst != test.result {
			t.Errorf("parse doesn't match, expected: %v, got: %v", test.result, rst)
		}
	}
}

// RandCalPoseidonHashFunc hashes one randomly generated 8-word input with a
// random 4-word capacity; it exists purely as the body of the randomized
// benchmark below.
//
// It uses a locally constructed rand.Rand rather than rand.Seed on the
// global source: rand.Seed is deprecated since Go 1.20, and mutating the
// shared global source from a benchmark helper is racy if benchmarks run
// concurrently.
func RandCalPoseidonHashFunc() {
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))

	input := [8]uint64{rng.Uint64(), rng.Uint64(), rng.Uint64(), rng.Uint64(), rng.Uint64(), rng.Uint64(), rng.Uint64(), rng.Uint64()}
	capacity := [4]uint64{rng.Uint64(), rng.Uint64(), rng.Uint64(), rng.Uint64()}
	// Hash currently never returns a non-nil error; result is discarded
	// because only the hashing cost is being measured.
	_, _ = Hash(input, capacity)
}

// BenchmarkRandCalculateHash_rust measures the Rust-backed Poseidon hash on
// freshly randomized inputs each iteration (input generation is included in
// the measured time).
func BenchmarkRandCalculateHash_rust(b *testing.B) {
	for n := 0; n < b.N; n++ {
		RandCalPoseidonHashFunc()
	}
}

// CalPoseidonHashFunc hashes one fixed 8-word input with an all-zero
// capacity; it exists purely as the body of the fixed-input benchmark below.
func CalPoseidonHashFunc() {
	words := [8]uint64{5577006791947779410, 8674665223082153551, 15352856648520921629, 13260572831089785859, 3916589616287113937, 6334824724549167320, 9828766684487745566, 10667007354186551956}
	var cap4 [4]uint64

	// Result and (always-nil) error are intentionally discarded — only the
	// hashing cost matters here.
	_, _ = Hash(words, cap4)
}

// BenchmarkCalculateHash_rust measures the Rust-backed Poseidon hash on a
// fixed input, isolating pure hashing cost from input generation.
func BenchmarkCalculateHash_rust(b *testing.B) {
	for n := 0; n < b.N; n++ {
		CalPoseidonHashFunc()
	}
}
1 change: 1 addition & 0 deletions smt/pkg/hash/poseidon_goldilocks/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/target
Loading
Loading