Typos checked
clint committed Dec 19, 2023
1 parent 2b3e079 commit 3ea1210
Showing 18 changed files with 58 additions and 49 deletions.
2 changes: 1 addition & 1 deletion .github/workflow/ci.yml → .github/workflows/ci.yml
@@ -31,7 +31,7 @@ jobs:
cache: 'npm'
cache-dependency-path: 'package-lock.json'
- name: Install dependencies
run: npm ci
run: npm i
# https://github.com/crate-ci/typos/blob/master/docs/github-action.md
- name: check spelling
uses: crate-ci/typos@master
2 changes: 1 addition & 1 deletion contracts/Storage.sol
@@ -128,7 +128,7 @@ contract Storage is ZkLinkAcceptor, Config {
uint256 timestamp; // Rollup block timestamp, have the same format as Ethereum block constant
bytes32 stateHash; // Root hash of the rollup state
bytes32 commitment; // Verified input for the ZkLink circuit
SyncHash[] syncHashs; // Used for cross chain block verify
SyncHash[] syncHashes; // Used for cross chain block verify
}
struct SyncHash {
uint8 chainId;
34 changes: 17 additions & 17 deletions contracts/ZkLink.sol
@@ -244,13 +244,13 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster {
uint64 priorityReqCommitted,
bytes memory onchainOpsOffsetCommitment,
uint256 slaverChainNum,
bytes32[] memory onchainOperationPubdataHashs
bytes32[] memory onchainOperationPubdataHashes
) = collectOnchainOps(_newBlock);

// Create block commitment for verification proof
bytes32 commitment = createBlockCommitment(_previousBlock, _newBlock, onchainOpsOffsetCommitment);
// Create synchronization hash for cross chain block verify
SyncHash[] memory syncHashs = createSyncHash(_previousBlock.syncHashs, _newBlock, slaverChainNum, onchainOperationPubdataHashs);
SyncHash[] memory syncHashes = createSyncHash(_previousBlock.syncHashes, _newBlock, slaverChainNum, onchainOperationPubdataHashes);

return StoredBlockInfo(
_newBlock.blockNumber,
@@ -259,7 +259,7 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster {
_newBlock.timestamp,
_newBlock.newStateHash,
commitment,
syncHashs
syncHashes
);
}

@@ -271,18 +271,18 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster {
/// offsetsCommitment - array where 1 is stored in chunk where onchainOperation begins and other is 0 (used in commitments)
/// slaverChainNum - the slaver chain num
/// onchainOperationPubdatas - onchain operation (Deposits, ChangePubKeys, Withdraws, ForcedExits, FullExits) pubdatas group by chain id (used in cross chain block verify)
function collectOnchainOps(CommitBlockInfo memory _newBlockData) internal view returns (bytes32 processableOperationsHash, uint64 priorityOperationsProcessed, bytes memory offsetsCommitment, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashs) {
function collectOnchainOps(CommitBlockInfo memory _newBlockData) internal view returns (bytes32 processableOperationsHash, uint64 priorityOperationsProcessed, bytes memory offsetsCommitment, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashes) {
bytes memory pubData = _newBlockData.publicData;
// pubdata length must be a multiple of CHUNK_BYTES
require(pubData.length % CHUNK_BYTES == 0, "h0");
offsetsCommitment = new bytes(pubData.length / CHUNK_BYTES);
priorityOperationsProcessed = 0;
(slaverChainNum, onchainOperationPubdataHashs) = initOnchainOperationPubdataHashs();
(slaverChainNum, onchainOperationPubdataHashes) = initOnchainOperationPubdataHashes();
processableOperationsHash = EMPTY_STRING_KECCAK;

// early return to save once slot read
if (_newBlockData.onchainOperations.length == 0) {
return (processableOperationsHash, priorityOperationsProcessed, offsetsCommitment, slaverChainNum, onchainOperationPubdataHashs);
return (processableOperationsHash, priorityOperationsProcessed, offsetsCommitment, slaverChainNum, onchainOperationPubdataHashes);
}
uint64 uncommittedPriorityRequestsOffset = firstPriorityRequestId + totalCommittedPriorityRequests;
for (uint256 i = 0; i < _newBlockData.onchainOperations.length; ++i) {
@@ -318,7 +318,7 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster {
// group onchain operations pubdata hash by chain id for slaver chains
if (chainId != CHAIN_ID) {
uint256 chainOrder = chainId - 1;
onchainOperationPubdataHashs[chainOrder] = Utils.concatHash(onchainOperationPubdataHashs[chainOrder], opPubData);
onchainOperationPubdataHashes[chainOrder] = Utils.concatHash(onchainOperationPubdataHashes[chainOrder], opPubData);
}
if (processablePubData.length > 0) {
// concat processable onchain operations pubdata hash of current chain
@@ -344,40 +344,40 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster {
}

/// @dev Create synchronization hash for cross chain block verify
function createSyncHash(SyncHash[] memory preBlockSyncHashs, CommitBlockInfo memory _newBlock, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashs) internal pure returns (SyncHash[] memory syncHashs) {
syncHashs = new SyncHash[](slaverChainNum);
function createSyncHash(SyncHash[] memory preBlockSyncHashes, CommitBlockInfo memory _newBlock, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashes) internal pure returns (SyncHash[] memory syncHashes) {
syncHashes = new SyncHash[](slaverChainNum);
uint256 chainOrder = 0;
for (uint8 i = 0; i < onchainOperationPubdataHashs.length; ++i) {
for (uint8 i = 0; i < onchainOperationPubdataHashes.length; ++i) {
uint8 chainId = i + 1;
if (chainId == CHAIN_ID) {
continue;
}
uint256 chainIndex = 1 << chainId - 1;
if (chainIndex & ALL_CHAINS == chainIndex) {
bytes32 preBlockSyncHash = EMPTY_STRING_KECCAK;
for (uint j = 0; j < preBlockSyncHashs.length; ++j) {
SyncHash memory _preBlockSyncHash = preBlockSyncHashs[j];
for (uint j = 0; j < preBlockSyncHashes.length; ++j) {
SyncHash memory _preBlockSyncHash = preBlockSyncHashes[j];
if (_preBlockSyncHash.chainId == chainId) {
preBlockSyncHash = _preBlockSyncHash.syncHash;
break;
}
}
// only append syncHash if onchain op exist in pubdata
bytes32 newBlockSyncHash = preBlockSyncHash;
bytes32 onchainOperationPubdataHash = onchainOperationPubdataHashs[i];
bytes32 onchainOperationPubdataHash = onchainOperationPubdataHashes[i];
if (onchainOperationPubdataHash != EMPTY_STRING_KECCAK) {
newBlockSyncHash = createSlaverChainSyncHash(preBlockSyncHash, _newBlock.blockNumber, _newBlock.newStateHash, onchainOperationPubdataHash);
}
syncHashs[chainOrder] = SyncHash(chainId, newBlockSyncHash);
syncHashes[chainOrder] = SyncHash(chainId, newBlockSyncHash);
chainOrder++;
}
}
}

/// @dev init onchain op pubdata hash for all slaver chains
function initOnchainOperationPubdataHashs() internal pure returns (uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashs) {
function initOnchainOperationPubdataHashes() internal pure returns (uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashes) {
slaverChainNum = 0;
onchainOperationPubdataHashs = new bytes32[](MAX_CHAIN_ID);
onchainOperationPubdataHashes = new bytes32[](MAX_CHAIN_ID);
for(uint8 i = 0; i < MAX_CHAIN_ID; ++i) {
uint8 chainId = i + 1;
if (chainId == CHAIN_ID) {
@@ -386,7 +386,7 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster {
uint256 chainIndex = 1 << i;
if (chainIndex & ALL_CHAINS == chainIndex) {
slaverChainNum++;
onchainOperationPubdataHashs[i] = EMPTY_STRING_KECCAK;
onchainOperationPubdataHashes[i] = EMPTY_STRING_KECCAK;
}
}
}
4 changes: 2 additions & 2 deletions contracts/ZkLinkPeriphery.sol
@@ -355,8 +355,8 @@ contract ZkLinkPeriphery is ReentrancyGuard, Storage, Events {
if (hashStoredBlockInfo(_block) != storedBlockHashes[blockNumber]) {
return false;
}
for (uint8 i = 0; i < _block.syncHashs.length; ++i) {
SyncHash memory sync = _block.syncHashs[i];
for (uint8 i = 0; i < _block.syncHashes.length; ++i) {
SyncHash memory sync = _block.syncHashes[i];
bytes32 remoteSyncHash = synchronizedChains[sync.chainId];
if (remoteSyncHash != sync.syncHash) {
return false;
4 changes: 2 additions & 2 deletions contracts/bridge/LayerZeroBridge.sol
@@ -104,7 +104,7 @@ contract LayerZeroBridge is ReentrancyGuard, LayerZeroStorage, ISyncService, ILa
uint256 originBalance= tx.origin.balance;
// solhint-disable-next-line check-send-result
endpoint.send{value:msg.value}(dstChainId, path, payload, payable(tx.origin), address(0), new bytes(0));
// log the fee payed to layerzero
// log the fee paid to layerzero
emit SynchronizationFee(originMsgValue - (tx.origin.balance - originBalance));
}

@@ -166,7 +166,7 @@ contract LayerZeroBridge is ReentrancyGuard, LayerZeroStorage, ISyncService, ILa
(bool success, ) = tx.origin.call{value: leftMsgValue}("");
require(success, "Refund failed");
}
// log the fee payed to layerzero
// log the fee paid to layerzero
emit SynchronizationFee(originMsgValue - leftMsgValue);
}

2 changes: 1 addition & 1 deletion contracts/dev-contracts/LZEndpointMock.sol
@@ -32,7 +32,7 @@ contract LZEndpointMock is ILayerZeroEndpoint {
uint public oracleFee;
bytes public defaultAdapterParams;

// path = remote addrss + local address
// path = remote address + local address
// inboundNonce = [srcChainId][path].
mapping(uint16 => mapping(bytes => uint64)) public inboundNonce;
// outboundNonce = [dstChainId][srcAddress]
12 changes: 6 additions & 6 deletions contracts/dev-contracts/MultiCall.sol
@@ -67,10 +67,10 @@ contract MultiCall {

function batchWithdrawToL1(
IZkLink zkLinkInstance,
WithdrawToL1Info[] calldata _withdrawDatas
WithdrawToL1Info[] calldata _withdrawData
) external payable {
for (uint i; i < _withdrawDatas.length; i++) {
WithdrawToL1Info memory withdrawInfo = _withdrawDatas[i];
for (uint i; i < _withdrawData.length; i++) {
WithdrawToL1Info memory withdrawInfo = _withdrawData[i];
zkLinkInstance.withdrawToL1{value: withdrawInfo.value}(
withdrawInfo.owner,
withdrawInfo.token,
@@ -85,10 +85,10 @@

function batchWithdrawPendingBalance(
IZkLink zkLinkInstance,
WithdrawPendingBalanceInfo[] calldata _withdrawDatas
WithdrawPendingBalanceInfo[] calldata _withdrawData
) external {
for (uint i; i < _withdrawDatas.length; i++) {
WithdrawPendingBalanceInfo memory withdrawInfo = _withdrawDatas[i];
for (uint i; i < _withdrawData.length; i++) {
WithdrawPendingBalanceInfo memory withdrawInfo = _withdrawData[i];
zkLinkInstance.withdrawPendingBalance(
withdrawInfo.owner,
withdrawInfo.tokenId,
2 changes: 1 addition & 1 deletion contracts/dev-contracts/ZkLinkTest.sol
@@ -43,7 +43,7 @@ contract ZkLinkTest is ZkLink {
uint64 priorityOperationsProcessed,
bytes memory offsetsCommitment,
uint256 slaverChainNum,
bytes32[] memory onchainOperationPubdataHashs
bytes32[] memory onchainOperationPubdataHashes
) {
return collectOnchainOps(_newBlockData);
}
2 changes: 1 addition & 1 deletion contracts/gateway/LineaGateway.sol
@@ -99,7 +99,7 @@ abstract contract LineaGateway is OwnableUpgradeable, UUPSUpgradeable, Reentranc
/// @dev Bridge token to remote gateway
/// @param _token The token on local chain
/// @param _amount The token amount
/// @param _fee The fee payed to message service
/// @param _fee The fee paid to message service
/// @return Return native token and whether it is USDC
function bridgeERC20ToRemoteGateway(address _token, uint256 _amount, uint256 _fee) internal returns (bool, address) {
bool isUSDC;
2 changes: 1 addition & 1 deletion contracts/zksync/Events.sol
@@ -24,7 +24,7 @@ interface Events {
event Withdrawal(uint16 indexed tokenId, uint128 amount);

/// @notice Event emitted when user funds are withdrawn from the zkLink state but not from contract
event WithdrawalPending(uint16 indexed tokenId, bytes32 indexed recepient, uint128 amount);
event WithdrawalPending(uint16 indexed tokenId, bytes32 indexed recipient, uint128 amount);

/// @notice Event emitted when user funds are withdrawn from the zkLink state to L1 and contract
event WithdrawalL1(bytes32 indexed withdrawHash);
2 changes: 1 addition & 1 deletion contracts/zksync/PlonkCore.sol
@@ -719,7 +719,7 @@ contract Plonk4VerifierWithAccessToDNext {
tmp_g1.point_sub_assign(vk.copy_permutation_commitments[STATE_WIDTH - 1].point_mul(last_permutation_part_at_z));

res.point_add_assign(tmp_g1);
// multiply them by v immedately as linearization has a factor of v^1
// multiply them by v immediately as linearization has a factor of v^1
res.point_mul_assign(state.v);
// res now contains contribution from the gates linearization and
// copy permutation part
2 changes: 1 addition & 1 deletion docs/Deploy.md
@@ -86,7 +86,7 @@ OPTIONS:
deployZkLink: Deploy zklink contracts
```

`--block-number`,`--commitment `,`--genesis-root`,`--sync-hash`,`--timestamp` are used to produce genesie block. When deploying for the first time `--block-number`,`--commitment `,`--sync-hash` and `--timestamp` can be left unset. When deploying for appending a new chain all these options need to be set with the latest exectuted block properties.
`--block-number`, `--commitment`, `--genesis-root`, `--sync-hash` and `--timestamp` are used to produce the genesis block. When deploying for the first time, `--block-number`, `--commitment`, `--sync-hash` and `--timestamp` can be left unset. When deploying to append a new chain, all these options need to be set with the latest executed block properties.

You can leave `--governor` and `--validator` unset when deploying to a devnet or testnet, but **DO NOT** use the deployer address as the network governor and validator on the mainnet.
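For illustration only, a hypothetical invocation for appending a new chain could look like the sketch below. The `deployZkLink` task name and the option names come from the help text above; the `npx hardhat` wrapper, the network flag, and every value are assumptions and placeholders, not commands taken from this repository's scripts.

```shell
# Hypothetical sketch only; substitute real values from the latest executed block.
npx hardhat deployZkLink \
  --network <network-name> \
  --block-number <block-number> \
  --commitment <commitment-hash> \
  --genesis-root <genesis-state-root> \
  --sync-hash <sync-hash> \
  --timestamp <block-timestamp> \
  --governor <governor-address> \
  --validator <validator-address>
```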

8 changes: 4 additions & 4 deletions docs/SecurityCheck.md
@@ -16,7 +16,7 @@ T2 - Use fixed compiler version to compile contract (SWC-103)

T3 - Check for correct inheritance, keep it simple and linear (SWC-125)

T4 - Constructor should not be exsited in proxyed contract
T4 - Constructor should not exist in a proxied contract

T5 - Right-To-Left-Override control character not used (SWC-130)

@@ -126,7 +126,7 @@ L9 - Don't use msg.value repeatedly at delegatecall

### Token

A1 - Recipent-Withdraw pattern followed?
A1 - Recipient-Withdraw pattern followed?

A2 - Use call to send eth to a contract address and check the result (SWC-134)

@@ -136,7 +136,7 @@ A4 - Does msg.sender has the authority to move token of other addresses?

A5 - Use the balance difference as the amount when non-standard token deposit in or withdraw out of contract

A6 - Is there a possiblity that tokens can not be retrieved?
A6 - Is there a possibility that tokens can not be retrieved?

A7 - Is the code still correct if the token contract is upgradable?

@@ -150,7 +150,7 @@ O2 - Use TWP of onchain oracle

O3 - The price of LP is correct?

O4 - Is there a possiblity that lend a large amout of low-value token and manipulate its price to borrow a high-value token?
O4 - Is there a possibility of lending a large amount of a low-value token and manipulating its price to borrow a high-value token?

## Reference

3 changes: 2 additions & 1 deletion package.json
@@ -20,7 +20,8 @@
"test-master": "MASTER_UNITTEST=true npx hardhat test",
"test-slaver": "SLAVER_UNITTEST=true npx hardhat test",
"flatten-master": "MASTER_UNITTEST=true npx hardhat run script/combine_zklink_abi.js",
"flatten-slaver": "SLAVER_UNITTEST=true npx hardhat run script/combine_zklink_abi.js"
"flatten-slaver": "SLAVER_UNITTEST=true npx hardhat run script/combine_zklink_abi.js",
"clean": "npx hardhat clean && rm -rf *Master.json *Slaver.json"
},
"dependencies": {
"@openzeppelin/contracts": "^4.9.2",
14 changes: 7 additions & 7 deletions test/block_commit_test.js
@@ -216,7 +216,7 @@ describe('Block commit unit tests', function () {
priorityOperationsProcessed,
offsetsCommitment:hexlify(concat(offsetsCommitment)),
slaverChainNum: 2,
onchainOperationPubdataHashs:[
onchainOperationPubdataHashes:[
ZERO_BYTES32,
onchainOpPubdataHash2,
ZERO_BYTES32,
@@ -244,7 +244,7 @@ describe('Block commit unit tests', function () {
expect(actual.priorityOperationsProcessed).eq(expected.priorityOperationsProcessed);
expect(actual.offsetsCommitment).eq(expected.offsetsCommitment);
expect(actual.slaverChainNum).eq(expected.slaverChainNum);
expect(actual.onchainOperationPubdataHashs).eql(expected.onchainOperationPubdataHashs);
expect(actual.onchainOperationPubdataHashes).eql(expected.onchainOperationPubdataHashes);
}

it('no pubdata should be success', async () => {
@@ -255,7 +255,7 @@
priorityOperationsProcessed:0,
offsetsCommitment:"0x",
slaverChainNum:2,
onchainOperationPubdataHashs:[
onchainOperationPubdataHashes:[
ZERO_BYTES32, // the master chain
EMPTY_STRING_KECCAK,
ZERO_BYTES32, // the unSupport chain
@@ -351,7 +351,7 @@
timestamp:1652422395,
stateHash:"0x0000000000000000000000000000000000000000000000000000000000000002",
commitment:"0x0000000000000000000000000000000000000000000000000000000000000003",
syncHashs:[]
syncHashes:[]
}
const commitBlock = {
newStateHash:"0x0000000000000000000000000000000000000000000000000000000000000005",
@@ -395,9 +395,9 @@
expect(Number(r.timestamp)).to.eql(commitBlock.timestamp);
expect(r.stateHash).to.eql(commitBlock.newStateHash);

const syncHash2 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashs[1]));
const syncHash4 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashs[3]));
expect(r.syncHashs).to.eql([[2n, syncHash2],[4n, syncHash4]]);
const syncHash2 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashes[1]));
const syncHash4 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashes[3]));
expect(r.syncHashes).to.eql([[2n, syncHash2],[4n, syncHash4]]);
});
});
});
2 changes: 1 addition & 1 deletion test/exodus_test.js
@@ -32,7 +32,7 @@ describe('ZkLink exodus unit tests', function () {
"timestamp":1652422395,
"stateHash":"0xbb66ffc06a476f05a218f6789ca8946e4f0cf29f1efc2e4d0f9a8e70f0326313",
"commitment":"0x6104d07f7c285404dc58dd0b37894b20c4193a231499a20e4056d119fc2c1184",
"syncHashs":[]
"syncHashes":[]
};
});

2 changes: 1 addition & 1 deletion test/utils.js
@@ -17,7 +17,7 @@ const GENESIS_BLOCK = {
timestamp:0,
stateHash:GENESIS_ROOT,
commitment:ZERO_BYTES32,
syncHashs:[]
syncHashes:[]
}
const USD_TOKEN_ID = 1;
const MIN_USD_STABLE_TOKEN_ID = 17;
8 changes: 8 additions & 0 deletions typos.toml
@@ -0,0 +1,8 @@
[files]
extend-exclude = ["*.tsv", "*.json", "*.txt"]

[default.extend-words]
# Ignore false-positives

[default.extend-identifiers]
# *sigh* this just isn't worth the cost of fixing
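For context (not part of this commit): the config above excludes data files from the check and leaves the word and identifier allowlists empty. A rough sketch of reproducing the CI check locally with the typos CLI, which picks up `typos.toml` from the repository root, might be:

```shell
# Sketch only; assumes Rust's cargo is available.
cargo install typos-cli    # installs the `typos` binary used by the crate-ci action
typos                      # report spelling issues, honoring typos.toml
typos --write-changes      # optionally apply the suggested fixes in place
```

If a flagged term is intentional, it can later be allow-listed with a `word = "word"` style entry under `[default.extend-words]` or `[default.extend-identifiers]`.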
