From 6d1106b8e212facd33e514fb9932621b6d466bb0 Mon Sep 17 00:00:00 2001 From: clint Date: Tue, 19 Dec 2023 19:03:08 +0800 Subject: [PATCH] Typos checked --- .github/{workflow => workflows}/ci.yml | 4 ++- contracts/Storage.sol | 2 +- contracts/ZkLink.sol | 34 +++++++++++----------- contracts/ZkLinkPeriphery.sol | 4 +-- contracts/bridge/LayerZeroBridge.sol | 4 +-- contracts/dev-contracts/LZEndpointMock.sol | 2 +- contracts/dev-contracts/MultiCall.sol | 12 ++++---- contracts/dev-contracts/ZkLinkTest.sol | 2 +- contracts/gateway/LineaGateway.sol | 2 +- contracts/zksync/Events.sol | 2 +- contracts/zksync/PlonkCore.sol | 2 +- docs/Deploy.md | 2 +- docs/SecurityCheck.md | 8 ++--- package.json | 3 +- test/block_commit_test.js | 14 ++++----- test/exodus_test.js | 2 +- test/utils.js | 2 +- typos.toml | 8 +++++ 18 files changed, 60 insertions(+), 49 deletions(-) rename .github/{workflow => workflows}/ci.yml (86%) create mode 100644 typos.toml diff --git a/.github/workflow/ci.yml b/.github/workflows/ci.yml similarity index 86% rename from .github/workflow/ci.yml rename to .github/workflows/ci.yml index 384a97e..2d5c7b3 100644 --- a/.github/workflow/ci.yml +++ b/.github/workflows/ci.yml @@ -31,10 +31,12 @@ jobs: cache: 'npm' cache-dependency-path: 'package-lock.json' - name: Install dependencies - run: npm ci + run: npm i # https://github.com/crate-ci/typos/blob/master/docs/github-action.md - name: check spelling uses: crate-ci/typos@master + - name: Copy KeysWithPlonkVerifier.sol + run: cp KeysWithPlonkVerifier.example contracts/zksync/KeysWithPlonkVerifier.sol - name: ${{ matrix.cmd }} run: | ${{ matrix.cmd }} diff --git a/contracts/Storage.sol b/contracts/Storage.sol index 3596835..c0c96e5 100644 --- a/contracts/Storage.sol +++ b/contracts/Storage.sol @@ -128,7 +128,7 @@ contract Storage is ZkLinkAcceptor, Config { uint256 timestamp; // Rollup block timestamp, have the same format as Ethereum block constant bytes32 stateHash; // Root hash of the rollup state bytes32
commitment; // Verified input for the ZkLink circuit - SyncHash[] syncHashs; // Used for cross chain block verify + SyncHash[] syncHashes; // Used for cross chain block verify } struct SyncHash { uint8 chainId; diff --git a/contracts/ZkLink.sol b/contracts/ZkLink.sol index a2113a2..d733b37 100644 --- a/contracts/ZkLink.sol +++ b/contracts/ZkLink.sol @@ -244,13 +244,13 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { uint64 priorityReqCommitted, bytes memory onchainOpsOffsetCommitment, uint256 slaverChainNum, - bytes32[] memory onchainOperationPubdataHashs + bytes32[] memory onchainOperationPubdataHashes ) = collectOnchainOps(_newBlock); // Create block commitment for verification proof bytes32 commitment = createBlockCommitment(_previousBlock, _newBlock, onchainOpsOffsetCommitment); // Create synchronization hash for cross chain block verify - SyncHash[] memory syncHashs = createSyncHash(_previousBlock.syncHashs, _newBlock, slaverChainNum, onchainOperationPubdataHashs); + SyncHash[] memory syncHashes = createSyncHash(_previousBlock.syncHashes, _newBlock, slaverChainNum, onchainOperationPubdataHashes); return StoredBlockInfo( _newBlock.blockNumber, @@ -259,7 +259,7 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { _newBlock.timestamp, _newBlock.newStateHash, commitment, - syncHashs + syncHashes ); } @@ -271,18 +271,18 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { /// offsetsCommitment - array where 1 is stored in chunk where onchainOperation begins and other is 0 (used in commitments) /// slaverChainNum - the slaver chain num /// onchainOperationPubdatas - onchain operation (Deposits, ChangePubKeys, Withdraws, ForcedExits, FullExits) pubdatas group by chain id (used in cross chain block verify) - function collectOnchainOps(CommitBlockInfo memory _newBlockData) internal view returns (bytes32 processableOperationsHash, uint64 priorityOperationsProcessed, bytes memory offsetsCommitment, 
uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashs) { + function collectOnchainOps(CommitBlockInfo memory _newBlockData) internal view returns (bytes32 processableOperationsHash, uint64 priorityOperationsProcessed, bytes memory offsetsCommitment, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashes) { bytes memory pubData = _newBlockData.publicData; // pubdata length must be a multiple of CHUNK_BYTES require(pubData.length % CHUNK_BYTES == 0, "h0"); offsetsCommitment = new bytes(pubData.length / CHUNK_BYTES); priorityOperationsProcessed = 0; - (slaverChainNum, onchainOperationPubdataHashs) = initOnchainOperationPubdataHashs(); + (slaverChainNum, onchainOperationPubdataHashes) = initOnchainOperationPubdataHashes(); processableOperationsHash = EMPTY_STRING_KECCAK; // early return to save once slot read if (_newBlockData.onchainOperations.length == 0) { - return (processableOperationsHash, priorityOperationsProcessed, offsetsCommitment, slaverChainNum, onchainOperationPubdataHashs); + return (processableOperationsHash, priorityOperationsProcessed, offsetsCommitment, slaverChainNum, onchainOperationPubdataHashes); } uint64 uncommittedPriorityRequestsOffset = firstPriorityRequestId + totalCommittedPriorityRequests; for (uint256 i = 0; i < _newBlockData.onchainOperations.length; ++i) { @@ -318,7 +318,7 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { // group onchain operations pubdata hash by chain id for slaver chains if (chainId != CHAIN_ID) { uint256 chainOrder = chainId - 1; - onchainOperationPubdataHashs[chainOrder] = Utils.concatHash(onchainOperationPubdataHashs[chainOrder], opPubData); + onchainOperationPubdataHashes[chainOrder] = Utils.concatHash(onchainOperationPubdataHashes[chainOrder], opPubData); } if (processablePubData.length > 0) { // concat processable onchain operations pubdata hash of current chain @@ -344,10 +344,10 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster 
{ } /// @dev Create synchronization hash for cross chain block verify - function createSyncHash(SyncHash[] memory preBlockSyncHashs, CommitBlockInfo memory _newBlock, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashs) internal pure returns (SyncHash[] memory syncHashs) { - syncHashs = new SyncHash[](slaverChainNum); + function createSyncHash(SyncHash[] memory preBlockSyncHashes, CommitBlockInfo memory _newBlock, uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashes) internal pure returns (SyncHash[] memory syncHashes) { + syncHashes = new SyncHash[](slaverChainNum); uint256 chainOrder = 0; - for (uint8 i = 0; i < onchainOperationPubdataHashs.length; ++i) { + for (uint8 i = 0; i < onchainOperationPubdataHashes.length; ++i) { uint8 chainId = i + 1; if (chainId == CHAIN_ID) { continue; @@ -355,8 +355,8 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { uint256 chainIndex = 1 << chainId - 1; if (chainIndex & ALL_CHAINS == chainIndex) { bytes32 preBlockSyncHash = EMPTY_STRING_KECCAK; - for (uint j = 0; j < preBlockSyncHashs.length; ++j) { - SyncHash memory _preBlockSyncHash = preBlockSyncHashs[j]; + for (uint j = 0; j < preBlockSyncHashes.length; ++j) { + SyncHash memory _preBlockSyncHash = preBlockSyncHashes[j]; if (_preBlockSyncHash.chainId == chainId) { preBlockSyncHash = _preBlockSyncHash.syncHash; break; @@ -364,20 +364,20 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { } // only append syncHash if onchain op exist in pubdata bytes32 newBlockSyncHash = preBlockSyncHash; - bytes32 onchainOperationPubdataHash = onchainOperationPubdataHashs[i]; + bytes32 onchainOperationPubdataHash = onchainOperationPubdataHashes[i]; if (onchainOperationPubdataHash != EMPTY_STRING_KECCAK) { newBlockSyncHash = createSlaverChainSyncHash(preBlockSyncHash, _newBlock.blockNumber, _newBlock.newStateHash, onchainOperationPubdataHash); } - syncHashs[chainOrder] = SyncHash(chainId, newBlockSyncHash); + 
syncHashes[chainOrder] = SyncHash(chainId, newBlockSyncHash); chainOrder++; } } } /// @dev init onchain op pubdata hash for all slaver chains - function initOnchainOperationPubdataHashs() internal pure returns (uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashs) { + function initOnchainOperationPubdataHashes() internal pure returns (uint256 slaverChainNum, bytes32[] memory onchainOperationPubdataHashes) { slaverChainNum = 0; - onchainOperationPubdataHashs = new bytes32[](MAX_CHAIN_ID); + onchainOperationPubdataHashes = new bytes32[](MAX_CHAIN_ID); for(uint8 i = 0; i < MAX_CHAIN_ID; ++i) { uint8 chainId = i + 1; if (chainId == CHAIN_ID) { @@ -386,7 +386,7 @@ contract ZkLink is ReentrancyGuard, Storage, Events, UpgradeableMaster { uint256 chainIndex = 1 << i; if (chainIndex & ALL_CHAINS == chainIndex) { slaverChainNum++; - onchainOperationPubdataHashs[i] = EMPTY_STRING_KECCAK; + onchainOperationPubdataHashes[i] = EMPTY_STRING_KECCAK; } } } diff --git a/contracts/ZkLinkPeriphery.sol b/contracts/ZkLinkPeriphery.sol index f0d3966..7f96856 100644 --- a/contracts/ZkLinkPeriphery.sol +++ b/contracts/ZkLinkPeriphery.sol @@ -355,8 +355,8 @@ contract ZkLinkPeriphery is ReentrancyGuard, Storage, Events { if (hashStoredBlockInfo(_block) != storedBlockHashes[blockNumber]) { return false; } - for (uint8 i = 0; i < _block.syncHashs.length; ++i) { - SyncHash memory sync = _block.syncHashs[i]; + for (uint8 i = 0; i < _block.syncHashes.length; ++i) { + SyncHash memory sync = _block.syncHashes[i]; bytes32 remoteSyncHash = synchronizedChains[sync.chainId]; if (remoteSyncHash != sync.syncHash) { return false; diff --git a/contracts/bridge/LayerZeroBridge.sol b/contracts/bridge/LayerZeroBridge.sol index 98e502b..4497ebe 100644 --- a/contracts/bridge/LayerZeroBridge.sol +++ b/contracts/bridge/LayerZeroBridge.sol @@ -104,7 +104,7 @@ contract LayerZeroBridge is ReentrancyGuard, LayerZeroStorage, ISyncService, ILa uint256 originBalance= tx.origin.balance; // 
solhint-disable-next-line check-send-result endpoint.send{value:msg.value}(dstChainId, path, payload, payable(tx.origin), address(0), new bytes(0)); - // log the fee payed to layerzero + // log the fee paid to layerzero emit SynchronizationFee(originMsgValue - (tx.origin.balance - originBalance)); } @@ -166,7 +166,7 @@ contract LayerZeroBridge is ReentrancyGuard, LayerZeroStorage, ISyncService, ILa (bool success, ) = tx.origin.call{value: leftMsgValue}(""); require(success, "Refund failed"); } - // log the fee payed to layerzero + // log the fee paid to layerzero emit SynchronizationFee(originMsgValue - leftMsgValue); } diff --git a/contracts/dev-contracts/LZEndpointMock.sol b/contracts/dev-contracts/LZEndpointMock.sol index ef91938..510e4a6 100644 --- a/contracts/dev-contracts/LZEndpointMock.sol +++ b/contracts/dev-contracts/LZEndpointMock.sol @@ -32,7 +32,7 @@ contract LZEndpointMock is ILayerZeroEndpoint { uint public oracleFee; bytes public defaultAdapterParams; - // path = remote addrss + local address + // path = remote address + local address // inboundNonce = [srcChainId][path]. 
mapping(uint16 => mapping(bytes => uint64)) public inboundNonce; // outboundNonce = [dstChainId][srcAddress] diff --git a/contracts/dev-contracts/MultiCall.sol b/contracts/dev-contracts/MultiCall.sol index a3ada09..ff857d5 100644 --- a/contracts/dev-contracts/MultiCall.sol +++ b/contracts/dev-contracts/MultiCall.sol @@ -67,10 +67,10 @@ contract MultiCall { function batchWithdrawToL1( IZkLink zkLinkInstance, - WithdrawToL1Info[] calldata _withdrawDatas + WithdrawToL1Info[] calldata _withdrawData ) external payable { - for (uint i; i < _withdrawDatas.length; i++) { - WithdrawToL1Info memory withdrawInfo = _withdrawDatas[i]; + for (uint i; i < _withdrawData.length; i++) { + WithdrawToL1Info memory withdrawInfo = _withdrawData[i]; zkLinkInstance.withdrawToL1{value: withdrawInfo.value}( withdrawInfo.owner, withdrawInfo.token, @@ -85,10 +85,10 @@ contract MultiCall { function batchWithdrawPendingBalance( IZkLink zkLinkInstance, - WithdrawPendingBalanceInfo[] calldata _withdrawDatas + WithdrawPendingBalanceInfo[] calldata _withdrawData ) external { - for (uint i; i < _withdrawDatas.length; i++) { - WithdrawPendingBalanceInfo memory withdrawInfo = _withdrawDatas[i]; + for (uint i; i < _withdrawData.length; i++) { + WithdrawPendingBalanceInfo memory withdrawInfo = _withdrawData[i]; zkLinkInstance.withdrawPendingBalance( withdrawInfo.owner, withdrawInfo.tokenId, diff --git a/contracts/dev-contracts/ZkLinkTest.sol b/contracts/dev-contracts/ZkLinkTest.sol index 1f83489..09c29a3 100644 --- a/contracts/dev-contracts/ZkLinkTest.sol +++ b/contracts/dev-contracts/ZkLinkTest.sol @@ -43,7 +43,7 @@ contract ZkLinkTest is ZkLink { uint64 priorityOperationsProcessed, bytes memory offsetsCommitment, uint256 slaverChainNum, - bytes32[] memory onchainOperationPubdataHashs + bytes32[] memory onchainOperationPubdataHashes ) { return collectOnchainOps(_newBlockData); } diff --git a/contracts/gateway/LineaGateway.sol b/contracts/gateway/LineaGateway.sol index e2f4986..2300ff2 100644 --- 
a/contracts/gateway/LineaGateway.sol +++ b/contracts/gateway/LineaGateway.sol @@ -99,7 +99,7 @@ abstract contract LineaGateway is OwnableUpgradeable, UUPSUpgradeable, Reentranc /// @dev Bridge token to remote gateway /// @param _token The token on local chain /// @param _amount The token amount - /// @param _fee The fee payed to message service + /// @param _fee The fee paid to message service /// @return Return native token and whether it is USDC function bridgeERC20ToRemoteGateway(address _token, uint256 _amount, uint256 _fee) internal returns (bool, address) { bool isUSDC; diff --git a/contracts/zksync/Events.sol b/contracts/zksync/Events.sol index 686e458..88bb44f 100644 --- a/contracts/zksync/Events.sol +++ b/contracts/zksync/Events.sol @@ -24,7 +24,7 @@ interface Events { event Withdrawal(uint16 indexed tokenId, uint128 amount); /// @notice Event emitted when user funds are withdrawn from the zkLink state but not from contract - event WithdrawalPending(uint16 indexed tokenId, bytes32 indexed recepient, uint128 amount); + event WithdrawalPending(uint16 indexed tokenId, bytes32 indexed recipient, uint128 amount); /// @notice Event emitted when user funds are withdrawn from the zkLink state to L1 and contract event WithdrawalL1(bytes32 indexed withdrawHash); diff --git a/contracts/zksync/PlonkCore.sol b/contracts/zksync/PlonkCore.sol index dbbd05d..f6095f0 100644 --- a/contracts/zksync/PlonkCore.sol +++ b/contracts/zksync/PlonkCore.sol @@ -719,7 +719,7 @@ contract Plonk4VerifierWithAccessToDNext { tmp_g1.point_sub_assign(vk.copy_permutation_commitments[STATE_WIDTH - 1].point_mul(last_permutation_part_at_z)); res.point_add_assign(tmp_g1); - // multiply them by v immedately as linearization has a factor of v^1 + // multiply them by v immediately as linearization has a factor of v^1 res.point_mul_assign(state.v); // res now contains contribution from the gates linearization and // copy permutation part diff --git a/docs/Deploy.md b/docs/Deploy.md index 
3e8ad2e..81c0360 100644 --- a/docs/Deploy.md +++ b/docs/Deploy.md @@ -86,7 +86,7 @@ OPTIONS: deployZkLink: Deploy zklink contracts ``` -`--block-number`,`--commitment `,`--genesis-root`,`--sync-hash`,`--timestamp` are used to produce genesie block. When deploying for the first time `--block-number`,`--commitment `,`--sync-hash` and `--timestamp` can be left unset. When deploying for appending a new chain all these options need to be set with the latest exectuted block properties. +`--block-number`,`--commitment `,`--genesis-root`,`--sync-hash`,`--timestamp` are used to produce genesie block. When deploying for the first time `--block-number`,`--commitment `,`--sync-hash` and `--timestamp` can be left unset. When deploying for appending a new chain all these options need to be set with the latest executed block properties. You could left`--governor` and `--validator` be unset when deploy to devnet or testnet, but **DOT NOT** use deployer address as network governor and validator on the mainnet. diff --git a/docs/SecurityCheck.md b/docs/SecurityCheck.md index 57f2181..f67df02 100644 --- a/docs/SecurityCheck.md +++ b/docs/SecurityCheck.md @@ -16,7 +16,7 @@ T2 - Use fixed compiler version to compile contract (SWC-103) T3 - Check for correct inheritance, keep it simple and linear (SWC-125) -T4 - Constructor should not be exsited in proxyed contract +T4 - Constructor should not be existed in proxyed contract T5 - Right-To-Left-Override control character not used (SWC-130) @@ -126,7 +126,7 @@ L9 - Don't use msg.value repeatedly at delegatecall ### Token -A1 - Recipent-Withdraw pattern followed? +A1 - Recipient-Withdraw pattern followed? A2 - Use call to send eth to a contract address and check the result (SWC-134) @@ -136,7 +136,7 @@ A4 - Does msg.sender has the authority to move token of other addresses? 
A5 - Use the balance difference as the amount when non-standard token deposit in or withdraw out of contract -A6 - Is there a possiblity that tokens can not be retrieved? +A6 - Is there a possibility that tokens cannot be retrieved? A7 - Is code is still correct if the token contract is upgradable? @@ -150,7 +150,7 @@ O2 - Use TWP of onchain oracle O3 - The price of LP is correct? -O4 - Is there a possiblity that lend a large amout of low-value token and manipulate its price to borrow a high-value token? +O4 - Is there a possibility to lend a large amount of a low-value token and manipulate its price to borrow a high-value token? ## Reference diff --git a/package.json b/package.json index c50af72..7f78bba 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,8 @@ "test-master": "MASTER_UNITTEST=true npx hardhat test", "test-slaver": "SLAVER_UNITTEST=true npx hardhat test", "flatten-master": "MASTER_UNITTEST=true npx hardhat run script/combine_zklink_abi.js", - "flatten-slaver": "SLAVER_UNITTEST=true npx hardhat run script/combine_zklink_abi.js" + "flatten-slaver": "SLAVER_UNITTEST=true npx hardhat run script/combine_zklink_abi.js", + "clean": "npx hardhat clean && rm -rf *Master.json *Slaver.json" }, "dependencies": { "@openzeppelin/contracts": "^4.9.2", diff --git a/test/block_commit_test.js b/test/block_commit_test.js index a7e815a..b16f81f 100644 --- a/test/block_commit_test.js +++ b/test/block_commit_test.js @@ -216,7 +216,7 @@ describe('Block commit unit tests', function () { priorityOperationsProcessed, offsetsCommitment:hexlify(concat(offsetsCommitment)), slaverChainNum: 2, - onchainOperationPubdataHashs:[ + onchainOperationPubdataHashes:[ ZERO_BYTES32, onchainOpPubdataHash2, ZERO_BYTES32, @@ -244,7 +244,7 @@ describe('Block commit unit tests', function () { expect(actual.priorityOperationsProcessed).eq(expected.priorityOperationsProcessed); expect(actual.offsetsCommitment).eq(expected.offsetsCommitment);
expect(actual.slaverChainNum).eq(expected.slaverChainNum); - expect(actual.onchainOperationPubdataHashs).eql(expected.onchainOperationPubdataHashs); + expect(actual.onchainOperationPubdataHashes).eql(expected.onchainOperationPubdataHashes); } it('no pubdata should be success', async () => { @@ -255,7 +255,7 @@ describe('Block commit unit tests', function () { priorityOperationsProcessed:0, offsetsCommitment:"0x", slaverChainNum:2, - onchainOperationPubdataHashs:[ + onchainOperationPubdataHashes:[ ZERO_BYTES32, // the master chain EMPTY_STRING_KECCAK, ZERO_BYTES32, // the unSupport chain @@ -351,7 +351,7 @@ describe('Block commit unit tests', function () { timestamp:1652422395, stateHash:"0x0000000000000000000000000000000000000000000000000000000000000002", commitment:"0x0000000000000000000000000000000000000000000000000000000000000003", - syncHashs:[] + syncHashes:[] } const commitBlock = { newStateHash:"0x0000000000000000000000000000000000000000000000000000000000000005", @@ -395,9 +395,9 @@ describe('Block commit unit tests', function () { expect(Number(r.timestamp)).to.eql(commitBlock.timestamp); expect(r.stateHash).to.eql(commitBlock.newStateHash); - const syncHash2 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashs[1])); - const syncHash4 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashs[3])); - expect(r.syncHashs).to.eql([[2n, syncHash2],[4n, syncHash4]]); + const syncHash2 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashes[1])); + const syncHash4 = hexlify(createSlaverChainSyncHash(EMPTY_STRING_KECCAK, commitBlock.blockNumber, commitBlock.newStateHash, expected.onchainOperationPubdataHashes[3])); + expect(r.syncHashes).to.eql([[2n, syncHash2],[4n, syncHash4]]); }); }); }); diff 
--git a/test/exodus_test.js b/test/exodus_test.js index 42cac43..01d5efe 100644 --- a/test/exodus_test.js +++ b/test/exodus_test.js @@ -32,7 +32,7 @@ describe('ZkLink exodus unit tests', function () { "timestamp":1652422395, "stateHash":"0xbb66ffc06a476f05a218f6789ca8946e4f0cf29f1efc2e4d0f9a8e70f0326313", "commitment":"0x6104d07f7c285404dc58dd0b37894b20c4193a231499a20e4056d119fc2c1184", - "syncHashs":[] + "syncHashes":[] }; }); diff --git a/test/utils.js b/test/utils.js index 169c1da..dcbc7e8 100644 --- a/test/utils.js +++ b/test/utils.js @@ -17,7 +17,7 @@ const GENESIS_BLOCK = { timestamp:0, stateHash:GENESIS_ROOT, commitment:ZERO_BYTES32, - syncHashs:[] + syncHashes:[] } const USD_TOKEN_ID = 1; const MIN_USD_STABLE_TOKEN_ID = 17; diff --git a/typos.toml b/typos.toml new file mode 100644 index 0000000..21b8272 --- /dev/null +++ b/typos.toml @@ -0,0 +1,8 @@ +[files] +extend-exclude = ["*.tsv", "*.json", "*.txt"] + +[default.extend-words] +# Ignore false-positives + +[default.extend-identifiers] +# *sigh* this just isn't worth the cost of fixing