diff --git a/prover/zkevm/prover/common/flatten_column.go b/prover/zkevm/prover/common/flatten_column.go index 5acabdfddc..0e620f1d8e 100644 --- a/prover/zkevm/prover/common/flatten_column.go +++ b/prover/zkevm/prover/common/flatten_column.go @@ -13,6 +13,8 @@ import ( const ( // NbLimbU32 represents the number of 16-bit limbs for a 32-bit integer. NbLimbU32 = 2 + // NbLimbU48 represents the number of 16-bit limbs for a 48-bit integer. + NbLimbU48 = 3 // NbLimbU64 represents the number of 16-bit limbs for a 64-bit integer. NbLimbU64 = 4 // NbLimbEthAddress represents the number of 16-bit limbs for an Ethereum address (160 bits). diff --git a/prover/zkevm/prover/publicInput/arith_struct/arith_struct.go b/prover/zkevm/prover/publicInput/arith_struct/arith_struct.go index c9232b6833..3be7158de4 100644 --- a/prover/zkevm/prover/publicInput/arith_struct/arith_struct.go +++ b/prover/zkevm/prover/publicInput/arith_struct/arith_struct.go @@ -1,9 +1,11 @@ package arith_struct import ( + "fmt" "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/wizard" "github.com/consensys/linea-monorepo/prover/utils/csvtraces" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" ) // BlockDataCols models the arithmetization's BlockData module @@ -14,30 +16,45 @@ type BlockDataCols struct { Inst ifaces.Column // Ct is a counter column Ct ifaces.Column - // DataHi/DataLo encode the data, for example the timestamps - DataHi, DataLo ifaces.Column + // DataHi/DataLo encode the data, for example the timestamps. + // It's divided into 16 16-bit limb columns. 256 bits in total. + Data [common.NbLimbU256]ifaces.Column // FirstBlock contains the absolute ID of the first block - FirstBlock ifaces.Column + // It's divided into 3 16-bit limb columns. 48 bits in total. 
+ FirstBlock [common.NbLimbU48]ifaces.Column } // TxnData models the arithmetization's TxnData module type TxnData struct { - AbsTxNum, AbsTxNumMax ifaces.Column // Absolute number of the transaction (starts from 1 and acts as an Active Filter), and the maximum number of transactions - RelTxNum, RelTxNumMax ifaces.Column // Relative TxNum inside the block, - FromHi, FromLo ifaces.Column // Sender address - IsLastTxOfBlock ifaces.Column // 1 if this is the last transaction inside the block - RelBlock ifaces.Column // Relative Block number inside the batch - Ct ifaces.Column + // Absolute number of the transaction (starts from 1 and acts as an Active Filter), and the maximum number of + // transactions + AbsTxNum, AbsTxNumMax ifaces.Column + // Relative TxNum inside the block, + RelTxNum, RelTxNumMax ifaces.Column + // Sender address. It's divided into 10 16-bit limb columns. 160 bits in total. + From [common.NbLimbEthAddress]ifaces.Column + // 1 if this is the last transaction inside the block + IsLastTxOfBlock ifaces.Column + // Relative Block number inside the batch + RelBlock ifaces.Column + Ct ifaces.Column } // RlpTxn models the arithmetization's RlpTxn module type RlpTxn struct { - AbsTxNum, AbsTxNumMax ifaces.Column // Absolute number of the transaction (starts from 1 and acts as an Active Filter), and the maximum number of transactions - ToHashByProver ifaces.Column // Relative TxNum inside the block, - Limb ifaces.Column - NBytes ifaces.Column // the number of bytes to load from the limb - Done ifaces.Column // indicator column which we will use to obtain the ChainID - IsPhaseChainID ifaces.Column // indicator column which we will use to obtain the ChainID + // Absolute number of the transaction (starts from 1 and acts as an Active Filter), and the maximum number of transactions + AbsTxNum, AbsTxNumMax ifaces.Column + // Relative TxNum inside the block, + ToHashByProver ifaces.Column + // Limbs are columns that is used to store the RLP data. 
+ // It represents a single 128-bit limb, which is divided into 8 16-bit columns. + Limbs [common.NbLimbU128]ifaces.Column + // the number of bytes to load from the limb + NBytes ifaces.Column + // indicator column which we will use to obtain the ChainID + Done ifaces.Column + // indicator column which we will use to obtain the ChainID + IsPhaseChainID ifaces.Column } // DefineTestingArithModules defines the BlockDataCols, TxnData and RlpTxn modules based on csv traces. @@ -48,14 +65,20 @@ func DefineTestingArithModules(b *wizard.Builder, ctBlockData, ctTxnData, ctRlpT txnDataCols *TxnData rlpTxn *RlpTxn ) + if ctBlockData != nil { blockDataCols = &BlockDataCols{ - RelBlock: ctBlockData.GetCommit(b, "REL_BLOCK"), - Inst: ctBlockData.GetCommit(b, "INST"), - Ct: ctBlockData.GetCommit(b, "CT"), - DataHi: ctBlockData.GetCommit(b, "DATA_HI"), - DataLo: ctBlockData.GetCommit(b, "DATA_LO"), - FirstBlock: ctBlockData.GetCommit(b, "FIRST_BLOCK_NUMBER"), + RelBlock: ctBlockData.GetCommit(b, "REL_BLOCK"), + Inst: ctBlockData.GetCommit(b, "INST"), + Ct: ctBlockData.GetCommit(b, "CT"), + } + + for i := range blockDataCols.FirstBlock { + blockDataCols.FirstBlock[i] = ctBlockData.GetCommit(b, fmt.Sprintf("FIRST_BLOCK_NUMBER_%d", i)) + } + + for i := range blockDataCols.Data { + blockDataCols.Data[i] = ctBlockData.GetCommit(b, fmt.Sprintf("DATA_%d", i)) } } if ctTxnData != nil { @@ -65,22 +88,27 @@ func DefineTestingArithModules(b *wizard.Builder, ctBlockData, ctTxnData, ctRlpT RelTxNum: ctTxnData.GetCommit(b, "TD.REL_TX_NUM"), RelTxNumMax: ctTxnData.GetCommit(b, "TD.REL_TX_NUM_MAX"), Ct: ctTxnData.GetCommit(b, "TD.CT"), - FromHi: ctTxnData.GetCommit(b, "TD.FROM_HI"), - FromLo: ctTxnData.GetCommit(b, "TD.FROM_LO"), IsLastTxOfBlock: ctTxnData.GetCommit(b, "TD.IS_LAST_TX_OF_BLOCK"), RelBlock: ctTxnData.GetCommit(b, "TD.REL_BLOCK"), } + + for i := range txnDataCols.From { + txnDataCols.From[i] = ctTxnData.GetCommit(b, fmt.Sprintf("TD.FROM_%d", i)) + } } if ctRlpTxn != nil { rlpTxn = 
&RlpTxn{ AbsTxNum: ctRlpTxn.GetCommit(b, "RT.ABS_TX_NUM"), AbsTxNumMax: ctRlpTxn.GetCommit(b, "RT.ABS_TX_NUM_MAX"), ToHashByProver: ctRlpTxn.GetCommit(b, "RL.TO_HASH_BY_PROVER"), - Limb: ctRlpTxn.GetCommit(b, "RL.LIMB"), NBytes: ctRlpTxn.GetCommit(b, "RL.NBYTES"), Done: ctRlpTxn.GetCommit(b, "RL.DONE"), IsPhaseChainID: ctRlpTxn.GetCommit(b, "RL.IS_PHASE_CHAIN_ID"), } + + for i := range rlpTxn.Limbs { + rlpTxn.Limbs[i] = ctRlpTxn.GetCommit(b, fmt.Sprintf("RL.LIMB_%d", i)) + } } return blockDataCols, txnDataCols, rlpTxn @@ -91,41 +119,50 @@ func DefineTestingArithModules(b *wizard.Builder, ctBlockData, ctTxnData, ctRlpT func AssignTestingArithModules(run *wizard.ProverRuntime, ctBlockData, ctTxnData, ctRlpTxn *csvtraces.CsvTrace) { // assign the CSV data for the mock BlockData, TxnData and RlpTxn arithmetization modules if ctBlockData != nil { - ctBlockData.Assign( - run, - "REL_BLOCK", - "INST", - "CT", - "DATA_HI", - "DATA_LO", - "FIRST_BLOCK_NUMBER", - ) + toAssign := []string{"REL_BLOCK", "INST", "CT"} + + for i := range common.NbLimbU256 { + toAssign = append(toAssign, fmt.Sprintf("DATA_%d", i)) + } + + for i := range common.NbLimbU48 { + toAssign = append(toAssign, fmt.Sprintf("FIRST_BLOCK_NUMBER_%d", i)) + } + + ctBlockData.Assign(run, toAssign...) } if ctTxnData != nil { - ctTxnData.Assign( - run, + toAssign := []string{ "TD.ABS_TX_NUM", "TD.ABS_TX_NUM_MAX", "TD.REL_TX_NUM", "TD.REL_TX_NUM_MAX", "TD.CT", - "TD.FROM_HI", - "TD.FROM_LO", "TD.IS_LAST_TX_OF_BLOCK", "TD.REL_BLOCK", - ) + } + + for i := range common.NbLimbEthAddress { + toAssign = append(toAssign, fmt.Sprintf("TD.FROM_%d", i)) + } + + ctTxnData.Assign(run, toAssign...) 
} if ctRlpTxn != nil { - ctRlpTxn.Assign( - run, + toAssign := []string{ "RT.ABS_TX_NUM", "RT.ABS_TX_NUM_MAX", "RL.TO_HASH_BY_PROVER", - "RL.LIMB", "RL.NBYTES", "RL.DONE", "RL.IS_PHASE_CHAIN_ID", - ) + } + + for i := range common.NbLimbU128 { + toAssign = append(toAssign, fmt.Sprintf("RL.LIMB_%d", i)) + } + + ctRlpTxn.Assign(run, toAssign...) } } diff --git a/prover/zkevm/prover/publicInput/execution_data_collector/execution_data_collector.go b/prover/zkevm/prover/publicInput/execution_data_collector/execution_data_collector.go index 825e7d7fd1..71299c854f 100644 --- a/prover/zkevm/prover/publicInput/execution_data_collector/execution_data_collector.go +++ b/prover/zkevm/prover/publicInput/execution_data_collector/execution_data_collector.go @@ -28,9 +28,9 @@ The number of transactions in the block (2 bytes), the block timestamp (4 bytes) and then for each transaction tx_i, the sender address (20 bytes) and the transaction RLP. We then continue analogously for each block. -Due to design choices in the arithmetization and other submodules, we can only load at most 16 bytes -at a time. For this reason, blockhash is divided into two columns: BlockHashHi (16 bytes) and BlockHashLo (16 bytes). -Similarly, the sender address is divided into AddrHi (4 bytes) and AddrLo (16 bytes). +Due to design choices in the arithmetization and other submodules, we can only load at most 2 bytes +at a time. For this reason, blockhash is divided into 8 columns: [16]BlockHash (32 bytes in total). +Similarly, the sender address is divided into 10 columns: [10]Addr (20 bytes in total). Finally, the RLP data for each transaction is stored in the RLPTXN module. 
We use an intermediary fetcher diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher.go index ba097e8fc6..3640369e78 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher.go @@ -1,6 +1,7 @@ package fetchers_arithmetization import ( + "fmt" "github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/column" @@ -9,60 +10,76 @@ import ( "github.com/consensys/linea-monorepo/prover/protocol/query" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" commonconstraints "github.com/consensys/linea-monorepo/prover/zkevm/prover/common/common_constraints" arith "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/arith_struct" util "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" ) type RlpTxnFetcher struct { - AbsTxNum, AbsTxNumMax ifaces.Column // Absolute number of the transaction (starts from 1 and acts as an Active Filter), and the maximum number of transactions - Limb ifaces.Column + // Absolute number of the transaction (starts from 1 and acts as an Active Filter), and the maximum number of + // transactions + AbsTxNum, AbsTxNumMax ifaces.Column + Limbs [common.NbLimbU128]ifaces.Column NBytes ifaces.Column - FilterFetched ifaces.Column // isActive filter pattern that lights up on the area containing relevant data - EndOfRlpSegment ifaces.Column // lights up on active rows i for which AbsTxNum[i]!=AbsTxNum[i+1] + // isActive filter pattern that lights up on the area containing relevant data + FilterFetched ifaces.Column + // lights up on active rows i for 
which AbsTxNum[i]!=AbsTxNum[i+1] + EndOfRlpSegment ifaces.Column // prover action selectors - SelectorDiffAbsTxId ifaces.Column // used to compute EndOfRlpSegment, lights up on active rows i for which AbsTxNum[i]!=AbsTxNum[i+1] + // used to compute EndOfRlpSegment, lights up on active rows i for which AbsTxNum[i]!=AbsTxNum[i+1] + SelectorDiffAbsTxId ifaces.Column ComputeSelectorDiffAbsTxId wizard.ProverAction - // chainID - ChainID ifaces.Column // a size 1 column used to fetch the ChainID. The implementation is currently unaligned with respect to the number of limbs. - NBytesChainID ifaces.Column // a size 1 column used to fetch the number of bytes of the ChainID limb data + // chainID a size 1 column used to fetch the ChainID. The implementation is currently unaligned with respect to the + // number of limbs. + ChainID [common.NbLimbU128]ifaces.Column + // a size 1 column used to fetch the number of bytes of the ChainID limb data + NBytesChainID ifaces.Column } func NewRlpTxnFetcher(comp *wizard.CompiledIOP, name string, rt *arith.RlpTxn) RlpTxnFetcher { - size := rt.Limb.Size() + size := rt.Limbs[0].Size() res := RlpTxnFetcher{ AbsTxNum: util.CreateCol(name, "ABS_TX_NUM", size, comp), AbsTxNumMax: util.CreateCol(name, "ABS_TX_NUM_MAX", size, comp), - Limb: util.CreateCol(name, "LIMB", size, comp), NBytes: util.CreateCol(name, "NBYTES", size, comp), FilterFetched: util.CreateCol(name, "FILTER_FETCHED", size, comp), EndOfRlpSegment: util.CreateCol(name, "END_OF_RLP_SEGMENT", size, comp), - ChainID: util.CreateCol(name, "CHAIN_ID", size, comp), NBytesChainID: util.CreateCol(name, "N_BYTES_CHAIN_ID", size, comp), } + + for i := range res.Limbs { + res.Limbs[i] = util.CreateCol(name, fmt.Sprintf("LIMB_%d", i), size, comp) + res.ChainID[i] = util.CreateCol(name, fmt.Sprintf("CHAIN_ID_%d", i), size, comp) + } + return res } // ConstrainChainID defines constraints for both ChainID and NBytesChainID columns. 
func ConstrainChainID(comp *wizard.CompiledIOP, fetcher *RlpTxnFetcher, name string, rlpTxnArith *arith.RlpTxn) { - commonconstraints.MustBeConstant(comp, fetcher.ChainID) + for i := range fetcher.ChainID { + commonconstraints.MustBeConstant(comp, fetcher.ChainID[i]) + } commonconstraints.MustBeConstant(comp, fetcher.NBytesChainID) // constraint for the ChainID column - comp.InsertGlobal( - 0, - ifaces.QueryIDf("%s_CHAIN_ID_GLOBAL_CONSTRAINT", name), - sym.Mul( - rlpTxnArith.IsPhaseChainID, // must be 1 to fetch ChainID - rlpTxnArith.Done, // must be 1 to fetch the ChainID - rlpTxnArith.ToHashByProver, - sym.Sub( - rlpTxnArith.Limb, - fetcher.ChainID, + for i := range rlpTxnArith.Limbs { + comp.InsertGlobal( + 0, + ifaces.QueryIDf("%s_CHAIN_ID_GLOBAL_CONSTRAINT_%d", name, i), + sym.Mul( + rlpTxnArith.IsPhaseChainID, // must be 1 to fetch ChainID + rlpTxnArith.Done, // must be 1 to fetch the ChainID + rlpTxnArith.ToHashByProver, + sym.Sub( + rlpTxnArith.Limbs[i], + fetcher.ChainID[i], + ), ), - ), - ) + ) + } // Constraint for the NBytesChainID column comp.InsertGlobal( 0, @@ -131,19 +148,17 @@ func DefineRlpTxnFetcher(comp *wizard.CompiledIOP, fetcher *RlpTxnFetcher, name ) // the table with the data we fetch from the arithmetization columns RlpTxn - fetcherTable := []ifaces.Column{ + fetcherTable := append(fetcher.Limbs[:], fetcher.AbsTxNum, fetcher.AbsTxNumMax, - fetcher.Limb, fetcher.NBytes, - } + ) // the RlpTxn we extract timestamp data from, and which we will use to check for consistency - arithTable := []ifaces.Column{ + arithTable := append(rlpTxnArith.Limbs[:], rlpTxnArith.AbsTxNum, rlpTxnArith.AbsTxNumMax, - rlpTxnArith.Limb, rlpTxnArith.NBytes, - } + ) // a projection query to check that the timestamp data is fetched correctly comp.InsertProjection( @@ -157,33 +172,44 @@ func DefineRlpTxnFetcher(comp *wizard.CompiledIOP, fetcher *RlpTxnFetcher, name func AssignRlpTxnFetcher(run *wizard.ProverRuntime, fetcher *RlpTxnFetcher, rlpTxnArith *arith.RlpTxn) { - 
absTxNum := make([]field.Element, rlpTxnArith.Limb.Size()) - absTxNumMax := make([]field.Element, rlpTxnArith.Limb.Size()) - limb := make([]field.Element, rlpTxnArith.Limb.Size()) - nBytes := make([]field.Element, rlpTxnArith.Limb.Size()) - filterFetched := make([]field.Element, rlpTxnArith.Limb.Size()) - endOfRlpSegment := make([]field.Element, rlpTxnArith.Limb.Size()) + size := rlpTxnArith.Limbs[0].Size() + + absTxNum := make([]field.Element, size) + absTxNumMax := make([]field.Element, size) + limbs := make([][]field.Element, len(rlpTxnArith.Limbs)) + nBytes := make([]field.Element, size) + filterFetched := make([]field.Element, size) + endOfRlpSegment := make([]field.Element, size) + + for i := range limbs { + limbs[i] = make([]field.Element, size) + } - var chainID, nBytesChainID field.Element + chainID := make([]field.Element, len(rlpTxnArith.Limbs)) + var nBytesChainID field.Element // counter is used to populate filter.Data and will increment every time we find a new timestamp counter := 0 - for i := 0; i < rlpTxnArith.Limb.Size(); i++ { + for i := 0; i < size; i++ { toHashByProver := rlpTxnArith.ToHashByProver.GetColAssignmentAt(run, i) // process the RLP limb, by inspecting AbsTxNum, AbsTxNumMax, Limb, NBytes // and populating a row of the fetcher with these values. 
if toHashByProver.IsOne() { arithAbsTxNum := rlpTxnArith.AbsTxNum.GetColAssignmentAt(run, i) arithAbsTxNumMax := rlpTxnArith.AbsTxNumMax.GetColAssignmentAt(run, i) - arithLimb := rlpTxnArith.Limb.GetColAssignmentAt(run, i) arithNBytes := rlpTxnArith.NBytes.GetColAssignmentAt(run, i) absTxNum[counter].Set(&arithAbsTxNum) absTxNumMax[counter].Set(&arithAbsTxNumMax) - limb[counter].Set(&arithLimb) nBytes[counter].Set(&arithNBytes) filterFetched[counter].SetOne() + + for j := range rlpTxnArith.Limbs { + arithLimb := rlpTxnArith.Limbs[j].GetColAssignmentAt(run, i) + limbs[j][counter].Set(&arithLimb) + } + counter++ } // check if we have the ChainID @@ -191,15 +217,18 @@ func AssignRlpTxnFetcher(run *wizard.ProverRuntime, fetcher *RlpTxnFetcher, rlpT isPhaseChainID := rlpTxnArith.IsPhaseChainID.GetColAssignmentAt(run, i) if done.IsOne() && isPhaseChainID.IsOne() && toHashByProver.IsOne() { // fetch the ChainID from the limb column - fetchedValue := rlpTxnArith.Limb.GetColAssignmentAt(run, i) - chainID.Set(&fetchedValue) + for j := range rlpTxnArith.Limbs { + fetchedValue := rlpTxnArith.Limbs[j].GetColAssignmentAt(run, i) + chainID[j].Set(&fetchedValue) + } + // fetch the number of bytes for the ChainID fetchedNBytes := rlpTxnArith.NBytes.GetColAssignmentAt(run, i) nBytesChainID.Set(&fetchedNBytes) } } - for i := 0; i < rlpTxnArith.Limb.Size()-1; i++ { + for i := 0; i < size-1; i++ { if filterFetched[i].IsOne() { // only set end of segments in the active area if !absTxNum[i].Equal(&absTxNum[i+1]) { @@ -209,15 +238,17 @@ func AssignRlpTxnFetcher(run *wizard.ProverRuntime, fetcher *RlpTxnFetcher, rlpT } // assign the fetcher columns - size := fetcher.AbsTxNum.Size() run.AssignColumn(fetcher.AbsTxNum.GetColID(), smartvectors.RightZeroPadded(absTxNum[:counter], size)) run.AssignColumn(fetcher.AbsTxNumMax.GetColID(), smartvectors.RightZeroPadded(absTxNumMax[:counter], size)) - run.AssignColumn(fetcher.Limb.GetColID(), smartvectors.RightZeroPadded(limb[:counter], size)) 
run.AssignColumn(fetcher.NBytes.GetColID(), smartvectors.RightZeroPadded(nBytes[:counter], size)) run.AssignColumn(fetcher.FilterFetched.GetColID(), smartvectors.RightZeroPadded(filterFetched[:counter], size)) run.AssignColumn(fetcher.EndOfRlpSegment.GetColID(), smartvectors.NewRegular(endOfRlpSegment), wizard.DisableAssignmentSizeReduction) - run.AssignColumn(fetcher.ChainID.GetColID(), smartvectors.NewConstant(chainID, size)) run.AssignColumn(fetcher.NBytesChainID.GetColID(), smartvectors.NewConstant(nBytesChainID, size)) + for i := range rlpTxnArith.Limbs { + run.AssignColumn(fetcher.Limbs[i].GetColID(), smartvectors.RightZeroPadded(limbs[i][:counter], size)) + run.AssignColumn(fetcher.ChainID[i].GetColID(), smartvectors.NewConstant(chainID[i], size)) + } + fetcher.ComputeSelectorDiffAbsTxId.Run(run) } diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher_test.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher_test.go index 4651d94ba8..846217d259 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher_test.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/rlp_txn_fetcher_test.go @@ -11,6 +11,19 @@ import ( "github.com/stretchr/testify/assert" ) +var ( + testChainIDLimbs = []field.Element{ + field.NewFromString("0xccc0"), + field.NewFromString("0x0000"), + field.NewFromString("0x0000"), + field.NewFromString("0x0000"), + field.NewFromString("0x0000"), + field.NewFromString("0x0000"), + field.NewFromString("0x0000"), + field.NewFromString("0x0000"), + } +) + // TestRlpTxnFetcher tests the fetching of the rlp txn data func TestRlpTxnFetcher(t *testing.T) { @@ -32,7 +45,10 @@ func TestRlpTxnFetcher(t *testing.T) { // assign the CSV columns arith.AssignTestingArithModules(run, nil, nil, ctRlpTxn) AssignRlpTxnFetcher(run, &fetcher, rt) - assert.Equal(t, field.NewFromString("0xccc00000000000000000000000000000"), fetcher.ChainID.GetColAssignmentAt(run, 0), "ChainID 
value is incorrect.") + + for i := range fetcher.Limbs { + assert.Equal(t, testChainIDLimbs[i], fetcher.ChainID[i].GetColAssignmentAt(run, 0), "ChainID value is incorrect.") + } }) if err := wizard.Verify(cmp, proof); err != nil { t.Fatal("proof failed", err) diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher.go index 2cbf33c45f..6d687977ea 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher.go @@ -1,11 +1,13 @@ package fetchers_arithmetization import ( + "fmt" "github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" commonconstraints "github.com/consensys/linea-monorepo/prover/zkevm/prover/common/common_constraints" util "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" "github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/statesummary" @@ -13,76 +15,93 @@ import ( // RootHashFetcher is a struct used to fetch the first/final root hashes from the state summary module type RootHashFetcher struct { - First, Last ifaces.Column + // First and Last are the columns that store the first and last root hashes. + // They are divided into 16 16-bit limb columns. 256 bits in total. + First, Last [common.NbLimbU256]ifaces.Column } // NewRootHashFetcher returns a new RootHashFetcher with initialized columns that are not constrained. 
func NewRootHashFetcher(comp *wizard.CompiledIOP, name string, sizeSS int) *RootHashFetcher { - return &RootHashFetcher{ - First: util.CreateCol(name, "FIRST", sizeSS, comp), - Last: util.CreateCol(name, "LAST", sizeSS, comp), + var res RootHashFetcher + + for i := range res.First { + res.First[i] = util.CreateCol(name, fmt.Sprintf("FIRST_%d", i), sizeSS, comp) + res.Last[i] = util.CreateCol(name, fmt.Sprintf("LAST_%d", i), sizeSS, comp) } + + return &res } // DefineRootHashFetcher specifies the constraints of the RootHashFetcher with respect to the StateSummary func DefineRootHashFetcher(comp *wizard.CompiledIOP, fetcher *RootHashFetcher, name string, ss statesummary.Module) { + for i := range fetcher.First { + commonconstraints.MustBeConstant(comp, fetcher.First[i]) + commonconstraints.MustBeConstant(comp, fetcher.Last[i]) - commonconstraints.MustBeConstant(comp, fetcher.First) - commonconstraints.MustBeConstant(comp, fetcher.Last) - - // if the first state summary segment starts with storage operations, fetcher.First - // must equal the first value in the state summary's worldstatehash - // otherwise, we take it from the first value of the accumulator - comp.InsertLocal( - 0, - ifaces.QueryIDf("%s_%s", name, "FIRST_LOCAL"), - sym.Sub( - fetcher.First, - util.Ternary(ss.IsStorage, ss.WorldStateRoot, ss.AccumulatorStatement.StateDiff.InitialRoot), - ), - ) - - // ss.IsActive is already constrained in the state summary as a typical IsActive pattern, - // with 1s followed by 0s, no need to constrain it again - // two cases: Case 1: ss.IsActive is not completely full, then fetcher.Last is equal to - // the accumulator's final root at the last cell where isActive is 1 - // (ss.IsActive[i]*(1-ss.IsActive[i+1]))*(fetcher.Last-ss.FinalRoot[i]) - // Case 2: ss.IsActive is completely full, in which case we ask that - // ss.IsActive[size]*(fetcher.Last-ss.FinalRoot[size]) = 0 - // i.e. 
at the last row, counter is equal to ctMax - util.CheckLastELemConsistency(comp, ss.IsActive, ss.AccumulatorStatement.StateDiff.FinalRoot, fetcher.Last, name) + // if the first state summary segment starts with storage operations, fetcher.First + // must equal the first value in the state summary's worldstatehash + // otherwise, we take it from the first value of the accumulator + comp.InsertLocal( + 0, + ifaces.QueryIDf("%s_FIRST_LOCAL_%d", name, i), + sym.Sub( + fetcher.First[i], + util.Ternary(ss.IsStorage, ss.WorldStateRoot[i], ss.AccumulatorStatement.StateDiff.InitialRoot[i]), + ), + ) + // ss.IsActive is already constrained in the state summary as a typical IsActive pattern, + // with 1s followed by 0s, no need to constrain it again + // two cases: Case 1: ss.IsActive is not completely full, then fetcher.Last is equal to + // the accumulator's final root at the last cell where isActive is 1 + // (ss.IsActive[i]*(1-ss.IsActive[i+1]))*(fetcher.Last-ss.FinalRoot[i]) + // Case 2: ss.IsActive is completely full, in which case we ask that + // ss.IsActive[size]*(fetcher.Last-ss.FinalRoot[size]) = 0 + // i.e. 
at the last row, counter is equal to ctMax + util.CheckLastELemConsistency(comp, ss.IsActive, ss.AccumulatorStatement.StateDiff.FinalRoot[i], fetcher.Last[i], name) + } } // AssignRootHashFetcher assigns the data in the RootHashFetcher using the data fetched from the StateSummary func AssignRootHashFetcher(run *wizard.ProverRuntime, fetcher *RootHashFetcher, ss statesummary.Module) { // if the first state summary segment starts with storage operations, fetch the value in worldstatehash // otherwise, we take it from the first value of the accumulator - var first field.Element + var first, last [common.NbLimbU256]field.Element + + firstSrcCols := ss.AccumulatorStatement.StateDiff.InitialRoot initialStorage := ss.IsStorage.GetColAssignmentAt(run, 0) if initialStorage.IsOne() { - worldStateHash := ss.WorldStateRoot.GetColAssignmentAt(run, 0) - first.Set(&worldStateHash) - } else { - firstAcc := ss.AccumulatorStatement.StateDiff.InitialRoot.GetColAssignmentAt(run, 0) - first.Set(&firstAcc) + firstSrcCols = ss.WorldStateRoot + } + + for i := range first { + firstSrc := firstSrcCols[i].GetColAssignmentAt(run, 0) + first[i].Set(&firstSrc) } // get the value in the last row of FinalRoot before it goes inactive - var last field.Element size := ss.IsActive.Size() for i := 0; i < size; i++ { isActive := ss.IsActive.GetColAssignmentAt(run, i) - if isActive.IsOne() { - finalRoot := ss.AccumulatorStatement.StateDiff.FinalRoot.GetColAssignmentAt(run, i) - last.Set(&finalRoot) - } else { - // reached the end + if isActive.IsOne() && i != size-1 { + continue + } + + if i == 0 { break } + + for j := range last { + finalRoot := ss.AccumulatorStatement.StateDiff.FinalRoot[j].GetColAssignmentAt(run, i) + last[j].Set(&finalRoot) + } + + break } // assign the fetcher columns - run.AssignColumn(fetcher.First.GetColID(), smartvectors.NewConstant(first, size)) - run.AssignColumn(fetcher.Last.GetColID(), smartvectors.NewConstant(last, size)) + for i := range fetcher.First { + 
run.AssignColumn(fetcher.First[i].GetColID(), smartvectors.NewConstant(first[i], size)) + run.AssignColumn(fetcher.Last[i].GetColID(), smartvectors.NewConstant(last[i], size)) + } } diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher_test.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher_test.go index 91ddc4963a..09ddfb3b67 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher_test.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/root_hash_fetcher_test.go @@ -44,6 +44,7 @@ func TestRootHashFetcher(t *testing.T) { stateLogs = tCase.StateLogsGens(initState) shomeiTraces = mock.StateLogsToShomeiTraces(shomeiState, stateLogs) finalRootBytes = shomeiState.AccountTrie.TopRoot() + limbSize = 32 / len(fetcher.First) initRoot field.Element finalRoot field.Element ) @@ -51,12 +52,19 @@ func TestRootHashFetcher(t *testing.T) { ss.Assign(run, shomeiTraces) // assign the RootHashFetcher AssignRootHashFetcher(run, fetcher, ss) - // compute two field elements that correspond to the Shomei initial and final root hash in the account tries - initRoot.SetBytes(initRootBytes[:]) - finalRoot.SetBytes(finalRootBytes[:]) - // check that the fetcher works properly - assert.Equal(t, initRoot, fetcher.First.GetColAssignmentAt(run, 0), "Initial root value is incorrect") - assert.Equal(t, finalRoot, fetcher.Last.GetColAssignmentAt(run, 0), "Final root value is incorrect") + + for j := range initRoot { + start := j * limbSize + end := start + limbSize + + // compute two field elements that correspond to the Shomei initial and final root hash in the account tries + initRoot.SetBytes(initRootBytes[start:end]) + finalRoot.SetBytes(finalRootBytes[start:end]) + + // check that the fetcher works properly + assert.Equal(t, initRoot, fetcher.First[j].GetColAssignmentAt(run, 0), "Initial root value is incorrect") + assert.Equal(t, finalRoot, fetcher.Last[j].GetColAssignmentAt(run, 0), 
"Final root value is incorrect") + } } comp := wizard.Compile(define, dummy.Compile) diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher.go index f3f91533ed..8a8e5c4c6c 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher.go @@ -1,14 +1,17 @@ package fetchers_arithmetization import ( + "fmt" "github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/column" "github.com/consensys/linea-monorepo/prover/protocol/dedicated" + "github.com/consensys/linea-monorepo/prover/protocol/dedicated/byte32cmp" "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/query" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" commonconstraints "github.com/consensys/linea-monorepo/prover/zkevm/prover/common/common_constraints" arith "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/arith_struct" util "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" @@ -18,6 +21,9 @@ const ( // TimestampOffset is the corresponding offset position for the timestamp // since it is a shift, -1 means no offset. 
TimestampOffset = -12 + // dataLoPartStart is the starting position of the dataLo part where the timestamp data is stored + // limbBitSize is the bit size of each limb + limbBitSize = 16 ) // TimestampFetcher is a struct used to fetch the timestamps from the arithmetization's BlockDataCols @@ -25,13 +31,13 @@ type TimestampFetcher struct { // RelBlock is the relative block number, ranging from 1 to the total number of blocks RelBlock ifaces.Column // timestamp data for the first and last blocks in the conflation, columns of size 1 - First, Last ifaces.Column + First, Last [common.NbLimbU128]ifaces.Column // FirstArith and LastArith are identical to First and Last but are used in the constraints // involving arithmetization columns. They are constrained to be constant and via the // projection query between the fetcher and the - FirstArith, LastArith ifaces.Column + FirstArith, LastArith [common.NbLimbU128]ifaces.Column // Data contains all the timestamps in the conflation, ordered by block - Data ifaces.Column + Data [common.NbLimbU128]ifaces.Column // filter on the TimestampFetcher.Data column FilterFetched ifaces.Column // filter on the Arithmetization's columns @@ -45,13 +51,19 @@ type TimestampFetcher struct { // prover action to compute SelectorCt ComputeSelectorCt wizard.ProverAction // the absolute ID of the first block number - FirstBlockID ifaces.Column + FirstBlockID [common.NbLimbU48]ifaces.Column // the absolute ID of the last block number - LastBlockID ifaces.Column + LastBlockID [common.NbLimbU48]ifaces.Column // the absolute ID of the first block number - FirstBlockIDArith ifaces.Column + FirstBlockIDArith [common.NbLimbU48]ifaces.Column // the absolute ID of the last block number - LastBlockIDArith ifaces.Column + LastBlockIDArith [common.NbLimbU48]ifaces.Column + // the last block ID minus the first block ID, used to compute the difference + // between the first and last blocks and compare it to the RelBlock-1 + LastMinusFirstBlock 
byte32cmp.LimbColumns + LastMinusFirstBlockAction wizard.ProverAction + // a constant columns that contains -1 at every position + minusOne ifaces.Column } // NewTimestampFetcher returns a new TimestampFetcher with initialized columns that are not constrained. @@ -60,18 +72,24 @@ func NewTimestampFetcher(comp *wizard.CompiledIOP, name string, bdc *arith.Block size := bdc.Ct.Size() res := &TimestampFetcher{ - RelBlock: util.CreateCol(name, "REL_BLOCK", size, comp), - Data: util.CreateCol(name, "DATA", size, comp), - FilterFetched: util.CreateCol(name, "FILTER_FETCHED", size, comp), - FilterArith: util.CreateCol(name, "FILTER_ARITHMETIZATION", size, comp), - FirstBlockID: util.CreateCol(name, "FIRST_BLOCK_ID", size, comp), - LastBlockID: util.CreateCol(name, "LAST_BLOCK_ID", size, comp), - First: util.CreateCol(name, "FIRST", size, comp), - Last: util.CreateCol(name, "LAST", size, comp), - FirstBlockIDArith: util.CreateCol(name, "FIRST_BLOCK_ID_ARITHMETIZATION", size, comp), - LastBlockIDArith: util.CreateCol(name, "LAST_BLOCK_ID_ARITHMETIZATION", size, comp), - FirstArith: util.CreateCol(name, "FIRST_ARITHMETIZATION", size, comp), - LastArith: util.CreateCol(name, "LAST_ARITHMETIZATION", size, comp), + RelBlock: util.CreateCol(name, "REL_BLOCK", size, comp), + FilterFetched: util.CreateCol(name, "FILTER_FETCHED", size, comp), + FilterArith: util.CreateCol(name, "FILTER_ARITHMETIZATION", size, comp), + } + + for i := range res.Data { + res.Data[i] = util.CreateCol(name, fmt.Sprintf("DATA_%d", i), size, comp) + res.First[i] = util.CreateCol(name, fmt.Sprintf("FIRST_%d", i), size, comp) + res.Last[i] = util.CreateCol(name, fmt.Sprintf("LAST_%d", i), size, comp) + res.FirstArith[i] = util.CreateCol(name, fmt.Sprintf("FIRST_ARITHMETIZATION_%d", i), size, comp) + res.LastArith[i] = util.CreateCol(name, fmt.Sprintf("LAST_ARITHMETIZATION_%d", i), size, comp) + } + + for i := range res.FirstBlockID { + res.FirstBlockID[i] = util.CreateCol(name, 
fmt.Sprintf("FIRST_BLOCK_ID_%d", i), size, comp) + res.LastBlockID[i] = util.CreateCol(name, fmt.Sprintf("LAST_BLOCK_ID_%d", i), size, comp) + res.FirstBlockIDArith[i] = util.CreateCol(name, fmt.Sprintf("FIRST_BLOCK_ID_ARITHMETIZATION_%d", i), size, comp) + res.LastBlockIDArith[i] = util.CreateCol(name, fmt.Sprintf("LAST_BLOCK_ID_ARITHMETIZATION_%d", i), size, comp) } return res @@ -79,38 +97,57 @@ func NewTimestampFetcher(comp *wizard.CompiledIOP, name string, bdc *arith.Block // ConstrainFirstAndLastBlockID constraing the values of FirstBlockID and LastBlockID func ConstrainFirstAndLastBlockID(comp *wizard.CompiledIOP, fetcher *TimestampFetcher, name string, bdc *arith.BlockDataCols) { + fetcher.LastMinusFirstBlock, fetcher.LastMinusFirstBlockAction = byte32cmp.NewMultiLimbAdd(comp, + &byte32cmp.MultiLimbAddIn{ + Name: fmt.Sprintf("%s_LAST_BLOCK_ID_GLOBAL_INTERM_%s", name, fetcher.LastBlockID[0].GetColID()), + ALimbs: byte32cmp.LimbColumns{ + Limbs: fetcher.LastBlockID[:], + LimbBitSize: limbBitSize, + IsBigEndian: true, + }, + BLimbs: byte32cmp.LimbColumns{ + Limbs: fetcher.FirstBlockID[:], + LimbBitSize: limbBitSize, + IsBigEndian: true, + }, + Mask: sym.NewVariable(fetcher.FilterFetched), + }, + false, + ) - commonconstraints.MustBeConstant(comp, fetcher.FirstBlockID) - commonconstraints.MustBeConstant(comp, fetcher.LastBlockID) - commonconstraints.MustBeConstant(comp, fetcher.FirstBlockIDArith) - commonconstraints.MustBeConstant(comp, fetcher.LastBlockIDArith) + for i := range fetcher.FirstBlockID { + commonconstraints.MustBeConstant(comp, fetcher.FirstBlockID[i]) + commonconstraints.MustBeConstant(comp, fetcher.LastBlockID[i]) + commonconstraints.MustBeConstant(comp, fetcher.FirstBlockIDArith[i]) + commonconstraints.MustBeConstant(comp, fetcher.LastBlockIDArith[i]) - // Constrain the First Block ID - comp.InsertGlobal( - 0, - ifaces.QueryIDf("%s_%s_%s", name, "FIRST_BLOCK_ID_GLOBAL", fetcher.FirstBlockID.GetColID()), - sym.Mul( - fetcher.FilterArith, // 
select only non-padding, valid rows. - sym.Sub( - bdc.FirstBlock, - fetcher.FirstBlockIDArith, + // Constrain the First Block ID + comp.InsertGlobal( + 0, + ifaces.QueryIDf("%s_FIRST_BLOCK_ID_GLOBAL_%s_%d", name, fetcher.FirstBlockID[i].GetColID(), i), + sym.Mul( + fetcher.FilterArith, // select only non-padding, valid rows. + sym.Sub( + bdc.FirstBlock[i], + fetcher.FirstBlockIDArith[i], + ), ), - ), - ) + ) + } // FilterFetched is already constrained in the fetcher, no need to constrain it again // two cases: Case 1: FilterFetched is not completely filled with 1s (we have a border between 1s and 0s) - comp.InsertGlobal(0, ifaces.QueryIDf("%s_%s_%s", name, "LAST_BLOCK_ID_GLOBAL", fetcher.LastBlockID.GetColID()), + comp.InsertGlobal(0, + ifaces.QueryIDf("%s_LAST_BLOCK_ID_GLOBAL_%d", name, common.NbLimbU48-1), sym.Mul( fetcher.FilterFetched, sym.Sub(1, column.Shift(fetcher.FilterFetched, 1), ), sym.Sub( - fetcher.LastBlockID, + fetcher.LastMinusFirstBlock.Limbs[common.NbLimbU48-1], sym.Add( fetcher.RelBlock, - fetcher.FirstBlockID, -1, ), ), @@ -118,20 +155,19 @@ func ConstrainFirstAndLastBlockID(comp *wizard.CompiledIOP, fetcher *TimestampFe ) // Case 2: FilterFetched is completely filled with 1s, in which case we do not have a border between 1s and 0s - comp.InsertLocal(0, ifaces.QueryIDf("%s_%s_%s", name, "LAST_BLOCK_ID_LOCAL", fetcher.LastBlockID.GetColID()), + comp.InsertLocal(0, + ifaces.QueryIDf("%s_LAST_BLOCK_ID_LOCAL_%d", name, common.NbLimbU48-1), sym.Mul( column.Shift(fetcher.FilterFetched, -1), sym.Sub( - column.Shift(fetcher.LastBlockID, -1), + column.Shift(fetcher.LastMinusFirstBlock.Limbs[common.NbLimbU48-1], -1), sym.Add( column.Shift(fetcher.RelBlock, -1), - column.Shift(fetcher.FirstBlockID, -1), -1, ), ), ), ) - } // DefineTimestampFetcher specifies the constraints of the TimestampFetcher with respect to the BlockDataCols @@ -163,30 +199,32 @@ func DefineTimestampFetcher(comp *wizard.CompiledIOP, fetcher *TimestampFetcher, ), ) - 
commonconstraints.MustBeConstant(comp, fetcher.First) - commonconstraints.MustBeConstant(comp, fetcher.Last) - commonconstraints.MustBeConstant(comp, fetcher.FirstArith) - commonconstraints.MustBeConstant(comp, fetcher.LastArith) + for i := range fetcher.First { + commonconstraints.MustBeConstant(comp, fetcher.First[i]) + commonconstraints.MustBeConstant(comp, fetcher.Last[i]) + commonconstraints.MustBeConstant(comp, fetcher.FirstArith[i]) + commonconstraints.MustBeConstant(comp, fetcher.LastArith[i]) - // constrain fetcher.First to contain the value of the first block's timestamp, using all the timestamps in fetcher.Data - comp.InsertLocal( - 0, - ifaces.QueryIDf("%s_%s", name, "FIRST_LOCAL"), - sym.Sub( - fetcher.First, - fetcher.Data, // fetcher.Data is constrained in the projection query - ), - ) + // constrain fetcher.First to contain the value of the first block's timestamp, using all the timestamps in fetcher.Data + comp.InsertLocal( + 0, + ifaces.QueryIDf("%s_FIRST_LOCAL_%d", name, i), + sym.Sub( + fetcher.First[i], + fetcher.Data[i], // fetcher.Data is constrained in the projection query + ), + ) - // constrain fetcher.Last to contain the value of the last block's timestamp, - comp.InsertLocal( - 0, - ifaces.QueryIDf("%s_%s", name, "LAST_LOCAL"), - sym.Sub( - column.Shift(fetcher.LastArith, -1), - column.Shift(bdc.DataLo, TimestampOffset), - ), - ) + // constrain fetcher.Last to contain the value of the last block's timestamp, + comp.InsertLocal( + 0, + ifaces.QueryIDf("%s_LAST_LOCAL_%d", name, i), + sym.Sub( + column.Shift(fetcher.LastArith[i], -1), + column.Shift(bdc.Data[common.NbLimbU128+i], TimestampOffset), + ), + ) + } // require that the filter on fetched data is a binary column comp.InsertGlobal( @@ -211,23 +249,20 @@ func DefineTimestampFetcher(comp *wizard.CompiledIOP, fetcher *TimestampFetcher, ) // the table with the data we fetch from the arithmetization columns BlockDataCols - fetcherTable := []ifaces.Column{ - fetcher.RelBlock, - 
fetcher.Data, - fetcher.FirstBlockID, - fetcher.LastBlockID, - fetcher.First, - fetcher.Last, - } + fetcherTable := []ifaces.Column{fetcher.RelBlock} + fetcherTable = append(fetcherTable, fetcher.Data[:]...) + fetcherTable = append(fetcherTable, fetcher.FirstBlockID[:]...) + fetcherTable = append(fetcherTable, fetcher.LastBlockID[:]...) + fetcherTable = append(fetcherTable, fetcher.First[:]...) + fetcherTable = append(fetcherTable, fetcher.Last[:]...) + // the BlockDataCols we extract timestamp data from, and which we will use to check for consistency - arithTable := []ifaces.Column{ - bdc.RelBlock, - bdc.DataLo, - fetcher.FirstBlockIDArith, - fetcher.LastBlockIDArith, - fetcher.FirstArith, - fetcher.LastArith, - } + arithTable := []ifaces.Column{bdc.RelBlock} + arithTable = append(arithTable, bdc.Data[common.NbLimbU128:]...) + arithTable = append(arithTable, fetcher.FirstBlockIDArith[:]...) + arithTable = append(arithTable, fetcher.LastBlockIDArith[:]...) + arithTable = append(arithTable, fetcher.FirstArith[:]...) + arithTable = append(arithTable, fetcher.LastArith[:]...) 
// a projection query to check that the timestamp data is fetched correctly comp.InsertProjection( @@ -249,7 +284,9 @@ func DefineTimestampFetcher(comp *wizard.CompiledIOP, fetcher *TimestampFetcher, func AssignTimestampFetcher(run *wizard.ProverRuntime, fetcher *TimestampFetcher, bdc *arith.BlockDataCols) { var ( - first, last, firstBlockID field.Element + firstBlockID [common.NbLimbU48]field.Element + + first, last, timestamp [common.NbLimbU128]field.Element // get the hardcoded timestamp flag timestampField = util.GetTimestampField() @@ -261,15 +298,20 @@ func AssignTimestampFetcher(run *wizard.ProverRuntime, fetcher *TimestampFetcher // initialize empty fetched data and filter on the fetched data size = ct.Len() relBlock = make([]field.Element, size) - data = make([]field.Element, size) filterFetched = make([]field.Element, size) filterArith = make([]field.Element, stop-start) + data [common.NbLimbU128][]field.Element + // counter is used to populate filter.Data and will increment every // time we find a new timestamp - counter = 0 + counter uint64 = 0 ) + for i := range data { + data[i] = make([]field.Element, size) + } + for i := start; i < stop; i++ { var ( @@ -279,48 +321,65 @@ func AssignTimestampFetcher(run *wizard.ProverRuntime, fetcher *TimestampFetcher if inst.Equal(×tampField) && ct.IsZero() { // the row type is a timestamp-encoding row - timestamp := bdc.DataLo.GetColAssignmentAt(run, i) + for j := range timestamp { + timestamp[j] = bdc.Data[common.NbLimbU128+j].GetColAssignmentAt(run, i) + } // in the arithmetization, relBlock is the relative block number inside the conflation fetchedRelBlock := bdc.RelBlock.GetColAssignmentAt(run, i) if fetchedRelBlock.IsOne() { // the first relative block has code 0x1 - first.Set(×tamp) + for j := range first { + first[j].Set(×tamp[j]) + } // set the first absolute block ID - firstBlockID = bdc.FirstBlock.GetColAssignmentAt(run, i) + for j := range firstBlockID { + firstBlockID[j] = 
bdc.FirstBlock[j].GetColAssignmentAt(run, i) + } } // continuously update the last timestamp value - last.Set(×tamp) + for j := range last { + last[j].Set(×tamp[j]) + } // update counters and timestamp data filterFetched[counter].SetOne() relBlock[counter].Set(&fetchedRelBlock) // update the arithmetization filter filterArith[i-start].SetOne() - data[counter].Set(×tamp) + for j := range data { + data[j][counter].Set(×tamp[j]) + } counter++ } } + // compute the last absolute block ID - var lastBlockID field.Element - fieldCounter := field.NewElement(uint64(counter - 1)) - lastBlockID.Add(&firstBlockID, &fieldCounter) + lastBlockID := util.Multi16bitLimbAdd(firstBlockID[:], counter-1) // assign the fetcher columns - run.AssignColumn(fetcher.First.GetColID(), smartvectors.NewConstant(first, size)) - run.AssignColumn(fetcher.Last.GetColID(), smartvectors.NewConstant(last, size)) - run.AssignColumn(fetcher.FirstArith.GetColID(), smartvectors.NewConstant(first, size)) - run.AssignColumn(fetcher.LastArith.GetColID(), smartvectors.NewConstant(last, size)) run.AssignColumn(fetcher.RelBlock.GetColID(), smartvectors.RightZeroPadded(relBlock, size)) - run.AssignColumn(fetcher.Data.GetColID(), smartvectors.RightZeroPadded(data, size)) run.AssignColumn(fetcher.FilterFetched.GetColID(), smartvectors.RightZeroPadded(filterFetched, size)) run.AssignColumn(fetcher.FilterArith.GetColID(), smartvectors.FromCompactWithRange(filterArith, start, stop, size)) - run.AssignColumn(fetcher.FirstBlockID.GetColID(), smartvectors.NewConstant(firstBlockID, size)) - run.AssignColumn(fetcher.LastBlockID.GetColID(), smartvectors.NewConstant(lastBlockID, size)) - run.AssignColumn(fetcher.FirstBlockIDArith.GetColID(), smartvectors.NewConstant(firstBlockID, size)) - run.AssignColumn(fetcher.LastBlockIDArith.GetColID(), smartvectors.NewConstant(lastBlockID, size)) + + for i := range common.NbLimbU128 { + run.AssignColumn(fetcher.First[i].GetColID(), smartvectors.NewConstant(first[i], size)) + 
run.AssignColumn(fetcher.Last[i].GetColID(), smartvectors.NewConstant(last[i], size)) + run.AssignColumn(fetcher.FirstArith[i].GetColID(), smartvectors.NewConstant(first[i], size)) + run.AssignColumn(fetcher.LastArith[i].GetColID(), smartvectors.NewConstant(last[i], size)) + run.AssignColumn(fetcher.Data[i].GetColID(), smartvectors.RightZeroPadded(data[i], size)) + } + + for i := range firstBlockID { + run.AssignColumn(fetcher.FirstBlockID[i].GetColID(), smartvectors.NewConstant(firstBlockID[i], size)) + run.AssignColumn(fetcher.FirstBlockIDArith[i].GetColID(), smartvectors.NewConstant(firstBlockID[i], size)) + run.AssignColumn(fetcher.LastBlockID[i].GetColID(), smartvectors.NewConstant(lastBlockID[i], size)) + run.AssignColumn(fetcher.LastBlockIDArith[i].GetColID(), smartvectors.NewConstant(lastBlockID[i], size)) + } // assign the SelectorTimestamp using the ComputeSelectorTimestamp prover action fetcher.ComputeSelectorTimestamp.Run(run) // assign the SelectorCt using the ComputeSelectorCt prover action fetcher.ComputeSelectorCt.Run(run) + // assign the LastMinusFirstBlock using the LastMinusFirstBlockAction + fetcher.LastMinusFirstBlockAction.Run(run) } diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher_test.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher_test.go index 808949afb1..b841e26f93 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher_test.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/timestamp_fetcher_test.go @@ -35,8 +35,13 @@ func TestTimestampFetcher(t *testing.T) { // assign the timestamp fetcher AssignTimestampFetcher(run, fetcher, bdc) // two simple sanity checks based on the mock test data - assert.Equal(t, fetcher.First.GetColAssignmentAt(run, 0), field.NewElement(0xa)) - assert.Equal(t, fetcher.Last.GetColAssignmentAt(run, 0), field.NewElement(0xcd)) + nbLimbs := len(fetcher.First) + assert.Equal(t, 
fetcher.First[nbLimbs-1].GetColAssignmentAt(run, 0), field.NewElement(0xa)) + assert.Equal(t, fetcher.Last[nbLimbs-1].GetColAssignmentAt(run, 0), field.NewElement(0xcd)) + for i := range nbLimbs - 1 { + assert.Equal(t, fetcher.First[i].GetColAssignmentAt(run, 0), field.Zero()) + assert.Equal(t, fetcher.Last[i].GetColAssignmentAt(run, 0), field.Zero()) + } }) if err := wizard.Verify(cmp, proof); err != nil { t.Fatal("proof failed", err) diff --git a/prover/zkevm/prover/publicInput/fetchers_arithmetization/txn_data_fetcher.go b/prover/zkevm/prover/publicInput/fetchers_arithmetization/txn_data_fetcher.go index 9d451294d0..4fe58395cd 100644 --- a/prover/zkevm/prover/publicInput/fetchers_arithmetization/txn_data_fetcher.go +++ b/prover/zkevm/prover/publicInput/fetchers_arithmetization/txn_data_fetcher.go @@ -1,6 +1,7 @@ package fetchers_arithmetization import ( + "fmt" "github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/column" @@ -9,6 +10,7 @@ import ( "github.com/consensys/linea-monorepo/prover/protocol/query" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" arith "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/arith_struct" util "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" ) @@ -16,8 +18,7 @@ import ( type TxnDataFetcher struct { RelBlock ifaces.Column AbsTxNum ifaces.Column - FromHi ifaces.Column - FromLo ifaces.Column + From [common.NbLimbEthAddress]ifaces.Column FilterFetched ifaces.Column SelectorFromAddress ifaces.Column // prover action to compute SelectorFromAddress @@ -30,10 +31,13 @@ func NewTxnDataFetcher(comp *wizard.CompiledIOP, name string, td *arith.TxnData) res := TxnDataFetcher{ RelBlock: util.CreateCol(name, "REL_BLOCK", size, comp), 
AbsTxNum: util.CreateCol(name, "ABS_TX_NUM", size, comp), - FromHi: util.CreateCol(name, "FROM_HI", size, comp), - FromLo: util.CreateCol(name, "FROM_LO", size, comp), FilterFetched: util.CreateCol(name, "FILTER_FETCHED", size, comp), } + + for i := range td.From { + res.From[i] = util.CreateCol(name, fmt.Sprintf("FROM_%d", i), size, comp) + } + return res } @@ -69,25 +73,19 @@ func DefineTxnDataFetcher(comp *wizard.CompiledIOP, fetcher *TxnDataFetcher, nam ), ) - // the table with the data we fetch from the arithmetization's TxnData columns - fetcherTable := []ifaces.Column{ - fetcher.FromHi, - fetcher.FromLo, - } - // the TxnData we extract sender addresses from, and which we will use to check for consistency - arithTable := []ifaces.Column{ - td.FromHi, - td.FromLo, - } - // a projection query to check that the sender addresses are fetched correctly comp.InsertProjection( ifaces.QueryIDf("%s_TXN_DATA_FETCHER_PROJECTION", name), - query.ProjectionInput{ColumnA: fetcherTable, - ColumnB: arithTable, + query.ProjectionInput{ + // the table with the data we fetch from the arithmetization's TxnData columns + ColumnA: fetcher.From[:], + // the TxnData we extract sender addresses from, and which we will use to check for consistency + ColumnB: td.From[:], FilterA: fetcher.FilterFetched, // filter lights up on the arithmetization's TxnData rows that contain sender address data - FilterB: fetcher.SelectorFromAddress}) + FilterB: fetcher.SelectorFromAddress, + }, + ) } // AssignTxnDataFetcher assigns the data in the TxnDataFetcher using data fetched from the TxnData @@ -95,11 +93,10 @@ func AssignTxnDataFetcher(run *wizard.ProverRuntime, fetcher TxnDataFetcher, td var ( // Those are the assignments from the arithmetization + arithFrom [common.NbLimbEthAddress]ifaces.ColAssignment ct = td.Ct.GetColAssignment(run) fetchedAbsTxNum = td.AbsTxNum.GetColAssignment(run) fetchedRelBlock = td.RelBlock.GetColAssignment(run) - arithFromHi = td.FromHi.GetColAssignment(run) - 
arithFromLo = td.FromLo.GetColAssignment(run) start, stop = smartvectors.CoCompactRange(ct) size = td.Ct.Size() density = stop - start @@ -107,27 +104,37 @@ func AssignTxnDataFetcher(run *wizard.ProverRuntime, fetcher TxnDataFetcher, td // Those are the ongoing assignment slices relBlock = make([]field.Element, density) absTxNum = make([]field.Element, density) - fromHi = make([]field.Element, density) - fromLo = make([]field.Element, density) + from [common.NbLimbEthAddress][]field.Element filterFetched = make([]field.Element, density) counter = 0 ) + for i := range arithFrom { + arithFrom[i] = td.From[i].GetColAssignment(run) + from[i] = make([]field.Element, density) + } + for i := start; i < stop; i++ { var ( ct = ct.GetPtr(i) fetchedAbsTxNum = fetchedAbsTxNum.GetPtr(i) fetchedRelBlock = fetchedRelBlock.GetPtr(i) - arithFromHi = arithFromHi.GetPtr(i) - arithFromLo = arithFromLo.GetPtr(i) + arithFromVal [common.NbLimbEthAddress]*field.Element ) + for j := range arithFrom { + arithFromVal[j] = arithFrom[j].GetPtr(i) + } + if ct.IsOne() && !fetchedAbsTxNum.IsZero() { // absTxNum starts from 1, ct starts from 0 but always touches 1 absTxNum[counter].Set(fetchedAbsTxNum) relBlock[counter].Set(fetchedRelBlock) - fromHi[counter].Set(arithFromHi) - fromLo[counter].Set(arithFromLo) + + for j := range from { + from[j][counter].Set(arithFromVal[j]) + } + // update counters filterFetched[counter].SetOne() counter++ @@ -137,9 +144,12 @@ func AssignTxnDataFetcher(run *wizard.ProverRuntime, fetcher TxnDataFetcher, td // assign the fetcher columns run.AssignColumn(fetcher.RelBlock.GetColID(), smartvectors.RightZeroPadded(relBlock[:counter], size)) run.AssignColumn(fetcher.AbsTxNum.GetColID(), smartvectors.RightZeroPadded(absTxNum[:counter], size)) - run.AssignColumn(fetcher.FromHi.GetColID(), smartvectors.RightZeroPadded(fromHi[:counter], size)) - run.AssignColumn(fetcher.FromLo.GetColID(), smartvectors.RightZeroPadded(fromLo[:counter], size)) 
run.AssignColumn(fetcher.FilterFetched.GetColID(), smartvectors.RightZeroPadded(filterFetched[:counter], size)) + + for i := range from { + run.AssignColumn(fetcher.From[i].GetColID(), smartvectors.RightZeroPadded(from[i][:counter], size)) + } + // assign the SelectorFromAddress using the ComputeSelectorFromAddress prover action fetcher.ComputeSelectorFromAddress.Run(run) } diff --git a/prover/zkevm/prover/publicInput/input_extractor.go b/prover/zkevm/prover/publicInput/input_extractor.go index d79495abb5..da6d25fd77 100644 --- a/prover/zkevm/prover/publicInput/input_extractor.go +++ b/prover/zkevm/prover/publicInput/input_extractor.go @@ -3,6 +3,7 @@ package publicInput import ( "github.com/consensys/linea-monorepo/prover/protocol/query" "github.com/consensys/linea-monorepo/prover/protocol/wizard" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" ) // FunctionalInputExtractor is a collection over LocalOpeningQueries that can be @@ -27,16 +28,15 @@ type FunctionalInputExtractor struct { // InitialStateRootHash and FinalStateRootHash are resp the initial and // root hash of the state for the - InitialStateRootHash, FinalStateRootHash query.LocalOpening - InitialBlockNumber, FinalBlockNumber query.LocalOpening - InitialBlockTimestamp, FinalBlockTimestamp query.LocalOpening - FirstRollingHashUpdate, LastRollingHashUpdate [2]query.LocalOpening - FirstRollingHashUpdateNumber, LastRollingHashUpdateNumber query.LocalOpening + InitialStateRootHash, FinalStateRootHash [common.NbLimbU256]query.LocalOpening + InitialBlockNumber, FinalBlockNumber [common.NbLimbU48]query.LocalOpening + InitialBlockTimestamp, FinalBlockTimestamp [common.NbLimbU128]query.LocalOpening + FirstRollingHashUpdate, LastRollingHashUpdate [common.NbLimbU256]query.LocalOpening + FirstRollingHashUpdateNumber, LastRollingHashUpdateNumber [common.NbLimbU128]query.LocalOpening - ChainID query.LocalOpening - NBytesChainID query.LocalOpening - L2MessageServiceAddrHi query.LocalOpening - 
L2MessageServiceAddrLo query.LocalOpening + ChainID [common.NbLimbU128]query.LocalOpening + NBytesChainID query.LocalOpening + L2MessageServiceAddr [common.NbLimbEthAddress]query.LocalOpening } // Run assigns all the local opening queries @@ -49,20 +49,29 @@ func (fie *FunctionalInputExtractor) Run(run *wizard.ProverRuntime) { assignLO(fie.DataNbBytes) assignLO(fie.DataChecksum) assignLO(fie.L2MessageHash) - assignLO(fie.InitialStateRootHash) - assignLO(fie.InitialBlockNumber) - assignLO(fie.InitialBlockTimestamp) - assignLO(fie.FirstRollingHashUpdate[0]) - assignLO(fie.FirstRollingHashUpdate[1]) - assignLO(fie.FirstRollingHashUpdateNumber) - assignLO(fie.FinalStateRootHash) - assignLO(fie.FinalBlockNumber) - assignLO(fie.FinalBlockTimestamp) - assignLO(fie.LastRollingHashUpdate[0]) - assignLO(fie.LastRollingHashUpdate[1]) - assignLO(fie.LastRollingHashUpdateNumber) - assignLO(fie.ChainID) assignLO(fie.NBytesChainID) - assignLO(fie.L2MessageServiceAddrHi) - assignLO(fie.L2MessageServiceAddrLo) + + for i := range common.NbLimbU256 { + assignLO(fie.FirstRollingHashUpdate[i]) + assignLO(fie.LastRollingHashUpdate[i]) + assignLO(fie.InitialStateRootHash[i]) + assignLO(fie.FinalStateRootHash[i]) + } + + for i := range common.NbLimbEthAddress { + assignLO(fie.L2MessageServiceAddr[i]) + } + + for i := range common.NbLimbU48 { + assignLO(fie.InitialBlockNumber[i]) + assignLO(fie.FinalBlockNumber[i]) + } + + for i := range common.NbLimbU128 { + assignLO(fie.ChainID[i]) + assignLO(fie.InitialBlockTimestamp[i]) + assignLO(fie.FinalBlockTimestamp[i]) + assignLO(fie.FirstRollingHashUpdateNumber[i]) + assignLO(fie.LastRollingHashUpdateNumber[i]) + } } diff --git a/prover/zkevm/prover/publicInput/logs/extracted_data.go b/prover/zkevm/prover/publicInput/logs/extracted_data.go index 1e86056a0e..8f4c405438 100644 --- a/prover/zkevm/prover/publicInput/logs/extracted_data.go +++ b/prover/zkevm/prover/publicInput/logs/extracted_data.go @@ -1,6 +1,7 @@ package logs import ( + "fmt" 
"github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/column" @@ -8,6 +9,7 @@ import ( "github.com/consensys/linea-monorepo/prover/protocol/query" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" util "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" ) @@ -16,7 +18,7 @@ import ( // RollingHash case: either the message number stored in the Lo part // or the RollingHash stored in both Hi/Lo type ExtractedData struct { - Hi, Lo ifaces.Column + Data [common.NbLimbU256]ifaces.Column filterArith ifaces.Column filterFetched ifaces.Column } @@ -24,14 +26,17 @@ type ExtractedData struct { // NewExtractedData initializes a NewExtractedData struct, registering columns that are not yet constrained. func NewExtractedData(comp *wizard.CompiledIOP, size int, name string) ExtractedData { res := ExtractedData{ - // register Hi, Lo, the columns in which we embed the message we want to fetch from LogColumns - Hi: util.CreateCol(name, "EXTRACTED_HI", size, comp), - Lo: util.CreateCol(name, "EXTRACTED_LO", size, comp), // register the filter on the arithmetization log columns filterArith: util.CreateCol(name, "FILTER", size, comp), // a filter on the columns with fetched data filterFetched: util.CreateCol(name, "FILTER_ON_FETCHED", size, comp), } + + // register Data, the columns in which we embed the message we want to fetch from LogColumns + for i := range res.Data { + res.Data[i] = util.CreateCol(name, fmt.Sprintf("EXTRACTED_%d", i), size, comp) + } + return res } @@ -39,37 +44,33 @@ func NewExtractedData(comp *wizard.CompiledIOP, size int, name string) Extracted // along with filters to select only L2L1/Rolling Hash logs. 
// DefineExtractedData then uses a projection query to check that the data was fetched appropriately func DefineExtractedData(comp *wizard.CompiledIOP, logCols LogColumns, sel Selectors, fetched ExtractedData, logType int) { - // the table with the data we fetch from the arithmetization columns LogColumns - fetchedTable := []ifaces.Column{ - fetched.Hi, - fetched.Lo, + selectors := sym.Mul( + // IsLogType returns either isLog3 or isLog4 depending on the case + IsLogType(logCols, logType), + // GetSelectorCounter returns 1 when one of the following holds: + // logCols.Ct = 5 (an L2L1 message) or logCols.Ct = 3 (RollingMsgNo) or logCols.Ct = 4 (RollingHashNo) + GetSelectorCounter(sel, logType), + ) + + // now we check that the first topics are computed properly in the log, by inspecting a previous row at a certain offset + // the offset is -3 (an L2L1 message) or -1 (RollingMsgNo) or -2 (RollingHashNo) + selectorsFirstTopic := GetSelectorFirstTopic(sel, logType) + for i := range selectorsFirstTopic { + selectors = sym.Mul(selectors, column.Shift(selectorsFirstTopic[i], GetOffset(logType, FirstTopic))) } - // the LogColumns we extract data from, and which we will use to check for consistency - logsTable := []ifaces.Column{ - logCols.DataHi, - logCols.DataLo, + + // now we check that the address of this log is indeed the L2BridgeAddress + // the offset is -4 (an L2L1 message) or -2 (RollingMsgNo) or -3 (RollingHashNo) + for i := range sel.SelectorL2BridgeAddress { + selectors = sym.Mul(selectors, column.Shift(sel.SelectorL2BridgeAddress[i], GetOffset(logType, L2BridgeAddress))) } comp.InsertGlobal( 0, ifaces.QueryIDf("%s_LOGS_FILTER_CONSTRAINT_CHECK_LOG_OF_TYPE", GetName(logType)), - sym.Sub( - fetched.filterArith, - sym.Mul( - IsLogType(logCols, logType), // IsLogType returns either isLog3 or isLog4 depending on the case - GetSelectorCounter(sel, logType), // GetSelectorCounter returns 1 when one of the following holds: - // logCols.Ct = 5 (an L2L1 message) or 
logCols.Ct = 3 (RollingMsgNo) or logCols.Ct = 4 (RollingHashNo) - // now we check that the first topics are computed properly in the log, by inspecting a previous row at a certain offset - // the offset is -3 (an L2L1 message) or -1 (RollingMsgNo) or -2 (RollingHashNo) - column.Shift(GetSelectorFirstTopicHi(sel, logType), GetOffset(logType, FirstTopic)), - column.Shift(GetSelectorFirstTopicLo(sel, logType), GetOffset(logType, FirstTopic)), - // now we check that the address of this log is indeed the L2BridgeAddress - // the offset is -4 (an L2L1 message) or -2 (RollingMsgNo) or -3 (RollingHashNo) - column.Shift(sel.SelectorL2BridgeAddressHi, GetOffset(logType, L2BridgeAddress)), - column.Shift(sel.SelectorL2BridgeAddressLo, GetOffset(logType, L2BridgeAddress)), - ), - ), + sym.Sub(fetched.filterArith, selectors), ) + // require that the filter on fetched data is a binary column comp.InsertGlobal( 0, @@ -94,36 +95,55 @@ func DefineExtractedData(comp *wizard.CompiledIOP, logCols LogColumns, sel Selec // a projection query to check that the messages are fetched correctly comp.InsertProjection( ifaces.QueryIDf("%s_LOGS_PROJECTION", GetName(logType)), - query.ProjectionInput{ColumnA: fetchedTable, - ColumnB: logsTable, + query.ProjectionInput{ + // the table with the data we fetch from the arithmetization columns LogColumns + ColumnA: fetched.Data[:], + // the LogColumns we extract data from, and which we will use to check for consistency + ColumnB: logCols.Data[:], FilterA: fetched.filterFetched, - FilterB: fetched.filterArith}) + FilterB: fetched.filterArith, + }, + ) } // CheckBridgeAddress checks if a row does indeed contain the data corresponding to a the bridge address func CheckBridgeAddress(run *wizard.ProverRuntime, lCols LogColumns, sel Selectors, pos int) bool { - outHi := lCols.DataHi.GetColAssignmentAt(run, pos) - outLo := lCols.DataLo.GetColAssignmentAt(run, pos) - bridgeAddrHi := sel.L2BridgeAddressColHI.GetColAssignmentAt(run, 0) - bridgeAddrLo := 
sel.L2BridgeAddressColLo.GetColAssignmentAt(run, 0) - if outHi.Equal(&bridgeAddrHi) && outLo.Equal(&bridgeAddrLo) { - return true + offset := common.NbLimbU256 - common.NbLimbEthAddress + + for i := range offset { + out := lCols.Data[i].GetColAssignmentAt(run, pos) + if !out.IsZero() { + return false + } } - return false + + for i := range sel.L2BridgeAddressCol { + out := lCols.Data[i+offset].GetColAssignmentAt(run, pos) + bridgeAddr := sel.L2BridgeAddressCol[i].GetColAssignmentAt(run, 0) + + if !out.Equal(&bridgeAddr) { + return false + } + } + + return true } // CheckFirstTopic checks if a row does indeed contain the data corresponding to a first topic in a L2l1/Rolling hash log func CheckFirstTopic(run *wizard.ProverRuntime, lCols LogColumns, pos int, logType int) bool { - var firstTopicHi, firstTopicLo field.Element + var firstTopicLimb field.Element firstTopicBytes := GetFirstTopic(logType) // fixed expected value for the topic on the first topic row - firstTopicHi.SetBytes(firstTopicBytes[:16]) - firstTopicLo.SetBytes(firstTopicBytes[16:]) - outHi := lCols.DataHi.GetColAssignmentAt(run, pos) - outLo := lCols.DataLo.GetColAssignmentAt(run, pos) - if firstTopicHi.Equal(&outHi) && firstTopicLo.Equal(&outLo) { - return true + for i := range lCols.Data { + + firstTopicLimb.SetBytes(firstTopicBytes[i*2 : (i+1)*2]) + outData := lCols.Data[i].GetColAssignmentAt(run, pos) + + if !firstTopicLimb.Equal(&outData) { + return false + } } - return false + + return true } // IsPositionTargetMessage checks if a row does indeed contain the relevant messages corresponding to L2l1/RollingHash logs @@ -153,28 +173,37 @@ func IsPositionTargetMessage(run *wizard.ProverRuntime, lCols LogColumns, sel Se // AssignExtractedData fetches data from the LogColumns and uses it to populate the ExtractedData columns func AssignExtractedData(run *wizard.ProverRuntime, lCols LogColumns, sel Selectors, fetched ExtractedData, logType int) { filterLogs := make([]field.Element, lCols.Ct.Size()) - 
Hi := make([]field.Element, lCols.Ct.Size()) - Lo := make([]field.Element, lCols.Ct.Size()) + + var data [common.NbLimbU256][]field.Element + for i := range data { + data[i] = make([]field.Element, lCols.Ct.Size()) + } + filterFetched := make([]field.Element, lCols.Ct.Size()) - counter := 0 // counter used to incrementally populate Hi, Lo of the ExtractedData and their associated filterFetched + counter := 0 // counter used to incrementally populate limbs of the ExtractedData and their associated filterFetched for i := 0; i < lCols.Ct.Size(); i++ { // the following conditional checks if row i contains a message that should be picked - if IsPositionTargetMessage(run, lCols, sel, i, logType) { - hi := lCols.DataHi.GetColAssignmentAt(run, i) - lo := lCols.DataLo.GetColAssignmentAt(run, i) - // pick the messages and add them to the msgHi/Lo ExtractedData columns - Hi[counter].Set(&hi) - Lo[counter].Set(&lo) - // now set the filter on ExtractedData columns to be 1 - filterFetched[counter].SetOne() - // set the filter on the LogColumns to be 1, at position i - filterLogs[i].SetOne() - counter++ + if !IsPositionTargetMessage(run, lCols, sel, i, logType) { + continue + } + + for j := range data { + // pick the messages and add them to the msg limbs ExtractedData columns + data[j][counter] = lCols.Data[j].GetColAssignmentAt(run, i) } + + // now set the filter on ExtractedData columns to be 1 + filterFetched[counter].SetOne() + // set the filter on the LogColumns to be 1, at position i + filterLogs[i].SetOne() + counter++ } + // assign our fetched data - run.AssignColumn(fetched.Hi.GetColID(), smartvectors.NewRegular(Hi)) - run.AssignColumn(fetched.Lo.GetColID(), smartvectors.NewRegular(Lo)) + for i := range data { + run.AssignColumn(fetched.Data[i].GetColID(), smartvectors.NewRegular(data[i])) + } + // assign filters for original log columns and fetched ExtractedData run.AssignColumn(fetched.filterArith.GetColID(), smartvectors.NewRegular(filterLogs)) // filter on 
LogColumns run.AssignColumn(fetched.filterFetched.GetColID(), smartvectors.NewRegular(filterFetched)) // filter on fetched data diff --git a/prover/zkevm/prover/publicInput/logs/extracted_hasher_test.go b/prover/zkevm/prover/publicInput/logs/extracted_hasher_test.go index f2dcbc703b..b683a08b05 100644 --- a/prover/zkevm/prover/publicInput/logs/extracted_hasher_test.go +++ b/prover/zkevm/prover/publicInput/logs/extracted_hasher_test.go @@ -59,7 +59,7 @@ func TestLogsDataFetcher(t *testing.T) { // initialize hashers hasherL2l1 = NewLogHasher(b.CompiledIOP, colSize, "L2L1LOGS") // initialize rolling selector - rollingSelector = NewRollingSelector(b.CompiledIOP, "ROLLING_SEL", fetchedRollingHash.Hi.Size(), fetchedRollingMsg.Hi.Size()) + rollingSelector = NewRollingSelector(b.CompiledIOP, "ROLLING_SEL", fetchedRollingHash.Data[0].Size()) // define extracted data from logs and associated filters DefineExtractedData(b.CompiledIOP, logCols, selectors, fetchedL2L1, L2L1) DefineExtractedData(b.CompiledIOP, logCols, selectors, fetchedRollingMsg, RollingMsgNo) diff --git a/prover/zkevm/prover/publicInput/logs/hasher.go b/prover/zkevm/prover/publicInput/logs/hasher.go index 2b27b7316b..8a60180f2a 100644 --- a/prover/zkevm/prover/publicInput/logs/hasher.go +++ b/prover/zkevm/prover/publicInput/logs/hasher.go @@ -1,7 +1,7 @@ package logs import ( - "github.com/consensys/linea-monorepo/prover/crypto/mimc" + "fmt" "github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/column" @@ -9,6 +9,7 @@ import ( "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" commonconstraints "github.com/consensys/linea-monorepo/prover/zkevm/prover/common/common_constraints" util 
"github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" ) @@ -28,21 +29,24 @@ import ( // The final value of the chained hash can be retrieved as ---> hashSecond[ctMax[any index]] type LogHasher struct { // the hash value after each step, as explained in the description of LogHasher - hashFirst, hashSecond ifaces.Column + hash [common.NbLimbU256]ifaces.Column // L2L1 logs: inter is a shifted version of hashSecond, necessary due to how the MiMC constraints operate - inter ifaces.Column + inter [common.NbLimbU256]ifaces.Column // the relevant value of the hash (the last value when isActive ends) - HashFinal ifaces.Column + HashFinal [common.NbLimbU256]ifaces.Column } // NewLogHasher returns a new LogHasher with initialized columns that are not constrained. func NewLogHasher(comp *wizard.CompiledIOP, size int, name string) LogHasher { - return LogHasher{ - hashFirst: util.CreateCol(name, "HASH_FIRST", size, comp), - hashSecond: util.CreateCol(name, "HASH_SECOND", size, comp), - inter: util.CreateCol(name, "INTER", size, comp), - HashFinal: util.CreateCol(name, "HASH_FINAL", size, comp), + var res LogHasher + + for i := range res.hash { + res.hash[i] = util.CreateCol(name, fmt.Sprintf("HASH_%d", i), size, comp) + res.inter[i] = util.CreateCol(name, fmt.Sprintf("INTER_%d", i), size, comp) + res.HashFinal[i] = util.CreateCol(name, fmt.Sprintf("HASH_FINAL_%d", i), size, comp) } + + return res } // DefineHasher specifies the constraints of the LogHasher with respect to the ExtractedData fetched from the arithmetization @@ -50,65 +54,75 @@ func DefineHasher(comp *wizard.CompiledIOP, hasher LogHasher, name string, fetch // Needed for the limitless prover to understand that the columns are // not just empty with just padding and suboptimal representation. 
- pragmas.MarkFullColumn(hasher.inter) - pragmas.MarkFullColumn(hasher.hashFirst) - pragmas.MarkFullColumn(hasher.hashSecond) + for i := range hasher.hash { + pragmas.MarkFullColumn(hasher.hash[i]) + pragmas.MarkFullColumn(hasher.inter[i]) + } // MiMC constraints - comp.InsertMiMC(0, ifaces.QueryIDf("%s_%s", name, "MIMC_CONSTRAINT"), fetched.Hi, hasher.inter, hasher.hashFirst, nil) - comp.InsertMiMC(0, ifaces.QueryIDf("%s_%s", name, "MIMC_CONSTRAINT_SECOND"), fetched.Lo, hasher.hashFirst, hasher.hashSecond, nil) - - // intermediary state integrity - comp.InsertGlobal(0, ifaces.QueryIDf("%s_%s", name, "CONSISTENCY_INTER_AND_HASH_LAST"), // LAST is either hashSecond or hashThird - sym.Sub(hasher.hashSecond, - column.Shift(hasher.inter, 1), - ), - ) - - // inter, the old state column, is initially zero - comp.InsertLocal(0, ifaces.QueryIDf("%s_%s", name, "INTER_LOCAL"), ifaces.ColumnAsVariable(hasher.inter)) - - // constrain HashFinal - commonconstraints.MustBeConstant(comp, hasher.HashFinal) - util.CheckLastELemConsistency(comp, fetched.filterFetched, hasher.hashSecond, hasher.HashFinal, name) + comp.InsertMiMC(0, ifaces.QueryIDf("%s_%s", name, "MIMC_CONSTRAINT"), fetched.Data, hasher.inter, hasher.hash, nil) + + for i := range hasher.hash { + // intermediary state integrity + comp.InsertGlobal(0, ifaces.QueryIDf("%s_CONSISTENCY_INTER_AND_HASH_%d", name, i), + sym.Sub(hasher.hash[i], column.Shift(hasher.inter[i], 1)), + ) + + // inter, the old state column, is initially zero + comp.InsertLocal(0, ifaces.QueryIDf("%s_INTER_LOCAL_%d", name, i), ifaces.ColumnAsVariable(hasher.inter[i])) + + // constrain HashFinal + commonconstraints.MustBeConstant(comp, hasher.HashFinal[i]) + util.CheckLastELemConsistency(comp, fetched.filterFetched, hasher.hash[i], hasher.HashFinal[i], name) + } } // AssignHasher assigns the data in the LogHasher using the ExtractedData fetched from the arithmetization func AssignHasher(run *wizard.ProverRuntime, hasher LogHasher, fetched 
ExtractedData) { - size := fetched.Hi.Size() - hashFirst := make([]field.Element, size) - hashSecond := make([]field.Element, size) - inter := make([]field.Element, size) + size := fetched.Data[0].Size() + var hash, inter [common.NbLimbU256][]field.Element + for i := range hash { + hash[i] = make([]field.Element, size) + inter[i] = make([]field.Element, size) + } - var ( - hashFinal field.Element - ) + var fetchedData, hashFinal [common.NbLimbU256]field.Element + // the initial state is zero + state := make([]field.Element, common.NbLimbU256) + for i := range state { + state[i].SetZero() + } - state := field.Zero() // the initial state is zero - for i := 0; i < len(hashFirst); i++ { - // first, hash the HI part of the fetched log message - state = mimc.BlockCompression(state, fetched.Hi.GetColAssignmentAt(run, i)) - hashFirst[i].Set(&state) + for i := 0; i < size; i++ { + for j := range fetchedData { + fetchedData[j] = fetched.Data[j].GetColAssignmentAt(run, i) + } - // secondly, hash the Lo part of the fetched log message - state = mimc.BlockCompression(state, fetched.Lo.GetColAssignmentAt(run, i)) - hashSecond[i].Set(&state) + state = common.BlockCompression(state, fetchedData[:]) + for j := range hash { + hash[j][i] = state[j] + } // the data in hashSecond is used to initialize the next initial state, stored in the inter column - if i+1 < len(hashFirst) { - inter[i+1] = hashSecond[i] + if i+1 < size { + for j := range inter { + inter[j][i+1] = state[j] + } } isActive := fetched.filterFetched.GetColAssignmentAt(run, i) // continuously update HashFinal if isActive.IsOne() { - hashFinal.Set(&hashSecond[i]) + for j := range hashFinal { + hashFinal[j] = state[j] + } } } // assign the hasher columns - run.AssignColumn(hasher.hashFirst.GetColID(), smartvectors.NewRegular(hashFirst)) - run.AssignColumn(hasher.hashSecond.GetColID(), smartvectors.NewRegular(hashSecond)) - run.AssignColumn(hasher.inter.GetColID(), smartvectors.NewRegular(inter)) - 
run.AssignColumn(hasher.HashFinal.GetColID(), smartvectors.NewConstant(hashFinal, size)) + for i := range hasher.hash { + run.AssignColumn(hasher.hash[i].GetColID(), smartvectors.NewRegular(hash[i])) + run.AssignColumn(hasher.inter[i].GetColID(), smartvectors.NewRegular(inter[i])) + run.AssignColumn(hasher.HashFinal[i].GetColID(), smartvectors.NewConstant(hashFinal[i], size)) + } } diff --git a/prover/zkevm/prover/publicInput/logs/mock_logs.go b/prover/zkevm/prover/publicInput/logs/mock_logs.go index 941a201614..9a7ffaa33b 100644 --- a/prover/zkevm/prover/publicInput/logs/mock_logs.go +++ b/prover/zkevm/prover/publicInput/logs/mock_logs.go @@ -1,6 +1,7 @@ package logs import ( + "fmt" eth "github.com/consensys/linea-monorepo/prover/backend/execution/statemanager" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/ifaces" @@ -27,28 +28,34 @@ func noTopics(logType int) int { // LogInfo will be a mock data structure containing the minimal amount of information // needed to generate test logs type LogInfo struct { - LogType int - DataSize, noTopics field.Element - AddressHi, AddressLo field.Element - TopicsHi, TopicsLo []field.Element + LogType int + DataSize, noTopics [common.NbLimbU128]field.Element + Address [common.NbLimbEthAddress]field.Element + Topics [][common.NbLimbU256]field.Element } // LogColumns represents the relevant columns for l2l1logs/RollingHash logs from the LogInfo module. 
type LogColumns struct { IsLog0, IsLog1, IsLog2, IsLog3, IsLog4 ifaces.Column AbsLogNum ifaces.Column - AbsLogNumMax ifaces.Column // total number of logs in the conflated batch - Ct ifaces.Column // counter column used inside a column segment used for one specific log - DataHi, DataLo ifaces.Column // the Hi and Lo parts of outgoing data - TxEmitsLogs ifaces.Column + // total number of logs in the conflated batch + AbsLogNumMax ifaces.Column + // counter column used inside a column segment used for one specific log + Ct ifaces.Column + // the logs outgoing data + Data [common.NbLimbU256]ifaces.Column + TxEmitsLogs ifaces.Column } -// ConvertAddress converts a 20 bytes address into the HI and LO parts on the arithmetization side -func ConvertAddress(address eth.Address) (field.Element, field.Element) { - var hi, lo field.Element - hi.SetBytes(address[:4]) - lo.SetBytes(address[4:]) - return hi, lo +// ConvertAddress converts a 20 bytes address into the 10 16-bit limbs on the arithmetization side +func ConvertAddress(address eth.Address) [common.NbLimbEthAddress]field.Element { + var res [common.NbLimbEthAddress]field.Element + + for i := range res { + res[i].SetBytes(address[i*2 : (i+1)*2]) + } + + return res } // ComputeSize computes the size of columns that have the same shape as the ones in the LogInfo module @@ -59,7 +66,7 @@ func ComputeSize(logs []LogInfo) int { if log.LogType == MISSING_LOG { size++ } else { - size += 2 + len(log.TopicsHi) + size += 2 + len(log.Topics) } } return size @@ -71,13 +78,13 @@ func (logInfo LogInfo) ConvertToL2L1Log() types.Log { // compute the topics var topics []ethCommon.Hash - for i := 0; i < len(logInfo.TopicsHi); i++ { - bytesHi := logInfo.TopicsHi[i].Bytes() - bytesLo := logInfo.TopicsLo[i].Bytes() - hashBytes := make([]byte, 0, 32) - hashBytes = append(hashBytes, bytesHi[16:]...) - hashBytes = append(hashBytes, bytesLo[16:]...) 
- topics = append(topics, ethCommon.BytesToHash(hashBytes)) + for i := 0; i < len(logInfo.Topics); i++ { + var hashBytes [32]byte + for j := range logInfo.Topics[i] { + bytes := logInfo.Topics[i][j].Bytes() + copy(hashBytes[j*2:(j+1)*2], bytes[field.Bytes-2:]) + } + topics = append(topics, ethCommon.BytesToHash(hashBytes[:])) } var data []byte @@ -131,11 +138,13 @@ func NewLogColumns(comp *wizard.CompiledIOP, size int, name string) LogColumns { AbsLogNum: createCol("ABS_LOG_NUM"), AbsLogNumMax: createCol("ABS_LOG_NUM_MAX"), Ct: createCol("CT"), - DataHi: createCol("OUTGOING_HI"), - DataLo: createCol("OUTGOING_LO"), TxEmitsLogs: createCol("TX_EMITS_LOGS"), } + for i := range res.Data { + res.Data[i] = createCol(fmt.Sprintf("OUTGOING_%d", i)) + } + return res } @@ -145,13 +154,13 @@ type LogColumnsAssignmentBuilder struct { IsLog0, IsLog1, IsLog2, IsLog3, IsLog4 *common.VectorBuilder AbsLogNum, AbsLogNumMax *common.VectorBuilder Ct *common.VectorBuilder - OutgoingHi, OutgoingLo *common.VectorBuilder + Outgoing [common.NbLimbU256]*common.VectorBuilder TxEmitsLogs *common.VectorBuilder } // NewLogColumnsAssignmentBuilder initializes a fresh LogColumnsAssignmentBuilder func NewLogColumnsAssignmentBuilder(lc *LogColumns) LogColumnsAssignmentBuilder { - return LogColumnsAssignmentBuilder{ + res := LogColumnsAssignmentBuilder{ IsLog0: common.NewVectorBuilder(lc.IsLog0), IsLog1: common.NewVectorBuilder(lc.IsLog1), IsLog2: common.NewVectorBuilder(lc.IsLog2), @@ -160,11 +169,14 @@ func NewLogColumnsAssignmentBuilder(lc *LogColumns) LogColumnsAssignmentBuilder AbsLogNum: common.NewVectorBuilder(lc.AbsLogNum), AbsLogNumMax: common.NewVectorBuilder(lc.AbsLogNumMax), Ct: common.NewVectorBuilder(lc.Ct), - OutgoingHi: common.NewVectorBuilder(lc.DataHi), - OutgoingLo: common.NewVectorBuilder(lc.DataLo), TxEmitsLogs: common.NewVectorBuilder(lc.TxEmitsLogs), } + for i := range res.Outgoing { + res.Outgoing[i] = common.NewVectorBuilder(lc.Data[i]) + } + + return res } // 
PushLogSelectors populates the IsLogX and TxEmitsLogs columns in what will become LogColumns @@ -232,38 +244,53 @@ func (lc *LogColumnsAssignmentBuilder) PadAndAssign(run *wizard.ProverRuntime) { lc.AbsLogNum.PadAndAssign(run) lc.AbsLogNumMax.PadAndAssign(run) lc.Ct.PadAndAssign(run) - lc.OutgoingHi.PadAndAssign(run) - lc.OutgoingLo.PadAndAssign(run) lc.TxEmitsLogs.PadAndAssign(run) + + for i := range lc.Outgoing { + lc.Outgoing[i].PadAndAssign(run) + } } // LogColumnsAssign uses test samples from LogInfo to populate LogColumns uses for testing // in the fetching of messages from L2L1/RollingHash logs func LogColumnsAssign(run *wizard.ProverRuntime, logCols *LogColumns, logs []LogInfo) { builder := NewLogColumnsAssignmentBuilder(logCols) + + addrOffset := common.NbLimbU256 - common.NbLimbEthAddress + for i := 0; i < len(logs); i++ { logType := logs[i].LogType // row 0 builder.PushLogSelectors(logs[i].LogType) builder.PushCounters(i, len(logs), 0) - builder.OutgoingHi.PushField(logs[i].DataSize) - builder.OutgoingLo.PushField(logs[i].noTopics) + + for j := range common.NbLimbU128 { + builder.Outgoing[j].PushField(logs[i].DataSize[j]) + builder.Outgoing[common.NbLimbU128+j].PushField(logs[i].noTopics[j]) + } if logType != MISSING_LOG { // row 1 has a special form builder.PushLogSelectors(logs[i].LogType) builder.PushCounters(i, len(logs), 1) - builder.OutgoingHi.PushField(logs[i].AddressHi) - builder.OutgoingLo.PushField(logs[i].AddressLo) + + for j := 0; j < addrOffset; j++ { + builder.Outgoing[j].PushField(field.Zero()) + } + for j := 0; j < common.NbLimbEthAddress; j++ { + builder.Outgoing[j+addrOffset].PushField(logs[i].Address[j]) + } // subsequent rows contain the topic data for topicNo := 0; topicNo < noTopics(logType); topicNo++ { builder.PushLogSelectors(logs[i].LogType) builder.PushCounters(i, len(logs), topicNo+2) // topicNo+2, starting at row index 2 - builder.OutgoingHi.PushField(logs[i].TopicsHi[topicNo]) - 
builder.OutgoingLo.PushField(logs[i].TopicsLo[topicNo]) + for j := range common.NbLimbU256 { + builder.Outgoing[j].PushField(logs[i].Topics[topicNo][j]) + } } } } + builder.PadAndAssign(run) } diff --git a/prover/zkevm/prover/publicInput/logs/rolling_selector.go b/prover/zkevm/prover/publicInput/logs/rolling_selector.go index 560522393d..d8c8586b4e 100644 --- a/prover/zkevm/prover/publicInput/logs/rolling_selector.go +++ b/prover/zkevm/prover/publicInput/logs/rolling_selector.go @@ -1,11 +1,13 @@ package logs import ( + "fmt" "github.com/consensys/linea-monorepo/prover/maths/common/smartvectors" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" commonconstraints "github.com/consensys/linea-monorepo/prover/zkevm/prover/common/common_constraints" util "github.com/consensys/linea-monorepo/prover/zkevm/prover/publicInput/utilities" ) @@ -14,26 +16,30 @@ import ( type RollingSelector struct { // Exists contains a 1 if there exists at least one rolling hash log ExistsHash, ExistsMsg ifaces.Column - // the Hi/Lo part of the first Rolling Hash found in the logs - FirstHi, FirstLo ifaces.Column - // the Hi/Lo part of the last Rolling Hash found in the logs - LastHi, LastLo ifaces.Column + // the first/last Rolling Hash found in the logs + First, Last [common.NbLimbU256]ifaces.Column // the first/last message number of the last Rolling hash log - FirstMessageNo, LastMessageNo ifaces.Column + FirstMessageNo, LastMessageNo [common.NbLimbU128]ifaces.Column } // NewRollingSelector returns a new RollingSelector with initialized columns that are not constrained. 
-func NewRollingSelector(comp *wizard.CompiledIOP, name string, sizeHash, sizeMsg int) *RollingSelector { - return &RollingSelector{ - ExistsHash: util.CreateCol(name, "EXISTS_HASH", sizeHash, comp), - ExistsMsg: util.CreateCol(name, "EXISTS_MSG", sizeMsg, comp), - FirstHi: util.CreateCol(name, "FIRST_HI", sizeHash, comp), - FirstLo: util.CreateCol(name, "FIRST_LO", sizeHash, comp), - LastHi: util.CreateCol(name, "LAST_HI", sizeHash, comp), - LastLo: util.CreateCol(name, "LAST_LO", sizeHash, comp), - FirstMessageNo: util.CreateCol(name, "FIRST_MESSAGE_NO", sizeMsg, comp), - LastMessageNo: util.CreateCol(name, "LAST_MESSAGE_NO", sizeMsg, comp), +func NewRollingSelector(comp *wizard.CompiledIOP, name string, size int) *RollingSelector { + res := &RollingSelector{ + ExistsHash: util.CreateCol(name, "EXISTS_HASH", size, comp), + ExistsMsg: util.CreateCol(name, "EXISTS_MSG", size, comp), } + + for i := range res.First { + res.First[i] = util.CreateCol(name, fmt.Sprintf("FIRST_%d", i), size, comp) + res.Last[i] = util.CreateCol(name, fmt.Sprintf("LAST_%d", i), size, comp) + } + + for i := range res.FirstMessageNo { + res.FirstMessageNo[i] = util.CreateCol(name, fmt.Sprintf("FIRST_MESSAGE_NO_%d", i), size, comp) + res.LastMessageNo[i] = util.CreateCol(name, fmt.Sprintf("LAST_MESSAGE_NO_%d", i), size, comp) + } + + return res } // DefineRollingSelector specifies the constraints of the RollingSelector with respect to the ExtractedData fetched from the arithmetization @@ -46,13 +52,11 @@ func DefineRollingSelector(comp *wizard.CompiledIOP, sel *RollingSelector, name var allCols = []ifaces.Column{ sel.ExistsHash, sel.ExistsMsg, - sel.FirstHi, - sel.FirstLo, - sel.LastHi, - sel.LastLo, - sel.FirstMessageNo, - sel.LastMessageNo, } + allCols = append(allCols, sel.First[:]...) + allCols = append(allCols, sel.Last[:]...) + allCols = append(allCols, sel.FirstMessageNo[:]...) + allCols = append(allCols, sel.LastMessageNo[:]...) 
for _, col := range allCols { commonconstraints.MustBeConstant(comp, col) @@ -66,60 +70,74 @@ func DefineRollingSelector(comp *wizard.CompiledIOP, sel *RollingSelector, name ), ) // local openings for the first values - comp.InsertLocal(0, ifaces.QueryIDf("%s_%s", name, "FIRST_HI"), - sym.Sub( - fetchedHash.Hi, - sel.FirstHi, - ), - ) - comp.InsertLocal(0, ifaces.QueryIDf("%s_%s", name, "FIRST_LO"), - sym.Sub( - fetchedHash.Lo, - sel.FirstLo, - ), - ) - comp.InsertLocal(0, ifaces.QueryIDf("%s_%s", name, "FIRST_MSG_NO"), - sym.Sub( - fetchedMsg.Lo, - sel.FirstMessageNo, - ), - ) + for i := range sel.First { + comp.InsertLocal(0, ifaces.QueryIDf("%s_FIRST_%d", name, i), + sym.Sub( + fetchedHash.Data[i], + sel.First[i], + ), + ) + } + for i := range sel.FirstMessageNo { + comp.InsertLocal(0, ifaces.QueryIDf("%s_FIRST_MSG_NO_%d", name, i), + sym.Sub( + fetchedMsg.Data[common.NbLimbU128+i], + sel.FirstMessageNo[i], + ), + ) + } // define the consistency constraints - util.CheckLastELemConsistency(comp, isActiveHash, fetchedHash.Hi, sel.LastHi, name) - util.CheckLastELemConsistency(comp, isActiveHash, fetchedHash.Lo, sel.LastLo, name) - util.CheckLastELemConsistency(comp, isActiveMsg, fetchedMsg.Lo, sel.LastMessageNo, name) + for i := range sel.Last { + util.CheckLastELemConsistency(comp, isActiveHash, fetchedHash.Data[i], sel.Last[i], name) + } + for i := range sel.LastMessageNo { + util.CheckLastELemConsistency(comp, isActiveMsg, fetchedMsg.Data[i], sel.LastMessageNo[i], name) + } } // AssignRollingSelector assigns the data in the RollingSelector using the ExtractedData fetched from the arithmetization func AssignRollingSelector(run *wizard.ProverRuntime, selector *RollingSelector, fetchedHash, fetchedMsg ExtractedData) { - sizeHash := fetchedHash.Hi.Size() - sizeMsg := fetchedMsg.Hi.Size() + sizeHash := fetchedHash.Data[0].Size() + sizeMsg := fetchedMsg.Data[0].Size() exists := run.GetColumnAt(fetchedHash.filterFetched.GetColID(), 0) - var lastHi, lastLo, lastMsg 
field.Element + var last, first [common.NbLimbU256]field.Element + // contains messageNo which is stores in the Lo part of the message + var lastMsg, firstMsg [common.NbLimbU128]field.Element for i := 0; i < sizeHash; i++ { isActive := run.GetColumnAt(fetchedHash.filterFetched.GetColID(), i) - if isActive.IsOne() { - lastHi = run.GetColumnAt(fetchedHash.Hi.GetColID(), i) - lastLo = run.GetColumnAt(fetchedHash.Lo.GetColID(), i) - lastMsg = run.GetColumnAt(fetchedMsg.Lo.GetColID(), i) - } else { + if isActive.IsZero() { break } + + for j := range last { + last[j] = run.GetColumnAt(fetchedHash.Data[j].GetColID(), i) + } + for j := range lastMsg { + lastMsg[j] = run.GetColumnAt(fetchedMsg.Data[j].GetColID(), i) + } } + // compute first values - firstHi := run.GetColumnAt(fetchedHash.Hi.GetColID(), 0) - firstLo := run.GetColumnAt(fetchedHash.Lo.GetColID(), 0) - firstMsg := run.GetColumnAt(fetchedMsg.Lo.GetColID(), 0) + for j := range first { + first[j] = run.GetColumnAt(fetchedHash.Data[j].GetColID(), 0) + } + for j := range firstMsg { + firstMsg[j] = run.GetColumnAt(fetchedMsg.Data[common.NbLimbU128+j].GetColID(), 0) + } // assign the RollingSelector columns run.AssignColumn(selector.ExistsHash.GetColID(), smartvectors.NewConstant(exists, sizeHash)) run.AssignColumn(selector.ExistsMsg.GetColID(), smartvectors.NewConstant(exists, sizeMsg)) - run.AssignColumn(selector.FirstHi.GetColID(), smartvectors.NewConstant(firstHi, sizeHash)) - run.AssignColumn(selector.FirstLo.GetColID(), smartvectors.NewConstant(firstLo, sizeHash)) - run.AssignColumn(selector.FirstMessageNo.GetColID(), smartvectors.NewConstant(firstMsg, sizeMsg)) - run.AssignColumn(selector.LastHi.GetColID(), smartvectors.NewConstant(lastHi, sizeHash)) - run.AssignColumn(selector.LastLo.GetColID(), smartvectors.NewConstant(lastLo, sizeHash)) - run.AssignColumn(selector.LastMessageNo.GetColID(), smartvectors.NewConstant(lastMsg, sizeMsg)) + + for i := range first { + run.AssignColumn(selector.First[i].GetColID(), 
smartvectors.NewConstant(first[i], sizeHash)) + run.AssignColumn(selector.Last[i].GetColID(), smartvectors.NewConstant(last[i], sizeHash)) + } + + for i := range firstMsg { + run.AssignColumn(selector.FirstMessageNo[i].GetColID(), smartvectors.NewConstant(firstMsg[i], sizeMsg)) + run.AssignColumn(selector.LastMessageNo[i].GetColID(), smartvectors.NewConstant(lastMsg[i], sizeMsg)) + } } diff --git a/prover/zkevm/prover/publicInput/logs/selectors.go b/prover/zkevm/prover/publicInput/logs/selectors.go index 2d8a53aa58..0c02ae9a31 100644 --- a/prover/zkevm/prover/publicInput/logs/selectors.go +++ b/prover/zkevm/prover/publicInput/logs/selectors.go @@ -9,6 +9,7 @@ import ( "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + pcommon "github.com/consensys/linea-monorepo/prover/zkevm/prover/common" commonconstraints "github.com/consensys/linea-monorepo/prover/zkevm/prover/common/common_constraints" "github.com/ethereum/go-ethereum/common" ) @@ -75,23 +76,13 @@ func IsLogType(columns LogColumns, logType int) ifaces.Column { } } -// GetSelectorFirstTopicHi returns the appropriate selector column for L2L1 or RollingHash logs -func GetSelectorFirstTopicHi(sel Selectors, logType int) ifaces.Column { +// GetSelectorFirstTopic returns the appropriate selector column for L2L1 or RollingHash logs +func GetSelectorFirstTopic(sel Selectors, logType int) [pcommon.NbLimbU256]ifaces.Column { switch logType { case L2L1: - return sel.SelectFirstTopicL2L1Hi + return sel.SelectFirstTopicL2L1 default: // RollingMsgNo or RollingHash - return sel.SelectFirstTopicRollingHi - } -} - -// GetSelectorFirstTopicLo returns the appropriate selector column for L2L1 or RollingHash logs -func GetSelectorFirstTopicLo(sel Selectors, logType int) ifaces.Column { - switch logType { - case L2L1: - return sel.SelectFirstTopicL2L1Lo - default: // RollingMsgNo or RollingHash - return 
sel.SelectFirstTopicRollingLo + return sel.SelectFirstTopicRolling } } @@ -143,18 +134,18 @@ type Selectors struct { SelectorCounter0, SelectorCounter1, SelectorCounter3, SelectorCounter4, SelectorCounter5 ifaces.Column ComputeSelectorCounter0, ComputeSelectorCounter1, ComputeSelectorCounter3, ComputeSelectorCounter4, ComputeSelectorCounter5 wizard.ProverAction // SelectFirstTopicL2L1Hi/Lo is 1 on rows where the first topic has the shape expected from L2L1 logs - SelectFirstTopicL2L1Hi, SelectFirstTopicL2L1Lo ifaces.Column - ComputeSelectFirstTopicL2L1Hi, ComputeSelectFirstTopicL2L1Lo wizard.ProverAction + SelectFirstTopicL2L1 [pcommon.NbLimbU256]ifaces.Column + ComputeSelectFirstTopicL2L1 [pcommon.NbLimbU256]wizard.ProverAction // SelectFirstTopicRollingHi/Lo is 1 on rows where the first topic has the shape expected from L2L1 logs - SelectFirstTopicRollingHi, SelectFirstTopicRollingLo ifaces.Column - ComputeSelectFirstTopicRollingHi, ComputeSelectFirstTopicRollingLo wizard.ProverAction + SelectFirstTopicRolling [pcommon.NbLimbU256]ifaces.Column + ComputeSelectFirstTopicRolling [pcommon.NbLimbU256]wizard.ProverAction // columns containing the hi and lo parts of l2BridgeAddress - L2BridgeAddressColHI, L2BridgeAddressColLo ifaces.Column + L2BridgeAddressCol [pcommon.NbLimbEthAddress]ifaces.Column // SelectorL2BridgeAddressHi/Lo is 1 on rows where the OutgoingHi/Lo columns contain the bridge address, // as expected from L2L1 logs - SelectorL2BridgeAddressHi, SelectorL2BridgeAddressLo ifaces.Column - ComputeSelectorL2BridgeAddressHi, ComputeSelectorL2BridgeAddressLo wizard.ProverAction + SelectorL2BridgeAddress [pcommon.NbLimbU256]ifaces.Column + ComputeSelectorL2BridgeAddress [pcommon.NbLimbU256]wizard.ProverAction } /* @@ -187,55 +178,58 @@ func NewSelectorColumns(comp *wizard.CompiledIOP, lc LogColumns) Selectors { ) // compute the expected data in the first topic of a L2L1 log - var firstTopicL2L1Hi, firstTopicL2L1Lo field.Element + var firstTopicL2L1 
[pcommon.NbLimbU256]field.Element firstTopicBytes := bridge.L2L1Topic0() // fixed expected value for the topic on the first topic row - firstTopicL2L1Hi.SetBytes(firstTopicBytes[:16]) - firstTopicL2L1Lo.SetBytes(firstTopicBytes[16:]) + for i := range firstTopicL2L1 { + firstTopicL2L1[i].SetBytes(firstTopicBytes[i*2 : (i+1)*2]) + } // selectors that light up when OutgoingHi/OutgoingLo contain the expected first topic data - SelectFirstTopicL2L1Hi, ComputeSelectFirstTopicL2L1Hi := dedicated.IsZero( - comp, - sym.Sub(lc.DataHi, firstTopicL2L1Hi), - ) - - SelectFirstTopicL2L1Lo, ComputeSelectFirstTopicL2L1Lo := dedicated.IsZero( - comp, - sym.Sub(lc.DataLo, firstTopicL2L1Lo), - ) + var SelectFirstTopicL2L1 [pcommon.NbLimbU256]ifaces.Column + var ComputeSelectFirstTopicL2L1 [pcommon.NbLimbU256]wizard.ProverAction + for i := range SelectFirstTopicL2L1 { + SelectFirstTopicL2L1[i], ComputeSelectFirstTopicL2L1[i] = dedicated.IsZero( + comp, + sym.Sub(lc.Data[i], firstTopicL2L1[i]), + ) + } // compute the expected data in the first topic of a rolling hash log - var firstTopicRollingHi, firstTopicRollingLo field.Element + var firstTopicRolling [pcommon.NbLimbU256]field.Element firstTopicRollingBytes := bridge.GetRollingHashUpdateTopic0() // fixed expected value for the topic on the first topic row - firstTopicRollingHi.SetBytes(firstTopicRollingBytes[:16]) - firstTopicRollingLo.SetBytes(firstTopicRollingBytes[16:]) + for i := range firstTopicRolling { + firstTopicRolling[i].SetBytes(firstTopicRollingBytes[i*2 : (i+1)*2]) + } // selectors that light up when OutgoingHi/OutgoingLo contain the expected first topic data - SelectFirstTopicRollingHi, ComputeSelectFirstTopicRollingHi := dedicated.IsZero( - comp, - sym.Sub(lc.DataHi, firstTopicRollingHi), - ) - - SelectFirstTopicRollingLo, ComputeSelectFirstTopicRollingLo := dedicated.IsZero( - comp, - sym.Sub(lc.DataLo, firstTopicRollingLo), - ) + var SelectFirstTopicRolling [pcommon.NbLimbU256]ifaces.Column + var 
ComputeSelectFirstTopicRolling [pcommon.NbLimbU256]wizard.ProverAction + for i := range SelectFirstTopicRolling { + SelectFirstTopicRolling[i], ComputeSelectFirstTopicRolling[i] = dedicated.IsZero( + comp, + sym.Sub(lc.Data[i], firstTopicRolling[i]), + ) + } - bridgeAddrColHi := comp.InsertCommit(0, ifaces.ColIDf("LOGS_FETCHER_BRIDGE_ADDRESS_HI"), lc.DataHi.Size()) - bridgeAddrColLo := comp.InsertCommit(0, ifaces.ColIDf("LOGS_FETCHER_BRIDGE_ADDRESS_LO"), lc.DataLo.Size()) + var bridgeAddrCol [pcommon.NbLimbEthAddress]ifaces.Column + // selectors that light up when OutgoingHi/OutgoingLo contain the Hi/Lo parts of the l2BridgeAddress + var SelectorL2BridgeAddress [pcommon.NbLimbU256]ifaces.Column + var ComputeSelectorL2BridgeAddress [pcommon.NbLimbU256]wizard.ProverAction - commonconstraints.MustBeConstant(comp, bridgeAddrColHi) - commonconstraints.MustBeConstant(comp, bridgeAddrColLo) + offset := pcommon.NbLimbU256 - pcommon.NbLimbEthAddress + for i := range bridgeAddrCol { + bridgeAddrCol[i] = comp.InsertCommit(0, ifaces.ColIDf("LOGS_FETCHER_BRIDGE_ADDRESS_%d", i), lc.Data[i].Size()) + commonconstraints.MustBeConstant(comp, bridgeAddrCol[i]) - // selectors that light up when OutgoingHi/OutgoingLo contain the Hi/Lo parts of the l2BridgeAddress - SelectorL2BridgeAddressHi, ComputeSelectorL2BridgeAddressHi := dedicated.IsZero( - comp, - sym.Sub(lc.DataHi, bridgeAddrColHi), - ) + iOffset := i + offset + SelectorL2BridgeAddress[iOffset], ComputeSelectorL2BridgeAddress[iOffset] = + dedicated.IsZero(comp, sym.Sub(lc.Data[iOffset], bridgeAddrCol[i])) + } - SelectorL2BridgeAddressLo, ComputeSelectorL2BridgeAddressLo := dedicated.IsZero( - comp, - sym.Sub(lc.DataLo, bridgeAddrColLo), - ) + // first limbs are zeroes as the address is 20 bytes long, while the data can be up to 32 bytes long + for i := 0; i < offset; i++ { + SelectorL2BridgeAddress[i], ComputeSelectorL2BridgeAddress[i] = dedicated.IsZero(comp, lc.Data[i]) + } // generate the final selector object res := 
Selectors{ @@ -252,23 +246,16 @@ func NewSelectorColumns(comp *wizard.CompiledIOP, lc LogColumns) Selectors { ComputeSelectorCounter4: ComputeSelectorCounter4, ComputeSelectorCounter5: ComputeSelectorCounter5, // selectors that light up on rows that contain the expected first topic for L2L1 logs - SelectFirstTopicL2L1Hi: SelectFirstTopicL2L1Hi, - ComputeSelectFirstTopicL2L1Hi: ComputeSelectFirstTopicL2L1Hi, - SelectFirstTopicL2L1Lo: SelectFirstTopicL2L1Lo, - ComputeSelectFirstTopicL2L1Lo: ComputeSelectFirstTopicL2L1Lo, + SelectFirstTopicL2L1: SelectFirstTopicL2L1, + ComputeSelectFirstTopicL2L1: ComputeSelectFirstTopicL2L1, // selectors that light up on rows that contain the expected first topic for Rolling hashes - SelectFirstTopicRollingHi: SelectFirstTopicRollingHi, - ComputeSelectFirstTopicRollingHi: ComputeSelectFirstTopicRollingHi, - SelectFirstTopicRollingLo: SelectFirstTopicRollingLo, - ComputeSelectFirstTopicRollingLo: ComputeSelectFirstTopicRollingLo, + SelectFirstTopicRolling: SelectFirstTopicRolling, + ComputeSelectFirstTopicRolling: ComputeSelectFirstTopicRolling, // columns and a helper field which contain the l2bridgeAddress - L2BridgeAddressColHI: bridgeAddrColHi, - L2BridgeAddressColLo: bridgeAddrColLo, + L2BridgeAddressCol: bridgeAddrCol, // selectors that light up on rows that contain the expected l2bridgeAddress - SelectorL2BridgeAddressHi: SelectorL2BridgeAddressHi, - ComputeSelectorL2BridgeAddressHi: ComputeSelectorL2BridgeAddressHi, - SelectorL2BridgeAddressLo: SelectorL2BridgeAddressLo, - ComputeSelectorL2BridgeAddressLo: ComputeSelectorL2BridgeAddressLo, + SelectorL2BridgeAddress: SelectorL2BridgeAddress, + ComputeSelectorL2BridgeAddress: ComputeSelectorL2BridgeAddress, } return res } @@ -276,12 +263,13 @@ func NewSelectorColumns(comp *wizard.CompiledIOP, lc LogColumns) Selectors { // Assign values for the selectors func (sel Selectors) Assign(run *wizard.ProverRuntime, l2BridgeAddress common.Address) { - addrHi, addrLo := 
ConvertAddress(statemanager.Address(l2BridgeAddress)) - size := sel.L2BridgeAddressColHI.Size() + addr := ConvertAddress(statemanager.Address(l2BridgeAddress)) + size := sel.L2BridgeAddressCol[0].Size() // assign the columns that contain the l2 bridge address - run.AssignColumn(sel.L2BridgeAddressColHI.GetColID(), smartvectors.NewConstant(addrHi, size)) - run.AssignColumn(sel.L2BridgeAddressColLo.GetColID(), smartvectors.NewConstant(addrLo, size)) + for i := range addr { + run.AssignColumn(sel.L2BridgeAddressCol[i].GetColID(), smartvectors.NewConstant(addr[i], size)) + } // now we assign the dedicated selectors for counters sel.ComputeSelectorCounter0.Run(run) @@ -291,12 +279,13 @@ func (sel Selectors) Assign(run *wizard.ProverRuntime, l2BridgeAddress common.Ad sel.ComputeSelectorCounter5.Run(run) // now we assign the dedicated selectors for the two type of first topic - sel.ComputeSelectFirstTopicL2L1Hi.Run(run) - sel.ComputeSelectFirstTopicL2L1Lo.Run(run) - sel.ComputeSelectFirstTopicRollingHi.Run(run) - sel.ComputeSelectFirstTopicRollingLo.Run(run) + for i := range sel.SelectFirstTopicL2L1 { + sel.ComputeSelectFirstTopicL2L1[i].Run(run) + sel.ComputeSelectFirstTopicRolling[i].Run(run) + } // now we assign the dedicated selectors for the bridge address - sel.ComputeSelectorL2BridgeAddressHi.Run(run) - sel.ComputeSelectorL2BridgeAddressLo.Run(run) + for i := range sel.SelectorL2BridgeAddress { + sel.ComputeSelectorL2BridgeAddress[i].Run(run) + } } diff --git a/prover/zkevm/prover/publicInput/logs/testing_util.go b/prover/zkevm/prover/publicInput/logs/testing_util.go index b947711fbc..c8a0d8c7e4 100644 --- a/prover/zkevm/prover/publicInput/logs/testing_util.go +++ b/prover/zkevm/prover/publicInput/logs/testing_util.go @@ -5,46 +5,35 @@ import ( eth "github.com/consensys/linea-monorepo/prover/backend/execution/statemanager" "github.com/consensys/linea-monorepo/prover/maths/field" "github.com/consensys/linea-monorepo/prover/utils/types" + 
"github.com/consensys/linea-monorepo/prover/zkevm/prover/common" ) -// FirstTopicL2l1 is a helper function that outputs the Hi/Lo parts of the expected first topic of an L2L1 log -func FirstTopicL2l1() (field.Element, field.Element) { - var firstTopicHi, firstTopicLo field.Element +// FirstTopicL2l1 is a helper function that outputs the limbs of the expected first topic of an L2L1 log +func FirstTopicL2l1() [common.NbLimbU256]field.Element { + var firstTopic [common.NbLimbU256]field.Element firstTopicBytes := bridge.L2L1Topic0() // fixed expected value for the topic on the first topic row - firstTopicHi.SetBytes(firstTopicBytes[:16]) - firstTopicLo.SetBytes(firstTopicBytes[16:]) - return firstTopicHi, firstTopicLo + for i := range firstTopic { + firstTopic[i].SetBytes(firstTopicBytes[i*2 : (i+1)*2]) + } + return firstTopic } -// FirstTopicRolling is a helper function that outputs the Hi/Lo parts of the expected first topic of a RollingHash log -func FirstTopicRolling() (field.Element, field.Element) { - var firstTopicRollingHi, firstTopicRollingLo field.Element +// FirstTopicRolling is a helper function that outputs the limbs of the expected first topic of a RollingHash log +func FirstTopicRolling() [common.NbLimbU256]field.Element { + var firstTopicRolling [common.NbLimbU256]field.Element firstTopicRollingBytes := bridge.GetRollingHashUpdateTopic0() // fixed expected value for the topic on the first topic row - firstTopicRollingHi.SetBytes(firstTopicRollingBytes[:16]) - firstTopicRollingLo.SetBytes(firstTopicRollingBytes[16:]) - return firstTopicRollingHi, firstTopicRollingLo + for i := range firstTopicRolling { + firstTopicRolling[i].SetBytes(firstTopicRollingBytes[i*2 : (i+1)*2]) + } + return firstTopicRolling } // GenerateTopicsAndAddresses is a common test function that outputs sample topics and addresses -func GenerateTopicsAndAddresses() ([]field.Element, []field.Element, []field.Element, []field.Element) { +func GenerateTopicsAndAddresses() 
([][common.NbLimbU256]field.Element, [][common.NbLimbEthAddress]field.Element) { var ( - topicsHi = []field.Element{ - field.NewElement(11), - field.NewElement(21), - field.NewElement(31), - field.NewElement(41), - field.NewElement(51), - field.NewElement(61), - } - topicsLo = []field.Element{ - field.NewElement(91), - field.NewElement(101), - field.NewElement(111), - field.NewElement(121), - field.NewElement(131), - field.NewElement(141), - } - address = []eth.Address{ + topicsHi = []uint64{11, 21, 31, 41, 51, 61} + topicsLo = []uint64{91, 101, 111, 121, 131, 141} + address = []eth.Address{ types.DummyAddress(54), types.DummyAddress(64), types.DummyAddress(74), @@ -54,55 +43,49 @@ func GenerateTopicsAndAddresses() ([]field.Element, []field.Element, []field.Ele ) // convert the test addresses into Hi and Lo parts - addressHi := make([]field.Element, len(address)) - addressLo := make([]field.Element, len(address)) + addressRaw := make([][common.NbLimbEthAddress]field.Element, len(address)) for i := 0; i < len(address); i++ { - hi, lo := ConvertAddress(address[i]) - addressHi[i] = hi - addressLo[i] = lo + addressRaw[i] = ConvertAddress(address[i]) + } + + // put hi and low parts of the topics at correct postitions in the limbs + topicsRaw := make([][common.NbLimbU256]field.Element, len(topicsHi)) + for i := 0; i < len(topicsHi); i++ { + topicsRaw[i] = newU256WithHiLoParts(topicsHi[i], topicsLo[i]) } - return topicsHi, topicsLo, addressHi, addressLo + return topicsRaw, addressRaw } // GenerateSimpleL2L1Test generates test log info for testing the fetcher of message data from L2L1 logs func GenerateSimpleL2L1Test() ([]LogInfo, types.EthAddress, string) { // get sample topics and addresses - topicsHi, topicsLo, addressHi, addressLo := GenerateTopicsAndAddresses() + topics, address := GenerateTopicsAndAddresses() // fixed expected value for the topic on the first topic row - firstTopicHi, firstTopicLo := FirstTopicL2l1() + firstTopic := FirstTopicL2l1() // compute a 
dummy bridge address for testing bridgeAddress := types.DummyAddress(2) - bridgeAddrHi, bridgeAddrLo := ConvertAddress(bridgeAddress) + bridgeAddr := ConvertAddress(bridgeAddress) var testLogs = [...]LogInfo{ { - LogType: 2, // log with 2 topics - DataSize: field.Element{21}, - noTopics: field.Element{2}, - AddressHi: addressHi[0], - AddressLo: addressLo[0], - TopicsHi: []field.Element{topicsHi[0], topicsHi[1]}, - TopicsLo: []field.Element{topicsLo[0], topicsLo[1]}, + LogType: 2, // log with 2 topics + DataSize: newU128WithLoPart(21), + noTopics: newU128WithLoPart(2), + Address: address[0], + Topics: [][common.NbLimbU256]field.Element{topics[0], topics[1]}, }, { - LogType: 4, // an L2L1 log of the type we are interested in - DataSize: field.Element{213}, - noTopics: field.Element{4}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicHi, topicsHi[3], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{firstTopicLo, topicsLo[3], topicsLo[4], topicsLo[5]}, + LogType: 4, // an L2L1 log of the type we are interested in + DataSize: newU128WithLoPart(213), + noTopics: newU128WithLoPart(4), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopic, topics[3], topics[4], topics[5]}, }, { - LogType: MISSING_LOG, // missing log - DataSize: field.Element{}, - noTopics: field.Element{}, - AddressHi: field.Element{}, - AddressLo: field.Element{}, - TopicsHi: nil, - TopicsLo: nil, + LogType: MISSING_LOG, // missing log + Topics: nil, }, } return testLogs[:], bridgeAddress, "GenerateSimpleL2L1Test" @@ -111,32 +94,28 @@ func GenerateSimpleL2L1Test() ([]LogInfo, types.EthAddress, string) { // GenerateSimpleRollingTest generates test log info for testing the fetcher of message data from RollingHash logs func GenerateSimpleRollingTest() ([]LogInfo, types.EthAddress, string) { // get sample topics and addresses - topicsHi, topicsLo, addressHi, addressLo := GenerateTopicsAndAddresses() + topics, address := 
GenerateTopicsAndAddresses() // fixed expected value for the topic on the first topic row - firstTopicRollingHi, firstTopicRollingLo := FirstTopicRolling() + firstTopicRolling := FirstTopicRolling() // compute a dummy bridge address for testing bridgeAddress := types.DummyAddress(2) - bridgeAddrHi, bridgeAddrLo := ConvertAddress(bridgeAddress) + bridgeAddr := ConvertAddress(bridgeAddress) var testLogs = [...]LogInfo{ { - LogType: 3, // RollingHash log with 3 topics - DataSize: field.Element{3}, - noTopics: field.Element{3}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicRollingHi, topicsHi[2], topicsHi[3]}, - TopicsLo: []field.Element{firstTopicRollingLo, topicsLo[2], topicsLo[3]}, + LogType: 3, // RollingHash log with 3 topics + DataSize: newU128WithLoPart(3), + noTopics: newU128WithLoPart(3), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopicRolling, topics[2], topics[3]}, }, { - LogType: 2, // log with 2 topics - DataSize: field.Element{21}, - noTopics: field.Element{2}, - AddressHi: addressHi[0], - AddressLo: addressLo[0], - TopicsHi: []field.Element{topicsHi[0], topicsHi[1]}, - TopicsLo: []field.Element{topicsLo[0], topicsLo[1]}, + LogType: 2, // log with 2 topics + DataSize: newU128WithLoPart(21), + noTopics: newU128WithLoPart(2), + Address: address[0], + Topics: [][common.NbLimbU256]field.Element{topics[0], topics[1]}, }, } return testLogs[:], bridgeAddress, "GenerateSimpleRollingTest" @@ -145,77 +124,60 @@ func GenerateSimpleRollingTest() ([]LogInfo, types.EthAddress, string) { // GenerateTestWithoutRelevantLogs generates tests that contain no relevant L2L1/RollingHash logs func GenerateTestWithoutRelevantLogs() ([]LogInfo, types.EthAddress, string) { // get sample topics and addresses - topicsHi, topicsLo, addressHi, addressLo := GenerateTopicsAndAddresses() + topics, address := GenerateTopicsAndAddresses() // fixed expected value for the topic on the first topic row - firstTopicHi, 
firstTopicLo := FirstTopicL2l1() + firstTopic := FirstTopicL2l1() // compute a dummy bridge address for testing bridgeAddress := types.DummyAddress(2) - bridgeAddrHi, bridgeAddrLo := ConvertAddress(bridgeAddress) + bridgeAddr := ConvertAddress(bridgeAddress) var testLogs = [...]LogInfo{ { - LogType: 2, // log with 2 topics - DataSize: field.Element{21}, - noTopics: field.Element{2}, - AddressHi: addressHi[0], - AddressLo: addressLo[0], - TopicsHi: []field.Element{topicsHi[0], topicsHi[1]}, - TopicsLo: []field.Element{topicsLo[0], topicsLo[1]}, + LogType: 2, // log with 2 topics + DataSize: newU128WithLoPart(21), + noTopics: newU128WithLoPart(2), + Address: address[0], + Topics: [][common.NbLimbU256]field.Element{topics[0], topics[1]}, }, { - LogType: 0, // log with 0 topics - DataSize: field.Element{21}, - noTopics: field.Element{21}, - AddressHi: field.Element{121}, - AddressLo: field.Element{333}, - TopicsHi: nil, - TopicsLo: nil, + LogType: 0, // log with 0 topics + DataSize: newU128WithLoPart(21), + noTopics: newU128WithLoPart(21), + Address: newU160WithHiLoParts(121, 333), + Topics: nil, }, { - LogType: 3, // log with 3 topics - DataSize: field.Element{3}, - noTopics: field.Element{3}, - AddressHi: addressHi[1], - AddressLo: addressLo[1], - TopicsHi: []field.Element{topicsHi[1], topicsHi[2], topicsHi[3]}, - TopicsLo: []field.Element{topicsLo[1], topicsLo[2], topicsLo[3]}, + LogType: 3, // log with 3 topics + DataSize: newU128WithLoPart(3), + noTopics: newU128WithLoPart(3), + Address: address[1], + Topics: [][common.NbLimbU256]field.Element{topics[1], topics[2], topics[3]}, }, { - LogType: 4, // log of type 4 which has a wrong bridge data and will be skipped - DataSize: field.Element{213}, - noTopics: field.Element{4}, - AddressHi: addressHi[0], - AddressLo: addressLo[0], - TopicsHi: []field.Element{firstTopicHi, topicsHi[3], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{firstTopicLo, topicsLo[3], topicsLo[4], topicsLo[5]}, + LogType: 4, // log of type 
4 which has a wrong bridge data and will be skipped + DataSize: newU128WithLoPart(213), + noTopics: newU128WithLoPart(4), + Address: address[0], + Topics: [][common.NbLimbU256]field.Element{firstTopic, topics[3], topics[4], topics[5]}, }, { - LogType: 3, // log with 3 topics, but not a RollingHash log (wrong address) - DataSize: field.Element{191}, - noTopics: field.Element{191}, - AddressHi: addressHi[3], - AddressLo: addressLo[3], - TopicsHi: []field.Element{topicsHi[1], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{topicsLo[1], topicsLo[4], topicsLo[5]}, + LogType: 3, // log with 3 topics, but not a RollingHash log (wrong address) + DataSize: newU128WithLoPart(191), + noTopics: newU128WithLoPart(191), + Address: address[3], + Topics: [][common.NbLimbU256]field.Element{topics[1], topics[4], topics[5]}, }, { - LogType: 4, // log of type 4 which has a wrong first topic in TopicsHi/TopicsLo - DataSize: field.Element{213}, - noTopics: field.Element{4}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{topicsHi[3], topicsHi[3], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{topicsHi[3], topicsLo[3], topicsLo[4], topicsLo[5]}, + LogType: 4, // log of type 4 which has a wrong first topic in TopicsHi/TopicsLo + DataSize: newU128WithLoPart(213), + noTopics: newU128WithLoPart(4), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{topics[3], topics[3], topics[4], topics[5]}, }, { - LogType: MISSING_LOG, // missing log - DataSize: field.Element{}, - noTopics: field.Element{}, - AddressHi: field.Element{}, - AddressLo: field.Element{}, - TopicsHi: nil, - TopicsLo: nil, + LogType: MISSING_LOG, // missing log + Topics: nil, }, } return testLogs[:], bridgeAddress, "GenerateTestWithoutRelevantLogs" @@ -224,115 +186,110 @@ func GenerateTestWithoutRelevantLogs() ([]LogInfo, types.EthAddress, string) { // GenerateTestData generates test log info for testing the fetcher of message data from L2L1/RollingHash logs func 
GenerateLargeTest() ([]LogInfo, types.EthAddress, string) { // get sample topics and addresses - topicsHi, topicsLo, addressHi, addressLo := GenerateTopicsAndAddresses() + topics, address := GenerateTopicsAndAddresses() // fixed expected value for the topic on the first topic row for L2L1/RollingHash logs - firstTopicL2L1Hi, firstTopicL2L1Lo := FirstTopicL2l1() - firstTopicRollingHi, firstTopicRollingLo := FirstTopicRolling() + firstTopicL2L1 := FirstTopicL2l1() + firstTopicRolling := FirstTopicRolling() // compute a dummy bridge address for testing bridgeAddress := types.DummyAddress(2) - bridgeAddrHi, bridgeAddrLo := ConvertAddress(bridgeAddress) + bridgeAddr := ConvertAddress(bridgeAddress) var testLogs = [...]LogInfo{ { - LogType: 2, // log with 2 topics - DataSize: field.Element{21}, - noTopics: field.Element{2}, - AddressHi: addressHi[0], - AddressLo: addressLo[0], - TopicsHi: []field.Element{topicsHi[0], topicsHi[1]}, - TopicsLo: []field.Element{topicsLo[0], topicsLo[1]}, + LogType: 2, // log with 2 topics + DataSize: newU128WithLoPart(21), + noTopics: newU128WithLoPart(2), + Address: address[0], + Topics: [][common.NbLimbU256]field.Element{topics[0], topics[1]}, }, { - LogType: 0, // log with 0 topics - DataSize: field.Element{21}, - noTopics: field.Element{21}, - AddressHi: field.Element{121}, - AddressLo: field.Element{333}, - TopicsHi: nil, - TopicsLo: nil, + LogType: 0, // log with 0 topics + DataSize: newU128WithLoPart(21), + noTopics: newU128WithLoPart(21), + Address: newU160WithHiLoParts(121, 333), // dummy address + Topics: nil, }, { - LogType: 3, // RollingHash log with 3 topics - DataSize: field.Element{3}, - noTopics: field.Element{3}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicRollingHi, topicsHi[2], topicsHi[3]}, - TopicsLo: []field.Element{firstTopicRollingLo, topicsLo[2], topicsLo[3]}, + LogType: 3, // RollingHash log with 3 topics + DataSize: newU128WithLoPart(3), + noTopics: 
newU128WithLoPart(3), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopicRolling, topics[2], topics[3]}, }, { - LogType: 4, // an L2L1 log of the type we are interested in - DataSize: field.Element{213}, - noTopics: field.Element{4}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicL2L1Hi, topicsHi[3], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{firstTopicL2L1Lo, topicsLo[3], topicsLo[4], topicsLo[5]}, + LogType: 4, // an L2L1 log of the type we are interested in + DataSize: newU128WithLoPart(213), + noTopics: newU128WithLoPart(4), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopicL2L1, topics[3], topics[4], topics[5]}, }, { - LogType: 4, // log of type 4 which has a wrong bridge data and will be skipped - DataSize: field.Element{213}, - noTopics: field.Element{4}, - AddressHi: addressHi[0], - AddressLo: addressLo[0], - TopicsHi: []field.Element{firstTopicL2L1Hi, topicsHi[3], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{firstTopicL2L1Lo, topicsLo[3], topicsLo[4], topicsLo[5]}, + LogType: 4, // log of type 4 which has a wrong bridge data and will be skipped + DataSize: newU128WithLoPart(213), + noTopics: newU128WithLoPart(4), + Address: address[0], + Topics: [][common.NbLimbU256]field.Element{firstTopicL2L1, topics[3], topics[4], topics[5]}, }, { - LogType: 3, // log with 3 topics, but not a RollingHash log - DataSize: field.Element{191}, - noTopics: field.Element{191}, - AddressHi: addressHi[3], - AddressLo: addressLo[3], - TopicsHi: []field.Element{topicsHi[1], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{topicsLo[1], topicsLo[4], topicsLo[5]}, + LogType: 3, // log with 3 topics, but not a RollingHash log + DataSize: newU128WithLoPart(191), + noTopics: newU128WithLoPart(191), + Address: address[3], + Topics: [][common.NbLimbU256]field.Element{topics[1], topics[4], topics[5]}, }, { - LogType: 4, // an L2L1 log of the type we are 
interested in - DataSize: field.Element{34443}, - noTopics: field.Element{4}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicL2L1Hi, topicsHi[2], topicsHi[3], topicsHi[4]}, - TopicsLo: []field.Element{firstTopicL2L1Lo, topicsLo[2], topicsLo[3], topicsLo[4]}, + LogType: 4, // an L2L1 log of the type we are interested in + DataSize: newU128WithLoPart(34443), + noTopics: newU128WithLoPart(4), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopicL2L1, topics[2], topics[3], topics[4]}, }, { - LogType: 4, // log of type 4 which has a wrong first topic in TopicsHi/TopicsLo - DataSize: field.Element{213}, - noTopics: field.Element{4}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{topicsHi[3], topicsHi[3], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{topicsHi[3], topicsLo[3], topicsLo[4], topicsLo[5]}, + LogType: 4, // log of type 4 which has a wrong first topic in TopicsHi/TopicsLo + DataSize: newU128WithLoPart(213), + noTopics: newU128WithLoPart(4), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{topics[3], topics[3], topics[4], topics[5]}, }, { - LogType: 4, // an L2L1 log of the type we are interested in - DataSize: field.Element{100}, - noTopics: field.Element{4}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicL2L1Hi, topicsHi[0], topicsHi[4], topicsHi[5]}, - TopicsLo: []field.Element{firstTopicL2L1Lo, topicsLo[0], topicsLo[4], topicsLo[5]}, + LogType: 4, // an L2L1 log of the type we are interested in + DataSize: newU128WithLoPart(100), + noTopics: newU128WithLoPart(4), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopicL2L1, topics[0], topics[4], topics[5]}, }, { - LogType: MISSING_LOG, // missing log - DataSize: field.Element{}, - noTopics: field.Element{}, - AddressHi: field.Element{}, - AddressLo: field.Element{}, - TopicsHi: nil, - TopicsLo: nil, + LogType: 
MISSING_LOG, // missing log + Topics: nil, }, { - LogType: 3, // RollingHash log with 3 topics - DataSize: field.Element{312}, - noTopics: field.Element{3}, - AddressHi: bridgeAddrHi, - AddressLo: bridgeAddrLo, - TopicsHi: []field.Element{firstTopicRollingHi, topicsHi[0], topicsHi[5]}, - TopicsLo: []field.Element{firstTopicRollingLo, topicsLo[0], topicsLo[5]}, + LogType: 3, // RollingHash log with 3 topics + DataSize: newU128WithLoPart(312), + noTopics: newU128WithLoPart(3), + Address: bridgeAddr, + Topics: [][common.NbLimbU256]field.Element{firstTopicRolling, topics[0], topics[5]}, }, } return testLogs[:], bridgeAddress, "GenerateLargeTest" } + +func newU128WithLoPart(lo uint64) [common.NbLimbU128]field.Element { + var res [common.NbLimbU128]field.Element + res[common.NbLimbU128-1].SetUint64(lo) + return res +} + +func newU160WithHiLoParts(hi, lo uint64) [common.NbLimbEthAddress]field.Element { + var res [common.NbLimbEthAddress]field.Element + res[common.NbLimbEthAddress-common.NbLimbU128-1].SetUint64(hi) + res[common.NbLimbEthAddress-1].SetUint64(lo) + return res +} + +func newU256WithHiLoParts(hi, lo uint64) [common.NbLimbU256]field.Element { + var res [common.NbLimbU256]field.Element + res[common.NbLimbU128-1].SetUint64(hi) + res[common.NbLimbU256-1].SetUint64(lo) + return res +} diff --git a/prover/zkevm/prover/publicInput/public_input.go b/prover/zkevm/prover/publicInput/public_input.go index ab355d89b4..113dfc749f 100644 --- a/prover/zkevm/prover/publicInput/public_input.go +++ b/prover/zkevm/prover/publicInput/public_input.go @@ -1,12 +1,14 @@ package publicInput import ( + "fmt" "github.com/consensys/linea-monorepo/prover/protocol/accessors" "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/query" "github.com/consensys/linea-monorepo/prover/protocol/wizard" "github.com/consensys/linea-monorepo/prover/utils" "github.com/consensys/linea-monorepo/prover/utils/types" + pcommon 
"github.com/consensys/linea-monorepo/prover/zkevm/prover/common" "github.com/consensys/linea-monorepo/prover/zkevm/prover/hash/generic" "github.com/consensys/linea-monorepo/prover/zkevm/prover/hash/importpad" pack "github.com/consensys/linea-monorepo/prover/zkevm/prover/hash/packing" @@ -18,7 +20,7 @@ import ( "github.com/ethereum/go-ethereum/common" ) -var ( +const ( DataNbBytes = "DataNbBytes" DataChecksum = "DataChecksum" L2MessageHash = "L2MessageHash" @@ -28,16 +30,13 @@ var ( FinalBlockNumber = "FinalBlockNumber" InitialBlockTimestamp = "InitialBlockTimestamp" FinalBlockTimestamp = "FinalBlockTimestamp" - FirstRollingHashUpdate_0 = "FirstRollingHashUpdate_0" - FirstRollingHashUpdate_1 = "FirstRollingHashUpdate_1" - LastRollingHashUpdate_0 = "LastRollingHashUpdate_0" - LastRollingHashUpdate_1 = "LastRollingHashUpdate_1" + FirstRollingHashUpdate = "FirstRollingHashUpdate" + LastRollingHashUpdate = "LastRollingHashUpdate" FirstRollingHashUpdateNumber = "FirstRollingHashUpdateNumber" LastRollingHashNumberUpdate = "LastRollingHashNumberUpdate" ChainID = "ChainID" NBytesChainID = "NBytesChainID" - L2MessageServiceAddrHi = "L2MessageServiceAddrHi" - L2MessageServiceAddrLo = "L2MessageServiceAddrLo" + L2MessageServiceAddr = "L2MessageServiceAddr" ) // PublicInput collects a number of submodules responsible for collecting the @@ -51,7 +50,7 @@ type PublicInput struct { LogHasher logs.LogHasher ExecMiMCHasher edc.MIMCHasher DataNbBytes ifaces.Column - ChainID ifaces.Column + ChainID [pcommon.NbLimbU128]ifaces.Column ChainIDNBytes ifaces.Column Extractor FunctionalInputExtractor } @@ -92,53 +91,61 @@ func NewPublicInputZkEVM(comp *wizard.CompiledIOP, settings *Settings, ss *state settings.Name = "PUBLIC_INPUT" - return newPublicInput( - comp, - &InputModules{ - BlockData: &arith.BlockDataCols{ - RelBlock: getCol("blockdata.REL_BLOCK"), - Inst: getCol("blockdata.INST"), - Ct: getCol("blockdata.CT"), - DataHi: getCol("blockdata.DATA_HI"), - DataLo: 
getCol("blockdata.DATA_LO"), - FirstBlock: getCol("blockdata.FIRST_BLOCK_NUMBER"), - }, - TxnData: &arith.TxnData{ - AbsTxNum: getCol("txndata.ABS_TX_NUM"), - AbsTxNumMax: getCol("txndata.ABS_TX_NUM_MAX"), - Ct: getCol("txndata.CT"), - FromHi: getCol("txndata.FROM_HI"), - FromLo: getCol("txndata.FROM_LO"), - IsLastTxOfBlock: getCol("txndata.IS_LAST_TX_OF_BLOCK"), - RelBlock: getCol("txndata.REL_BLOCK"), - RelTxNum: getCol("txndata.REL_TX_NUM"), - RelTxNumMax: getCol("txndata.REL_TX_NUM_MAX"), - }, - RlpTxn: &arith.RlpTxn{ - AbsTxNum: getCol("rlptxn.ABS_TX_NUM"), - AbsTxNumMax: getCol("rlptxn.ABS_TX_NUM_INFINY"), - ToHashByProver: getCol("rlptxn.TO_HASH_BY_PROVER"), - Limb: getCol("rlptxn.LIMB"), - NBytes: getCol("rlptxn.nBYTES"), - Done: getCol("rlptxn.DONE"), - IsPhaseChainID: getCol("rlptxn.IS_PHASE_CHAIN_ID"), - }, - LogCols: logs.LogColumns{ - IsLog0: getCol("loginfo.IS_LOG_X_0"), - IsLog1: getCol("loginfo.IS_LOG_X_1"), - IsLog2: getCol("loginfo.IS_LOG_X_2"), - IsLog3: getCol("loginfo.IS_LOG_X_3"), - IsLog4: getCol("loginfo.IS_LOG_X_4"), - AbsLogNum: getCol("loginfo.ABS_LOG_NUM"), - AbsLogNumMax: getCol("loginfo.ABS_LOG_NUM_MAX"), - Ct: getCol("loginfo.CT"), - DataHi: getCol("loginfo.DATA_HI"), - DataLo: getCol("loginfo.DATA_LO"), - TxEmitsLogs: getCol("loginfo.TXN_EMITS_LOGS"), - }, - StateSummary: ss, + inputModules := &InputModules{ + BlockData: &arith.BlockDataCols{ + RelBlock: getCol("blockdata.REL_BLOCK"), + Inst: getCol("blockdata.INST"), + Ct: getCol("blockdata.CT"), + }, + TxnData: &arith.TxnData{ + AbsTxNum: getCol("txndata.ABS_TX_NUM"), + AbsTxNumMax: getCol("txndata.ABS_TX_NUM_MAX"), + Ct: getCol("txndata.CT"), + IsLastTxOfBlock: getCol("txndata.IS_LAST_TX_OF_BLOCK"), + RelBlock: getCol("txndata.REL_BLOCK"), + RelTxNum: getCol("txndata.REL_TX_NUM"), + RelTxNumMax: getCol("txndata.REL_TX_NUM_MAX"), + }, + RlpTxn: &arith.RlpTxn{ + AbsTxNum: getCol("rlptxn.ABS_TX_NUM"), + AbsTxNumMax: getCol("rlptxn.ABS_TX_NUM_INFINY"), + ToHashByProver: 
getCol("rlptxn.TO_HASH_BY_PROVER"), + NBytes: getCol("rlptxn.nBYTES"), + Done: getCol("rlptxn.DONE"), + IsPhaseChainID: getCol("rlptxn.IS_PHASE_CHAIN_ID"), }, - *settings) + LogCols: logs.LogColumns{ + IsLog0: getCol("loginfo.IS_LOG_X_0"), + IsLog1: getCol("loginfo.IS_LOG_X_1"), + IsLog2: getCol("loginfo.IS_LOG_X_2"), + IsLog3: getCol("loginfo.IS_LOG_X_3"), + IsLog4: getCol("loginfo.IS_LOG_X_4"), + AbsLogNum: getCol("loginfo.ABS_LOG_NUM"), + AbsLogNumMax: getCol("loginfo.ABS_LOG_NUM_MAX"), + Ct: getCol("loginfo.CT"), + TxEmitsLogs: getCol("loginfo.TXN_EMITS_LOGS"), + }, + StateSummary: ss, + } + + for i := range inputModules.RlpTxn.Limbs { + inputModules.RlpTxn.Limbs[i] = getCol(fmt.Sprintf("rlptxn.LIMB_%d", i)) + } + + for i := range inputModules.BlockData.FirstBlock { + inputModules.BlockData.FirstBlock[i] = getCol(fmt.Sprintf("blockdata.FIRST_BLOCK_NUMBER_%d", i)) + } + + for i := range pcommon.NbLimbU256 { + inputModules.BlockData.Data[i] = getCol(fmt.Sprintf("blockdata.DATA_%d", i)) + inputModules.LogCols.Data[i] = getCol(fmt.Sprintf("loginfo.DATA_%d", i)) + } + + for i := range inputModules.TxnData.From { + inputModules.TxnData.From[i] = getCol(fmt.Sprintf("txndata.FROM_%d", i)) + } + + return newPublicInput(comp, inputModules, *settings) } // newPublicInput receives as input a series of modules and returns a *PublicInput and @@ -164,7 +171,7 @@ func newPublicInput( fetchedRollingHash := logs.NewExtractedData(comp, inp.LogCols.Ct.Size(), "PUBLIC_INPUT_ROLLING_HASH") logSelectors := logs.NewSelectorColumns(comp, inp.LogCols) logHasherL2l1 := logs.NewLogHasher(comp, inp.LogCols.Ct.Size(), "PUBLIC_INPUT_L2L1LOGS") - rollingSelector := logs.NewRollingSelector(comp, "PUBLIC_INPUT_ROLLING_SEL", fetchedRollingHash.Hi.Size(), fetchedRollingMsg.Hi.Size()) + rollingSelector := logs.NewRollingSelector(comp, "PUBLIC_INPUT_ROLLING_SEL", fetchedRollingHash.Data[0].Size(), fetchedRollingMsg.Data[0].Size()) // Define Logs: Fetchers, Selectors and Hasher 
logs.DefineExtractedData(comp, inp.LogCols, logSelectors, fetchedL2L1, logs.L2L1) @@ -203,7 +210,7 @@ func newPublicInput( Index: execDataCollector.Ct, ToHash: execDataCollector.IsActive, NBytes: execDataCollector.NoBytes, - Limb: execDataCollector.Limb, + Limbs: execDataCollector.Limb, }}, PaddingStrategy: generic.MiMCUsecase, } @@ -296,55 +303,100 @@ func (pi *PublicInput) generateExtractor(comp *wizard.CompiledIOP) { return comp.InsertLocalOpening(0, ifaces.QueryIDf("%s_%s", "PUBLIC_INPUT_LOCAL_OPENING", col.GetColID()), col) } - initialRollingHash := [2]query.LocalOpening{ - createNewLocalOpening(pi.RollingHashFetcher.FirstHi), - createNewLocalOpening(pi.RollingHashFetcher.FirstLo), - } - - finalRollingHash := [2]query.LocalOpening{ - createNewLocalOpening(pi.RollingHashFetcher.LastHi), - createNewLocalOpening(pi.RollingHashFetcher.LastLo), - } - pi.Extractor = FunctionalInputExtractor{ - DataNbBytes: createNewLocalOpening(pi.DataNbBytes), - DataChecksum: createNewLocalOpening(pi.ExecMiMCHasher.HashFinal), - L2MessageHash: createNewLocalOpening(pi.LogHasher.HashFinal), - InitialStateRootHash: createNewLocalOpening(pi.RootHashFetcher.First), - FinalStateRootHash: createNewLocalOpening(pi.RootHashFetcher.Last), - InitialBlockNumber: createNewLocalOpening(pi.TimestampFetcher.FirstBlockID), - FinalBlockNumber: createNewLocalOpening(pi.TimestampFetcher.LastBlockID), - InitialBlockTimestamp: createNewLocalOpening(pi.TimestampFetcher.First), - FinalBlockTimestamp: createNewLocalOpening(pi.TimestampFetcher.Last), - FirstRollingHashUpdate: initialRollingHash, - LastRollingHashUpdate: finalRollingHash, - FirstRollingHashUpdateNumber: createNewLocalOpening(pi.RollingHashFetcher.FirstMessageNo), - LastRollingHashUpdateNumber: createNewLocalOpening(pi.RollingHashFetcher.LastMessageNo), - ChainID: createNewLocalOpening(pi.ChainID), - NBytesChainID: createNewLocalOpening(pi.ChainIDNBytes), - L2MessageServiceAddrHi: 
createNewLocalOpening(pi.Aux.logSelectors.L2BridgeAddressColHI), - L2MessageServiceAddrLo: createNewLocalOpening(pi.Aux.logSelectors.L2BridgeAddressColLo), + DataNbBytes: createNewLocalOpening(pi.DataNbBytes), + DataChecksum: createNewLocalOpening(pi.ExecMiMCHasher.HashFinal), + L2MessageHash: createNewLocalOpening(pi.LogHasher.HashFinal), + NBytesChainID: createNewLocalOpening(pi.ChainIDNBytes), } comp.PublicInputs = append(comp.PublicInputs, wizard.PublicInput{Name: DataNbBytes, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.DataNbBytes, 0)}, wizard.PublicInput{Name: DataChecksum, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.DataChecksum, 0)}, wizard.PublicInput{Name: L2MessageHash, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.L2MessageHash, 0)}, - wizard.PublicInput{Name: InitialStateRootHash, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.InitialStateRootHash, 0)}, - wizard.PublicInput{Name: FinalStateRootHash, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FinalStateRootHash, 0)}, - wizard.PublicInput{Name: InitialBlockNumber, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.InitialBlockNumber, 0)}, - wizard.PublicInput{Name: FinalBlockNumber, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FinalBlockNumber, 0)}, - wizard.PublicInput{Name: InitialBlockTimestamp, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.InitialBlockTimestamp, 0)}, - wizard.PublicInput{Name: FinalBlockTimestamp, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FinalBlockTimestamp, 0)}, - wizard.PublicInput{Name: FirstRollingHashUpdate_0, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FirstRollingHashUpdate[0], 0)}, - wizard.PublicInput{Name: FirstRollingHashUpdate_1, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FirstRollingHashUpdate[1], 0)}, - wizard.PublicInput{Name: LastRollingHashUpdate_0, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.LastRollingHashUpdate[0], 0)}, - wizard.PublicInput{Name: LastRollingHashUpdate_1, Acc: 
accessors.NewLocalOpeningAccessor(pi.Extractor.LastRollingHashUpdate[1], 0)}, - wizard.PublicInput{Name: FirstRollingHashUpdateNumber, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FirstRollingHashUpdateNumber, 0)}, - wizard.PublicInput{Name: LastRollingHashNumberUpdate, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.LastRollingHashUpdateNumber, 0)}, - wizard.PublicInput{Name: ChainID, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.ChainID, 0)}, wizard.PublicInput{Name: NBytesChainID, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.NBytesChainID, 0)}, - wizard.PublicInput{Name: L2MessageServiceAddrHi, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.L2MessageServiceAddrHi, 0)}, - wizard.PublicInput{Name: L2MessageServiceAddrLo, Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.L2MessageServiceAddrLo, 0)}, ) + + for i := range pcommon.NbLimbU256 { + pi.Extractor.InitialStateRootHash[i] = createNewLocalOpening(pi.RootHashFetcher.First[i]) + pi.Extractor.FinalStateRootHash[i] = createNewLocalOpening(pi.RootHashFetcher.Last[i]) + pi.Extractor.FirstRollingHashUpdate[i] = createNewLocalOpening(pi.RollingHashFetcher.First[i]) + pi.Extractor.LastRollingHashUpdate[i] = createNewLocalOpening(pi.RollingHashFetcher.Last[i]) + + comp.PublicInputs = append(comp.PublicInputs, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", InitialStateRootHash, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.InitialStateRootHash[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", FinalStateRootHash, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FinalStateRootHash[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", FirstRollingHashUpdate, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FirstRollingHashUpdate[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", LastRollingHashUpdate, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.LastRollingHashUpdate[i], 0), + }, + ) + } + + for i := range 
pcommon.NbLimbEthAddress { + pi.Extractor.L2MessageServiceAddr[i] = createNewLocalOpening(pi.Aux.logSelectors.L2BridgeAddressCol[i]) + + comp.PublicInputs = append(comp.PublicInputs, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", L2MessageServiceAddr, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.L2MessageServiceAddr[i], 0), + }, + ) + } + + for i := range pcommon.NbLimbU128 { + pi.Extractor.InitialBlockTimestamp[i] = createNewLocalOpening(pi.TimestampFetcher.First[i]) + pi.Extractor.FinalBlockTimestamp[i] = createNewLocalOpening(pi.TimestampFetcher.Last[i]) + pi.Extractor.FirstRollingHashUpdateNumber[i] = createNewLocalOpening(pi.RollingHashFetcher.FirstMessageNo[i]) + pi.Extractor.LastRollingHashUpdateNumber[i] = createNewLocalOpening(pi.RollingHashFetcher.LastMessageNo[i]) + pi.Extractor.ChainID[i] = createNewLocalOpening(pi.ChainID[i]) + + comp.PublicInputs = append(comp.PublicInputs, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", InitialBlockTimestamp, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.InitialBlockTimestamp[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", FinalBlockTimestamp, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FinalBlockTimestamp[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", FirstRollingHashUpdateNumber, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FirstRollingHashUpdateNumber[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", LastRollingHashNumberUpdate, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.LastRollingHashUpdateNumber[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", ChainID, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.ChainID[i], 0)}, + ) + } + + for i := range pcommon.NbLimbU48 { + pi.Extractor.InitialBlockNumber[i] = createNewLocalOpening(pi.TimestampFetcher.FirstBlockID[i]) + pi.Extractor.FinalBlockNumber[i] = createNewLocalOpening(pi.TimestampFetcher.LastBlockID[i]) + + 
comp.PublicInputs = append(comp.PublicInputs, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", InitialBlockNumber, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.InitialBlockNumber[i], 0), + }, + wizard.PublicInput{ + Name: fmt.Sprintf("%s_%d", FinalBlockNumber, i), + Acc: accessors.NewLocalOpeningAccessor(pi.Extractor.FinalBlockNumber[i], 0), + }, + ) + } } diff --git a/prover/zkevm/prover/publicInput/testdata/blockdata_mock.csv b/prover/zkevm/prover/publicInput/testdata/blockdata_mock.csv index 3420a2e76f..03b7929287 100644 --- a/prover/zkevm/prover/publicInput/testdata/blockdata_mock.csv +++ b/prover/zkevm/prover/publicInput/testdata/blockdata_mock.csv @@ -1,65 +1,65 @@ -REL_BLOCK,INST,CT,DATA_HI,DATA_LO,FIRST_BLOCK_NUMBER -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0,0,0,0,0 -0,0x0,0,0,0,0 -1,0x41,0,0,0,1500 -1,0x42,0,0,0xa,1500 -1,0x43,0,0,0,1500 -1,0x44,0,0,0,1500 -1,0x45,0,0,0xa,1500 -1,0x46,0,0,0,1500 -1,0x48,0,0,0,1500 -2,0x41,0,0,0,1500 -2,0x42,0,0,0xab,1500 -2,0x43,0,0,0,1500 -2,0x44,0,0,0,1500 -2,0x45,0,0,0,1500 -2,0x46,0,0,0,1500 -2,0x48,0,0,0,1500 -3,0,0,0,0,1500 -3,0x42,0,0,0xbc,1500 -3,0,0,0,0,1500 -3,0,1,0,0,1500 -3,0,2,0,0,1500 -3,0,3,0,0,1500 -3,0,4,0,0,1500 -4,0,5,0,0,1500 -4,0x42,0,0,0xcd,1500 -4,0x42,1,0,0xcd,1500 -4,0x43,0,0,0xa,1105 -4,0x43,1,0,0xa,1105 -4,0x44,0,0,0xa,1200 -4,0x45,0,0,0xa,1500 -4,0x45,1,0,0xa,1500 -4,0x45,2,0,0xa,1500 -4,0x45,3,0,0xa,1500 -4,0x45,4,0,0xa,1500 -4,0x46,0,0,0xa,1500 -4,0x48,1,0,0xc,1500 \ No newline at end of file 
+REL_BLOCK,INST,CT,DATA_0,DATA_1,DATA_2,DATA_3,DATA_4,DATA_5,DATA_6,DATA_7,DATA_8,DATA_9,DATA_10,DATA_11,DATA_12,DATA_13,DATA_14,DATA_15,FIRST_BLOCK_NUMBER_0,FIRST_BLOCK_NUMBER_1,FIRST_BLOCK_NUMBER_2 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 
+0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 
+0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0x0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +1,0x41,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +1,0x42,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +1,0x43,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +1,0x44,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +1,0x45,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +1,0x46,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +1,0x48,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +2,0x41,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +2,0x42,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x00ab,0x0000,0x0000,0x05dc +2,0x43,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc 
+2,0x44,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +2,0x45,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +2,0x46,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +2,0x48,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +3,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +3,0x42,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x00bc,0x0000,0x0000,0x05dc +3,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +3,0,1,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +3,0,2,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +3,0,3,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +3,0,4,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +4,0,5,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x05dc +4,0x42,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x00cd,0x0000,0x0000,0x05dc +4,0x42,1,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x00cd,0x0000,0x0000,0x05dc 
+4,0x43,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x0451 +4,0x43,1,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x0451 +4,0x44,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x04b0 +4,0x45,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +4,0x45,1,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +4,0x45,2,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +4,0x45,3,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +4,0x45,4,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +4,0x46,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000a,0x0000,0x0000,0x05dc +4,0x48,1,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x000c,0x0000,0x0000,0x05dc diff --git a/prover/zkevm/prover/publicInput/testdata/rlp_txn_mock.csv b/prover/zkevm/prover/publicInput/testdata/rlp_txn_mock.csv index 87cb3bb8f5..cded53667b 100644 --- a/prover/zkevm/prover/publicInput/testdata/rlp_txn_mock.csv +++ b/prover/zkevm/prover/publicInput/testdata/rlp_txn_mock.csv @@ -1,65 +1,65 @@ -RT.ABS_TX_NUM,RT.ABS_TX_NUM_MAX,RL.TO_HASH_BY_PROVER,RL.DONE,RL.IS_PHASE_CHAIN_ID,RL.NBYTES,RL.LIMB -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 
-0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -0,0,0,0,0,0,0 -1,10,0,0,0,0,0 -1,10,1,0,0,1,0xf9000000000000000000000000000000 -1,10,0,0,0,0,0 -1,10,1,1,1,2,0xccc00000000000000000000000000000 -2,10,1,0,0,3,0x12345600000000000000000000000000 -2,10,0,0,0,0,0 -2,10,1,0,0,1,0xf0000000000000000000000000000000 -2,10,0,0,0,0,0 -2,10,0,0,0,0,0 -3,10,0,0,0,0,0 -3,10,1,0,0,4,0x47d5f9a0000000000000000000000000 -3,10,0,0,0,0,0 -4,10,0,0,0,0,0 -4,10,0,0,0,0,0 -4,10,1,0,0,4,0x477aaaa0000000000000000000000000 -4,10,1,0,0,2,0xaaaa0000000000000000000000000000 -4,10,1,0,0,1,0xcc000000000000000000000000000000 -4,10,0,0,0,0,0 -5,10,0,0,0,0,0 -5,10,0,0,0,0,0 -5,10,1,0,0,1,0xbb000000000000000000000000000000 -6,10,0,0,0,0,0 -6,10,1,0,0,5,0xaaaaaaaaaa0000000000000000000000 -6,10,0,0,0,0,0 -7,10,0,0,0,0,0 -7,10,1,0,0,1,0xff000000000000000000000000000000 -8,10,0,0,0,0,0 -8,10,0,0,0,0,0 -8,10,1,0,0,2,0xaaa00000000000000000000000000000 -8,10,0,0,0,0,0 -9,10,0,0,0,0,0 -9,10,1,0,0,2,0xaaa00000000000000000000000000000 -10,10,0,0,0,0,0 -10,10,0,0,0,0,0 -10,10,1,0,0,2,0xeee00000000000000000000000000000 -10,10,1,0,0,5,0x1a2a3a4a5a0000000000000000000000 -10,10,0,0,0,0,0 -10,10,1,0,0,3,0xb1b2b300000000000000000000000000 -10,10,1,0,0,1,0x10000000000000000000000000000000 -10,10,0,0,0,0,0 \ No newline at end of file +RT.ABS_TX_NUM,RT.ABS_TX_NUM_MAX,RL.TO_HASH_BY_PROVER,RL.DONE,RL.IS_PHASE_CHAIN_ID,RL.NBYTES,RL.LIMB_0,RL.LIMB_1,RL.LIMB_2,RL.LIMB_3,RL.LIMB_4,RL.LIMB_5,RL.LIMB_6,RL.LIMB_7 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 
+0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +0,0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +1,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +1,10,1,0,0,1,0xf900,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +1,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +1,10,1,1,1,2,0xccc0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +2,10,1,0,0,3,0x1234,0x5600,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +2,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +2,10,1,0,0,1,0xf000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +2,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +2,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 
+3,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +3,10,1,0,0,4,0x47d5,0xf9a0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +3,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +4,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +4,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +4,10,1,0,0,4,0x477a,0xaaa0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +4,10,1,0,0,2,0xaaaa,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +4,10,1,0,0,1,0xcc00,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +4,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +5,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +5,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +5,10,1,0,0,1,0xbb00,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +6,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +6,10,1,0,0,5,0xaaaa,0xaaaa,0xaa00,0x0000,0x0000,0x0000,0x0000,0x0000 +6,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +7,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +7,10,1,0,0,1,0xff00,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +8,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +8,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +8,10,1,0,0,2,0xaaa0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +8,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +9,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +9,10,1,0,0,2,0xaaa0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,1,0,0,2,0xeee0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,1,0,0,5,0x1a2a,0x3a4a,0x5a00,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 
+10,10,1,0,0,3,0xb1b2,0xb300,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,1,0,0,1,0x1000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 +10,10,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000 diff --git a/prover/zkevm/prover/publicInput/testdata/txndata_mock.csv b/prover/zkevm/prover/publicInput/testdata/txndata_mock.csv index 2b5f5f1b0d..078e88551a 100644 --- a/prover/zkevm/prover/publicInput/testdata/txndata_mock.csv +++ b/prover/zkevm/prover/publicInput/testdata/txndata_mock.csv @@ -1,65 +1,65 @@ -TD.ABS_TX_NUM,TD.ABS_TX_NUM_MAX,TD.REL_TX_NUM,TD.REL_TX_NUM_MAX,TD.CT,TD.FROM_HI,TD.FROM_LO,TD.IS_LAST_TX_OF_BLOCK,TD.REL_BLOCK -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -0,0,0,0,0,0,0,0,0 -1,10,1,3,0,0xaaaaaaaa,0xffffffffffffffffffffffffffffffff,0,1 -1,10,1,3,1,0xaaaaaaaa,0xffffffffffffffffffffffffffffffff,0,1 -2,10,2,3,0,0xbbbbbbbb,0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0,1 -2,10,2,3,1,0xbbbbbbbb,0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0,1 -3,10,3,3,0,0xcccccccc,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,1,1 -3,10,3,3,1,0xcccccccc,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,1,1 -3,10,3,3,2,0xcccccccc,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,1,1 -4,10,1,4,0,0xdddddddd,0xcccccccccccccccccccccccccccccccc,0,2 -4,10,1,4,1,0xdddddddd,0xcccccccccccccccccccccccccccccccc,0,2 -5,10,2,4,0,0xeeeeeeee,0xdddddddddddddddddddddddddddddddd,0,2 
-5,10,2,4,1,0xeeeeeeee,0xdddddddddddddddddddddddddddddddd,0,2 -6,10,3,4,0,0xffffffff,0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee,0,2 -6,10,3,4,1,0xffffffff,0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee,0,2 -7,10,4,4,0,0xaaaaaaaa,0xffffffffffffffffffffffffffffffff,1,2 -7,10,4,4,1,0xaaaaaaaa,0xffffffffffffffffffffffffffffffff,1,2 -8,10,1,2,0,0xbbbbbbbb,0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0,3 -8,10,1,2,1,0xbbbbbbbb,0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0,3 -9,10,2,2,0,0xcccccccc,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,1,3 -9,10,2,2,1,0xcccccccc,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,1,3 -10,10,1,1,0,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,1,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,2,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,3,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,4,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,5,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,6,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 -10,10,1,1,7,0xdddddddd,0xcccccccccccccccccccccccccccccccc,1,4 \ No newline at end of file +TD.ABS_TX_NUM,TD.ABS_TX_NUM_MAX,TD.REL_TX_NUM,TD.REL_TX_NUM_MAX,TD.CT,TD.FROM_0,TD.FROM_1,TD.FROM_2,TD.FROM_3,TD.FROM_4,TD.FROM_5,TD.FROM_6,TD.FROM_7,TD.FROM_8,TD.FROM_9,TD.IS_LAST_TX_OF_BLOCK,TD.REL_BLOCK +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 
+0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 
+0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +0,0,0,0,0,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0x0000,0,0 +1,10,1,3,0,0xaaaa,0xaaaa,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0,1 +1,10,1,3,1,0xaaaa,0xaaaa,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0,1 +2,10,2,3,0,0xbbbb,0xbbbb,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0,1 +2,10,2,3,1,0xbbbb,0xbbbb,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0,1 +3,10,3,3,0,0xcccc,0xcccc,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,1,1 +3,10,3,3,1,0xcccc,0xcccc,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,1,1 +3,10,3,3,2,0xcccc,0xcccc,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,1,1 +4,10,1,4,0,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0,2 +4,10,1,4,1,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0,2 +5,10,2,4,0,0xeeee,0xeeee,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0,2 +5,10,2,4,1,0xeeee,0xeeee,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0xdddd,0,2 +6,10,3,4,0,0xffff,0xffff,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0,2 +6,10,3,4,1,0xffff,0xffff,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0xeeee,0,2 +7,10,4,4,0,0xaaaa,0xaaaa,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,1,2 +7,10,4,4,1,0xaaaa,0xaaaa,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,0xffff,1,2 +8,10,1,2,0,0xbbbb,0xbbbb,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0,3 +8,10,1,2,1,0xbbbb,0xbbbb,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0xaaaa,0,3 
+9,10,2,2,0,0xcccc,0xcccc,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,1,3 +9,10,2,2,1,0xcccc,0xcccc,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,0xbbbb,1,3 +10,10,1,1,0,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,1,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,2,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,3,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,4,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,5,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,6,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 +10,10,1,1,7,0xdddd,0xdddd,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,0xcccc,1,4 diff --git a/prover/zkevm/prover/publicInput/utilities/utilities.go b/prover/zkevm/prover/publicInput/utilities/utilities.go index 0f79c1cf82..3bb279b416 100644 --- a/prover/zkevm/prover/publicInput/utilities/utilities.go +++ b/prover/zkevm/prover/publicInput/utilities/utilities.go @@ -9,6 +9,7 @@ import ( "github.com/consensys/linea-monorepo/prover/protocol/ifaces" "github.com/consensys/linea-monorepo/prover/protocol/wizard" sym "github.com/consensys/linea-monorepo/prover/symbolic" + "github.com/consensys/linea-monorepo/prover/utils" "github.com/consensys/linea-monorepo/prover/utils/csvtraces" "github.com/ethereum/go-ethereum/core/vm" ) @@ -91,3 +92,33 @@ func CheckLastELemConsistency(comp *wizard.CompiledIOP, isActive ifaces.Column, ), ) } + +// Multi16bitLimbAdd adds a uint64 to a multi-limb number represented as a slice of 16-bit field.Element. 
+func Multi16bitLimbAdd(a []field.Element, b uint64) []field.Element {
+	if len(a) == 0 {
+		utils.Panic("Multi16bitLimbAdd: zero limbs")
+	}
+
+	const (
+		bits = 16
+		mask = (1 << bits) - 1
+	)
+
+	res := make([]field.Element, len(a))
+	carry := b // running carry, seeded with the addend; folded in at the least-significant (last) limb
+	for i := len(a) - 1; i >= 0; i-- {
+		v := a[i].Uint64()
+		if v > mask {
+			utils.Panic("Multi16bitLimbAdd: a[%d]=%d exceeds %d bits", i, v, bits)
+		}
+		sum := v + carry
+		res[i].SetUint64(sum & mask)
+		carry = sum >> bits
+	}
+
+	if carry != 0 {
+		// Report the original addend b: the loop has consumed `carry`, so it no longer reflects what was added.
+		utils.Panic("Multi16bitLimbAdd: overflow adding %d to %v", b, a)
+	}
+
+	return res
+}
diff --git a/prover/zkevm/prover/publicInput/utilities/utilities_test.go b/prover/zkevm/prover/publicInput/utilities/utilities_test.go
new file mode 100644
index 0000000000..8d972bc1ac
--- /dev/null
+++ b/prover/zkevm/prover/publicInput/utilities/utilities_test.go
@@ -0,0 +1,131 @@
+package utilities
+
+import (
+	"testing"
+
+	"github.com/consensys/linea-monorepo/prover/maths/field"
+	"github.com/stretchr/testify/require"
+)
+
+type testCase struct {
+	name        string
+	aVals       []uint64
+	b           uint64
+	expected    []uint64
+	expectPanic bool
+}
+
+func TestMultiLimbAdd16Bit(t *testing.T) {
+	tests := []testCase{
+		{
+			name:        "empty_slice",
+			aVals:       []uint64{},
+			b:           1,
+			expectPanic: true,
+		},
+		{
+			name:     "single_limb_no_carry",
+			aVals:    []uint64{100},
+			b:        50,
+			expected: []uint64{150},
+		},
+		{
+			name:     "single_limb_with_carry_no_overflow",
+			aVals:    []uint64{0xFFFE},
+			b:        1,
+			expected: []uint64{0xFFFF},
+		},
+		{
+			name:        "single_limb_overflow",
+			aVals:       []uint64{0xFFFF},
+			b:           1,
+			expectPanic: true,
+		},
+		{
+			name:     "two_limbs_no_carry",
+			aVals:    []uint64{1, 2},
+			b:        3,
+			expected: []uint64{1, 5},
+		},
+		{
+			name:     "two_limbs_with_carry",
+			aVals:    []uint64{1, 0xFFFF},
+			b:        1,
+			expected: []uint64{2, 0},
+		},
+		{
+			name:     "cascade_carry",
+			aVals:    []uint64{0, 0xFFFF, 0xFFFF},
+			b:        1,
+			expected: []uint64{1, 0, 0},
+		},
+		{
+			name:        "overflow_multi_limb",
+			aVals:       []uint64{0xFFFF, 0xFFFF},
+			b:           1,
+			expectPanic: true,
+		},
+		{
+			name:        "initial_limb_exceeds_mask",
+			aVals:       []uint64{0, 0x10000},
+			b:           0,
+			expectPanic: true,
+		},
+		{
+			name:        "single_limb_large_b_overflow",
+			aVals:       []uint64{0},
+			b:           0x1_0000,
+			expectPanic: true,
+		},
+		{
+			name:     "two_limbs_large_b_no_overflow",
+			aVals:    []uint64{1, 2},
+			b:        0x1_0001,
+			expected: []uint64{2, 3},
+		},
+		{
+			name:        "two_limbs_large_b_overflow",
+			aVals:       []uint64{0, 0},
+			b:           1 << 32,
+			expectPanic: true,
+		},
+		{
+			name:     "three_limbs_large_b_no_overflow",
+			aVals:    []uint64{1, 2, 3},
+			b:        0x1_0001_0002,
+			expected: []uint64{2, 3, 5},
+		},
+		{
+			name:        "three_limbs_large_b_overflow",
+			aVals:       []uint64{0xFFFF, 0xFFFF, 0xFFFF},
+			b:           1 << 48,
+			expectPanic: true,
+		},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			limbs := make([]field.Element, len(tc.aVals))
+			for i, v := range tc.aVals {
+				limbs[i].SetUint64(v)
+			}
+
+			if tc.expectPanic {
+				require.Panics(t, func() {
+					Multi16bitLimbAdd(limbs, tc.b)
+				})
+				return
+			}
+
+			var res []field.Element
+			require.NotPanics(t, func() {
+				res = Multi16bitLimbAdd(limbs, tc.b)
+			})
+
+			for i, want := range tc.expected {
+				got := res[i].Uint64()
+				require.Equal(t, want, got, "limb %d mismatch", i)
+			}
+		})
+	}
+}