
Commit

version, fmt

rkdud007 committed Aug 12, 2024
1 parent 9686a2e commit ae93aef

Showing 16 changed files with 157 additions and 373 deletions.
136 changes: 39 additions & 97 deletions src/HdpExecutionStore.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {MerkleProof} from "openzeppelin-contracts/contracts/utils/cryptography/MerkleProof.sol";
import {UUPSUpgradeable} from "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol";
@@ -9,7 +9,10 @@
import {IAggregatorsFactory} from "./interfaces/IAggregatorsFactory.sol";

import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "./datatypes/datalake/BlockSampledDatalakeCodecs.sol";
-import {TransactionsInBlockDatalake, TransactionsInBlockDatalakeCodecs} from "./datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
+import {
+TransactionsInBlockDatalake,
+TransactionsInBlockDatalakeCodecs
+} from "./datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
import {ComputationalTask, ComputationalTaskCodecs} from "./datatypes/datalake/ComputeCodecs.sol";
import {ModuleTask, ModuleCodecs} from "./datatypes/module/ModuleCodecs.sol";

@@ -51,16 +54,10 @@
event MmrRootCached(uint256 mmrId, uint256 mmrSize, bytes32 mmrRoot);

/// @notice emitted when a new task with block sampled datalake is scheduled
-event TaskWithBlockSampledDatalakeScheduled(
-BlockSampledDatalake datalake,
-ComputationalTask task
-);
+event TaskWithBlockSampledDatalakeScheduled(BlockSampledDatalake datalake, ComputationalTask task);

/// @notice emitted when a new task with transactions in block datalake is scheduled
-event TaskWithTransactionsInBlockDatalakeScheduled(
-TransactionsInBlockDatalake datalake,
-ComputationalTask task
-);
+event TaskWithTransactionsInBlockDatalakeScheduled(TransactionsInBlockDatalake datalake, ComputationalTask task);

/// @notice emitted when a new module task is scheduled
event ModuleTaskScheduled(ModuleTask moduleTask);
@@ -81,14 +78,12 @@
mapping(bytes32 => TaskResult) public cachedTasksResult;

/// @notice mapping of chain id => mmr id => mmr size => mmr root
-mapping(uint256 => mapping(uint256 => mapping(uint256 => bytes32)))
-public cachedMMRsRoots;
-
-function initialize(
-IFactsRegistry factsRegistry,
-IAggregatorsFactory aggregatorsFactory,
-bytes32 programHash
-) public initializer {
+mapping(uint256 => mapping(uint256 => mapping(uint256 => bytes32))) public cachedMMRsRoots;
+
+function initialize(IFactsRegistry factsRegistry, IAggregatorsFactory aggregatorsFactory, bytes32 programHash)
+public
+initializer
+{
__Ownable_init(msg.sender);
__UUPSUpgradeable_init();

@@ -99,9 +94,7 @@
}

/// @dev Allow to set a new implementation
-function _authorizeUpgrade(
-address newImplementation
-) internal override onlyOwner {}
+function _authorizeUpgrade(address newImplementation) internal override onlyOwner {}

/// @notice Set the program hash for the HDP program
function setProgramHash(bytes32 programHash) external onlyOwner {
@@ -115,20 +108,11 @@
/// @notice Caches the MMR root for a given MMR id
/// @notice Get MMR size and root from the aggregator and cache it
function cacheMmrRoot(uint256 mmrId) public {
-ISharpFactsAggregator aggregator = AGGREGATORS_FACTORY.aggregatorsById(
-mmrId
-);
-ISharpFactsAggregator.AggregatorState
-memory aggregatorState = aggregator.aggregatorState();
-cachedMMRsRoots[CHAIN_ID][mmrId][
-aggregatorState.mmrSize
-] = aggregatorState.poseidonMmrRoot;
-
-emit MmrRootCached(
-mmrId,
-aggregatorState.mmrSize,
-aggregatorState.poseidonMmrRoot
-);
+ISharpFactsAggregator aggregator = AGGREGATORS_FACTORY.aggregatorsById(mmrId);
+ISharpFactsAggregator.AggregatorState memory aggregatorState = aggregator.aggregatorState();
+cachedMMRsRoots[CHAIN_ID][mmrId][aggregatorState.mmrSize] = aggregatorState.poseidonMmrRoot;
+
+emit MmrRootCached(mmrId, aggregatorState.mmrSize, aggregatorState.poseidonMmrRoot);
}

/// @notice Requests the execution of a task with a block sampled datalake
@@ -147,15 +131,9 @@
}

// Store the task result
-cachedTasksResult[taskCommitment] = TaskResult({
-status: TaskStatus.SCHEDULED,
-result: ""
-});
-
-emit TaskWithBlockSampledDatalakeScheduled(
-blockSampledDatalake,
-computationalTask
-);
+cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
+
+emit TaskWithBlockSampledDatalakeScheduled(blockSampledDatalake, computationalTask);
}

/// @notice Requests the execution of a task with a transactions in block datalake
@@ -174,22 +152,14 @@
}

// Store the task result
-cachedTasksResult[taskCommitment] = TaskResult({
-status: TaskStatus.SCHEDULED,
-result: ""
-});
-
-emit TaskWithTransactionsInBlockDatalakeScheduled(
-transactionsInBlockDatalake,
-computationalTask
-);
+cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
+
+emit TaskWithTransactionsInBlockDatalakeScheduled(transactionsInBlockDatalake, computationalTask);
}

/// @notice Requests the execution of a task with a module
/// @param moduleTask module task
-function requestExecutionOfModuleTask(
-ModuleTask calldata moduleTask
-) external {
+function requestExecutionOfModuleTask(ModuleTask calldata moduleTask) external {
bytes32 taskCommitment = moduleTask.commit();

// Ensure task is not already scheduled
@@ -198,10 +168,7 @@
}

// Store the task result
-cachedTasksResult[taskCommitment] = TaskResult({
-status: TaskStatus.SCHEDULED,
-result: ""
-});
+cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});

emit ModuleTaskScheduled(moduleTask);
}
@@ -253,9 +220,7 @@
bytes32 programOutputHash = keccak256(abi.encodePacked(programOutput));

// Compute GPS fact hash
-bytes32 gpsFactHash = keccak256(
-abi.encode(PROGRAM_HASH, programOutputHash)
-);
+bytes32 gpsFactHash = keccak256(abi.encode(PROGRAM_HASH, programOutputHash));

// Ensure GPS fact is registered
if (!SHARP_FACTS_REGISTRY.isValid(gpsFactHash)) {
@@ -269,63 +234,42 @@
bytes32[] memory resultInclusionProof = resultsInclusionProofs[i];

// Convert the low and high 128 bits to a single 256 bit value
-bytes32 resultMerkleRoot = bytes32(
-(resultMerkleRootHigh << 128) | resultMerkleRootLow
-);
-bytes32 taskMerkleRoot = bytes32(
-(taskMerkleRootHigh << 128) | taskMerkleRootLow
-);
+bytes32 resultMerkleRoot = bytes32((resultMerkleRootHigh << 128) | resultMerkleRootLow);
+bytes32 taskMerkleRoot = bytes32((taskMerkleRootHigh << 128) | taskMerkleRootLow);

// Compute the Merkle leaf of the task
bytes32 taskCommitment = taskCommitments[i];
bytes32 taskMerkleLeaf = standardLeafHash(taskCommitment);
// Ensure that the task is included in the batch, by verifying the Merkle proof
-bool isVerifiedTask = taskInclusionProof.verify(
-taskMerkleRoot,
-taskMerkleLeaf
-);
+bool isVerifiedTask = taskInclusionProof.verify(taskMerkleRoot, taskMerkleLeaf);

if (!isVerifiedTask) {
revert NotInBatch();
}

// Compute the Merkle leaf of the task result
-bytes32 taskResultCommitment = keccak256(
-abi.encode(taskCommitment, computationalTaskResult)
-);
-bytes32 taskResultMerkleLeaf = standardLeafHash(
-taskResultCommitment
-);
+bytes32 taskResultCommitment = keccak256(abi.encode(taskCommitment, computationalTaskResult));
+bytes32 taskResultMerkleLeaf = standardLeafHash(taskResultCommitment);
// Ensure that the task result is included in the batch, by verifying the Merkle proof
-bool isVerifiedResult = resultInclusionProof.verify(
-resultMerkleRoot,
-taskResultMerkleLeaf
-);
+bool isVerifiedResult = resultInclusionProof.verify(resultMerkleRoot, taskResultMerkleLeaf);

if (!isVerifiedResult) {
revert NotInBatch();
}

// Store the task result
-cachedTasksResult[taskCommitment] = TaskResult({
-status: TaskStatus.FINALIZED,
-result: computationalTaskResult
-});
+cachedTasksResult[taskCommitment] =
+TaskResult({status: TaskStatus.FINALIZED, result: computationalTaskResult});
}
}

/// @notice Load MMR root from cache with given mmrId and mmrSize
-function loadMmrRoot(
-uint256 mmrId,
-uint256 mmrSize
-) public view returns (bytes32) {
+function loadMmrRoot(uint256 mmrId, uint256 mmrSize) public view returns (bytes32) {
return cachedMMRsRoots[CHAIN_ID][mmrId][mmrSize];
}

/// @notice Returns the result of a finalized task
-function getFinalizedTaskResult(
-bytes32 taskCommitment
-) external view returns (bytes32) {
+function getFinalizedTaskResult(bytes32 taskCommitment) external view returns (bytes32) {
// Ensure task is finalized
if (cachedTasksResult[taskCommitment].status != TaskStatus.FINALIZED) {
revert NotFinalized();
@@ -334,9 +278,7 @@
}

/// @notice Returns the status of a task
-function getTaskStatus(
-bytes32 taskCommitment
-) external view returns (TaskStatus) {
+function getTaskStatus(bytes32 taskCommitment) external view returns (TaskStatus) {
return cachedTasksResult[taskCommitment].status;
}

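The HdpExecutionStore hunks above only reformat the task lifecycle that was already in place: the request* functions schedule a task, authenticateTaskExecution finalizes a batch against a registered SHARP fact, and getTaskStatus / getFinalizedTaskResult expose the outcome. A minimal sketch of an external consumer of those two getters follows; the reduced IHdpExecutionStore interface and the TaskStatus ordering are assumptions for illustration, not part of this commit.

    // SPDX-License-Identifier: GPL-3.0
    pragma solidity ^0.8.20;

    // Hedged sketch: the interface is cut down to the two getters visible in the
    // diff above, and the TaskStatus ordering (NONE, SCHEDULED, FINALIZED) is assumed.
    interface IHdpExecutionStore {
        enum TaskStatus {
            NONE,
            SCHEDULED,
            FINALIZED
        }

        function getTaskStatus(bytes32 taskCommitment) external view returns (TaskStatus);
        function getFinalizedTaskResult(bytes32 taskCommitment) external view returns (bytes32);
    }

    contract HdpResultReader {
        IHdpExecutionStore public immutable store;

        constructor(IHdpExecutionStore _store) {
            store = _store;
        }

        // getFinalizedTaskResult already reverts (NotFinalized) until the batch has
        // been authenticated; the explicit status check lets callers fail with a
        // local error message instead.
        function readResult(bytes32 taskCommitment) external view returns (bytes32) {
            if (store.getTaskStatus(taskCommitment) != IHdpExecutionStore.TaskStatus.FINALIZED) {
                revert("task not finalized");
            }
            return store.getFinalizedTaskResult(taskCommitment);
        }
    }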
2 changes: 1 addition & 1 deletion src/MockedSharpFactsRegistry.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

contract MockedSharpFactsRegistry {
mapping(bytes32 => bool) public isValid;
2 changes: 1 addition & 1 deletion src/datatypes/Task.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

/// @notice Task type.
enum TaskCode {
2 changes: 1 addition & 1 deletion src/datatypes/datalake/BlockSampledDatalakeCodecs.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {DatalakeCode} from "./Datalake.sol";

2 changes: 1 addition & 1 deletion src/datatypes/datalake/ComputeCodecs.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {TaskCode} from "../Task.sol";

2 changes: 1 addition & 1 deletion src/datatypes/datalake/Datalake.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

/// @notice Datalake type.
enum DatalakeCode {
2 changes: 1 addition & 1 deletion src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {DatalakeCode} from "./Datalake.sol";

2 changes: 1 addition & 1 deletion src/datatypes/module/ModuleCodecs.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {TaskCode} from "../Task.sol";

2 changes: 1 addition & 1 deletion src/interfaces/IAggregatorsFactory.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {ISharpFactsAggregator} from "./ISharpFactsAggregator.sol";

2 changes: 1 addition & 1 deletion src/interfaces/IFactsRegistry.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

/// @notice Interface for the facts registry (https://github.com/starkware-libs/starkex-contracts/blob/master/scalable-dex/contracts/src/components/FactRegistry.sol)
interface IFactsRegistry {
2 changes: 1 addition & 1 deletion src/interfaces/ISharpFactsAggregator.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

/// @notice Interface for the SharpFactsAggregator.
interface ISharpFactsAggregator {
2 changes: 1 addition & 1 deletion src/lib/Uint256Splitter.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

/// Not a expected type.
error InvalidType();
33 changes: 10 additions & 23 deletions test/BasicHdpExecutionStore.t.sol
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-3.0
-pragma solidity ^0.8.4;
+pragma solidity ^0.8.20;

import {Test} from "forge-std/Test.sol";
import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol";
@@ -19,10 +19,7 @@
contract MockAggregatorsFactory is IAggregatorsFactory {
mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;

-function createAggregator(
-uint256 id,
-ISharpFactsAggregator aggregator
-) external {
+function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
aggregatorsById[id] = aggregator;
}
}
@@ -37,13 +34,12 @@
}

function aggregatorState() external view returns (AggregatorState memory) {
-return
-AggregatorState({
-poseidonMmrRoot: usedMmrRoot,
-keccakMmrRoot: bytes32(0),
-mmrSize: usedMmrSize,
-continuableParentHash: bytes32(0)
-});
+return AggregatorState({
+poseidonMmrRoot: usedMmrRoot,
+keccakMmrRoot: bytes32(0),
+mmrSize: usedMmrSize,
+continuableParentHash: bytes32(0)
+});
}
}

@@ -65,19 +61,10 @@
bytes32 oldPrgramHash = bytes32(uint256(1));
hdp = new HdpExecutionStore();
proxy = new ERC1967Proxy(
-address(hdp),
-abi.encodeCall(
-hdp.initialize,
-(factsRegistry, aggregatorsFactory, oldPrgramHash)
-)
+address(hdp), abi.encodeCall(hdp.initialize, (factsRegistry, aggregatorsFactory, oldPrgramHash))
);

-emit log_bytes(
-abi.encodeCall(
-hdp.initialize,
-(factsRegistry, aggregatorsFactory, oldPrgramHash)
-)
-);
+emit log_bytes(abi.encodeCall(hdp.initialize, (factsRegistry, aggregatorsFactory, oldPrgramHash)));

assertEq(hdp.getProgramHash(), oldPrgramHash);
bytes32 newProgramHash = bytes32(uint256(2));
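The test hunks above deploy the implementation behind an ERC1967Proxy and initialize it through abi.encodeCall. The sketch below repeats that wiring in isolation; the import paths and the placeholder mock addresses are assumptions, whereas the real test constructs MockFactsRegistry, MockAggregatorsFactory, and MockSharpFactsAggregator instead.

    // SPDX-License-Identifier: GPL-3.0
    pragma solidity ^0.8.20;

    // Hedged sketch (assumed import paths, placeholder mock addresses) of the
    // ERC1967Proxy + abi.encodeCall(initialize, ...) pattern used in the test.
    import {Test} from "forge-std/Test.sol";
    import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol";
    import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
    import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
    import {IAggregatorsFactory} from "../src/interfaces/IAggregatorsFactory.sol";

    contract ProxyDeploySketch is Test {
        function test_initializeThroughProxy() public {
            // Placeholder dependencies; initialize only stores them, so plain
            // addresses are enough for this sketch.
            IFactsRegistry factsRegistry = IFactsRegistry(address(0xF1));
            IAggregatorsFactory aggregatorsFactory = IAggregatorsFactory(address(0xF2));
            bytes32 programHash = bytes32(uint256(1));

            HdpExecutionStore impl = new HdpExecutionStore();
            ERC1967Proxy proxy = new ERC1967Proxy(
                address(impl), abi.encodeCall(impl.initialize, (factsRegistry, aggregatorsFactory, programHash))
            );

            // State written by initialize lives in the proxy, so reads go through it.
            HdpExecutionStore store = HdpExecutionStore(address(proxy));
            assertEq(store.getProgramHash(), programHash);
        }
    }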