diff --git a/packages/contracts/contracts/rewards/RewardsManager.sol b/packages/contracts/contracts/rewards/RewardsManager.sol index 767449026..9a85b0274 100644 --- a/packages/contracts/contracts/rewards/RewardsManager.sol +++ b/packages/contracts/contracts/rewards/RewardsManager.sol @@ -7,15 +7,20 @@ pragma abicoder v2; // solhint-disable gas-increment-by-one, gas-indexed-events, gas-small-strings, gas-strict-inequalities import { SafeMath } from "@openzeppelin/contracts/math/SafeMath.sol"; +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; import { GraphUpgradeable } from "../upgrades/GraphUpgradeable.sol"; import { Managed } from "../governance/Managed.sol"; import { MathUtils } from "../staking/libs/MathUtils.sol"; import { IGraphToken } from "@graphprotocol/interfaces/contracts/contracts/token/IGraphToken.sol"; -import { RewardsManagerV5Storage } from "./RewardsManagerStorage.sol"; +import { RewardsManagerV6Storage } from "./RewardsManagerStorage.sol"; import { IRewardsIssuer } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsIssuer.sol"; import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsManager.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; +import { RewardsReclaim } from "@graphprotocol/interfaces/contracts/contracts/rewards/RewardsReclaim.sol"; /** * @title Rewards Manager Contract @@ -27,6 +32,10 @@ import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/r * total rewards for the Subgraph are split up for each Indexer based on much they have Staked on * that Subgraph. 
* + * @dev If an `issuanceAllocator` is set, it is used to determine the amount of GRT to be issued per block. + * Otherwise, the `issuancePerBlock` variable is used. In relation to the IssuanceAllocator, this contract + * is a self-minting target responsible for directly minting allocated GRT. + * * Note: * The contract provides getter functions to query the state of accrued rewards: * - getAccRewardsPerSignal @@ -37,7 +46,7 @@ import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/r * until the actual takeRewards function is called. * custom:security-contact Please email security+contracts@ thegraph.com (remove space) if you find any bugs. We might have an active bug bounty program. */ -contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsManager { +contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, IRewardsManager, IIssuanceTarget { using SafeMath for uint256; /// @dev Fixed point scaling factor used for decimals in reward calculations @@ -61,6 +70,14 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa */ event RewardsDenied(address indexed indexer, address indexed allocationID); + /** + * @notice Emitted when rewards are denied to an indexer due to eligibility + * @param indexer Address of the indexer being denied rewards + * @param allocationID Address of the allocation being denied rewards + * @param amount Amount of rewards that would have been assigned + */ + event RewardsDeniedDueToEligibility(address indexed indexer, address indexed allocationID, uint256 amount); + /** * @notice Emitted when a subgraph is denied for claiming rewards * @param subgraphDeploymentID Subgraph deployment ID being denied @@ -75,6 +92,49 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa */ event SubgraphServiceSet(address indexed oldSubgraphService, address indexed newSubgraphService); + /** + * @notice Emitted when the issuance allocator 
is set + * @param oldIssuanceAllocator Previous issuance allocator address + * @param newIssuanceAllocator New issuance allocator address + */ + event IssuanceAllocatorSet(address indexed oldIssuanceAllocator, address indexed newIssuanceAllocator); + + /** + * @notice Emitted when the rewards eligibility oracle contract is set + * @param oldRewardsEligibilityOracle Previous rewards eligibility oracle address + * @param newRewardsEligibilityOracle New rewards eligibility oracle address + */ + event RewardsEligibilityOracleSet( + address indexed oldRewardsEligibilityOracle, + address indexed newRewardsEligibilityOracle + ); + + /** + * @notice Emitted when a reclaim address is set + * @param reason The reclaim reason identifier + * @param oldAddress Previous address + * @param newAddress New address + */ + event ReclaimAddressSet(bytes32 indexed reason, address indexed oldAddress, address indexed newAddress); + + /** + * @notice Emitted when rewards are reclaimed to a configured address + * @param reason The reclaim reason identifier + * @param amount Amount of rewards reclaimed + * @param indexer Address of the indexer + * @param allocationID Address of the allocation + * @param subgraphDeploymentID Subgraph deployment ID for the allocation + * @param data Additional context data for the reclaim + */ + event RewardsReclaimed( + bytes32 indexed reason, + uint256 amount, + address indexed indexer, + address indexed allocationID, + bytes32 subgraphDeploymentID, + bytes data + ); + // -- Modifiers -- /** @@ -93,12 +153,27 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa Managed._initialize(_controller); } + /** + * @inheritdoc IERC165 + * @dev Implements ERC165 interface detection + * Returns true if this contract implements the interface defined by interfaceId. 
+ * See: https://eips.ethereum.org/EIPS/eip-165 + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return + interfaceId == type(IERC165).interfaceId || + interfaceId == type(IIssuanceTarget).interfaceId || + interfaceId == type(IRewardsManager).interfaceId; + } + // -- Config -- /** * @inheritdoc IRewardsManager + * @dev When an IssuanceAllocator is set, the effective issuance will be determined by the allocator, + * but this local value can still be updated for cases when the allocator is later removed. * - * @dev The issuance is defined as a fixed amount of rewards per block in GRT. + * The issuance is defined as a fixed amount of rewards per block in GRT. * Whenever this function is called in layer 2, the updateL2MintAllowance function * _must_ be called on the L1GraphTokenGateway in L1, to ensure the bridge can mint the * right amount of tokens. @@ -152,6 +227,90 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa emit SubgraphServiceSet(oldSubgraphService, _subgraphService); } + /** + * @inheritdoc IIssuanceTarget + * @dev This function facilitates upgrades by providing a standard way for targets + * to change their allocator. Only the governor can call this function. + * Note that the IssuanceAllocator can be set to the zero address to disable use of an allocator, and + * use the local `issuancePerBlock` variable instead to control issuance. 
+ */ + function setIssuanceAllocator(address newIssuanceAllocator) external override onlyGovernor { + if (address(issuanceAllocator) != newIssuanceAllocator) { + // Update rewards calculation before changing the issuance allocator + updateAccRewardsPerSignal(); + + // Check that the contract supports the IIssuanceAllocationDistribution interface + // Allow zero address to disable the allocator + if (newIssuanceAllocator != address(0)) { + require( + IERC165(newIssuanceAllocator).supportsInterface(type(IIssuanceAllocationDistribution).interfaceId), + "Contract does not support IIssuanceAllocationDistribution interface" + ); + } + + address oldIssuanceAllocator = address(issuanceAllocator); + issuanceAllocator = IIssuanceAllocationDistribution(newIssuanceAllocator); + emit IssuanceAllocatorSet(oldIssuanceAllocator, newIssuanceAllocator); + } + } + + /** + * @inheritdoc IIssuanceTarget + * @dev Ensures that all reward calculations are up-to-date with the current block + * before any allocation changes take effect. + * + * This function can be called by anyone to update the rewards calculation state. + * The IssuanceAllocator calls this function before changing a target's allocation to ensure + * all issuance is properly accounted for with the current issuance rate before applying an + * issuance allocation change. + */ + function beforeIssuanceAllocationChange() external override { + // Update rewards calculation with the current issuance rate + updateAccRewardsPerSignal(); + } + + /** + * @inheritdoc IRewardsManager + * @dev Note that the rewards eligibility oracle can be set to the zero address to disable use of an oracle, in + * which case no indexers will be denied rewards due to eligibility. 
+ */ + function setRewardsEligibilityOracle(address newRewardsEligibilityOracle) external override onlyGovernor { + if (address(rewardsEligibilityOracle) != newRewardsEligibilityOracle) { + // Check that the contract supports the IRewardsEligibility interface + // Allow zero address to disable the oracle + if (newRewardsEligibilityOracle != address(0)) { + require( + IERC165(newRewardsEligibilityOracle).supportsInterface(type(IRewardsEligibility).interfaceId), + "Contract does not support IRewardsEligibility interface" + ); + } + + address oldRewardsEligibilityOracle = address(rewardsEligibilityOracle); + rewardsEligibilityOracle = IRewardsEligibility(newRewardsEligibilityOracle); + emit RewardsEligibilityOracleSet(oldRewardsEligibilityOracle, newRewardsEligibilityOracle); + } + } + + /** + * @inheritdoc IRewardsManager + * @dev bytes32(0) is reserved as an invalid reason to prevent accidental misconfiguration + * and catch uninitialized reason identifiers. + * + * IMPORTANT: Changes take effect immediately and retroactively. All unclaimed rewards from + * previous periods will be sent to the new reclaim address when they are eventually reclaimed, + * regardless of which address was configured when the rewards were originally accrued. 
+ */ + function setReclaimAddress(bytes32 reason, address newAddress) external override onlyGovernor { + require(reason != bytes32(0), "Cannot set reclaim address for (bytes32(0))"); + + address oldAddress = reclaimAddresses[reason]; + + if (oldAddress != newAddress) { + reclaimAddresses[reason] = newAddress; + emit ReclaimAddressSet(reason, oldAddress, newAddress); + } + } + // -- Denylist -- /** @@ -180,6 +339,17 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa // -- Getters -- + /** + * @inheritdoc IRewardsManager + * @dev Gets the effective issuance per block, taking into account the IssuanceAllocator if set + */ + function getRewardsIssuancePerBlock() public view override returns (uint256) { + return + address(issuanceAllocator) != address(0) + ? issuanceAllocator.getTargetIssuancePerBlock(address(this)).selfIssuanceRate + : issuancePerBlock; + } + /** * @inheritdoc IRewardsManager * @dev Linear formula: `x = r * t` @@ -197,8 +367,10 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa if (t == 0) { return 0; } - // ...or if issuance is zero - if (issuancePerBlock == 0) { + + uint256 rewardsIssuancePerBlock = getRewardsIssuancePerBlock(); + + if (rewardsIssuancePerBlock == 0) { return 0; } @@ -209,7 +381,7 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa return 0; } - uint256 x = issuancePerBlock.mul(t); + uint256 x = rewardsIssuancePerBlock.mul(t); // Get the new issuance per signalled token // We multiply the decimals to keep the precision as fixed-point number @@ -370,51 +542,169 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa } /** - * @inheritdoc IRewardsManager - * @dev This function can only be called by an authorized rewards issuer which are - * the staking contract (for legacy allocations), and the subgraph service (for new allocations). - * Mints 0 tokens if the allocation is not active. 
+ * @notice Calculate rewards for an allocation + * @param rewardsIssuer Address of the rewards issuer calling the function + * @param allocationID Address of the allocation + * @return rewards Amount of rewards calculated + * @return indexer Address of the indexer + * @return subgraphDeploymentID Subgraph deployment ID */ - function takeRewards(address _allocationID) external override returns (uint256) { - address rewardsIssuer = msg.sender; - require( - rewardsIssuer == address(staking()) || rewardsIssuer == address(subgraphService), - "Caller must be a rewards issuer" - ); - + function _calcAllocationRewards( + address rewardsIssuer, + address allocationID + ) private returns (uint256 rewards, address indexer, bytes32 subgraphDeploymentID) { ( bool isActive, - address indexer, - bytes32 subgraphDeploymentID, + address _indexer, + bytes32 _subgraphDeploymentID, uint256 tokens, uint256 accRewardsPerAllocatedToken, uint256 accRewardsPending - ) = IRewardsIssuer(rewardsIssuer).getAllocationData(_allocationID); + ) = IRewardsIssuer(rewardsIssuer).getAllocationData(allocationID); + + uint256 updatedAccRewardsPerAllocatedToken = onSubgraphAllocationUpdate(_subgraphDeploymentID); + + rewards = isActive + ? 
accRewardsPending.add( + _calcRewards(tokens, accRewardsPerAllocatedToken, updatedAccRewardsPerAllocatedToken) + ) + : 0; + + indexer = _indexer; + subgraphDeploymentID = _subgraphDeploymentID; + } - uint256 updatedAccRewardsPerAllocatedToken = onSubgraphAllocationUpdate(subgraphDeploymentID); + /** + * @notice Common function to reclaim rewards to a configured address + * @param reason The reclaim reason identifier + * @param rewards Amount of rewards to reclaim + * @param indexer Address of the indexer + * @param allocationID Address of the allocation + * @param subgraphDeploymentID Subgraph deployment ID for the allocation + * @param data Additional context data for the reclaim + * @return reclaimed The amount of rewards that were reclaimed (0 if no reclaim address set) + */ + function _reclaimRewards( + bytes32 reason, + uint256 rewards, + address indexer, + address allocationID, + bytes32 subgraphDeploymentID, + bytes memory data + ) private returns (uint256 reclaimed) { + address target = reclaimAddresses[reason]; + if (0 < rewards && target != address(0)) { + graphToken().mint(target, rewards); + emit RewardsReclaimed(reason, rewards, indexer, allocationID, subgraphDeploymentID, data); + reclaimed = rewards; + } + } - // Do not do rewards on denied subgraph deployments ID + /** + * @notice Check if rewards should be denied and attempt to reclaim them + * @param rewards Amount of rewards to check + * @param indexer Address of the indexer + * @param allocationID Address of the allocation + * @param subgraphDeploymentID Subgraph deployment ID for the allocation + * @return denied True if rewards should be denied (either reclaimed or dropped), false if they should be minted + * @dev First successful reclaim wins - checks performed in order with short-circuit on reclaim: + * 1. Subgraph deny list: emit RewardsDenied. If reclaim address set → reclaim and return (STOP, eligibility not checked) + * 2. 
Indexer eligibility: Checked if subgraph not denied OR denied without reclaim address. Emit RewardsDeniedDueToEligibility. If reclaim address set → reclaim and return + * Multiple denial events may be emitted only when multiple checks fail without reclaim addresses configured. + * Any failing check without a reclaim address still denies rewards (drops them without minting). + */ + function _deniedRewards( + uint256 rewards, + address indexer, + address allocationID, + bytes32 subgraphDeploymentID + ) private returns (bool denied) { if (isDenied(subgraphDeploymentID)) { - emit RewardsDenied(indexer, _allocationID); - return 0; + emit RewardsDenied(indexer, allocationID); + if ( + 0 < + _reclaimRewards( + RewardsReclaim.SUBGRAPH_DENIED, + rewards, + indexer, + allocationID, + subgraphDeploymentID, + "" + ) + ) { + return true; // Successfully reclaimed, deny rewards + } + denied = true; // Denied but no reclaim address } - uint256 rewards = 0; - if (isActive) { - // Calculate rewards accrued by this allocation - rewards = accRewardsPending.add( - _calcRewards(tokens, accRewardsPerAllocatedToken, updatedAccRewardsPerAllocatedToken) - ); - if (rewards > 0) { - // Mint directly to rewards issuer for the reward amount - // The rewards issuer contract will do bookkeeping of the reward and - // assign in proportion to each stakeholder incentive - graphToken().mint(rewardsIssuer, rewards); + if (address(rewardsEligibilityOracle) != address(0) && !rewardsEligibilityOracle.isEligible(indexer)) { + emit RewardsDeniedDueToEligibility(indexer, allocationID, rewards); + if ( + 0 < + _reclaimRewards( + RewardsReclaim.INDEXER_INELIGIBLE, + rewards, + indexer, + allocationID, + subgraphDeploymentID, + "" + ) + ) { + return true; // Successfully reclaimed, deny rewards } + denied = true; // Denied but no reclaim address } + } + /** + * @inheritdoc IRewardsManager + * @dev This function can only be called by an authorized rewards issuer which are + * the staking contract (for legacy 
allocations), and the subgraph service (for new allocations). + * Mints 0 tokens if the allocation is not active. + * @dev First successful reclaim wins - short-circuits on reclaim: + * - If subgraph denied with reclaim address → reclaim to SUBGRAPH_DENIED address (eligibility NOT checked) + * - If subgraph not denied OR denied without address, then check eligibility → reclaim to INDEXER_INELIGIBLE if configured + * - Subsequent denial emitted only when earlier denial has no reclaim address + * - Any denial without reclaim address drops rewards (no minting) + */ + function takeRewards(address _allocationID) external override returns (uint256) { + address rewardsIssuer = msg.sender; + require( + rewardsIssuer == address(staking()) || rewardsIssuer == address(subgraphService), + "Caller must be a rewards issuer" + ); + + (uint256 rewards, address indexer, bytes32 subgraphDeploymentID) = _calcAllocationRewards( + rewardsIssuer, + _allocationID + ); + + if (rewards == 0) return 0; + if (_deniedRewards(rewards, indexer, _allocationID, subgraphDeploymentID)) return 0; + + graphToken().mint(rewardsIssuer, rewards); emit HorizonRewardsAssigned(indexer, _allocationID, rewards); return rewards; } + + /** + * @inheritdoc IRewardsManager + * @dev bytes32(0) as a reason is reserved as a no-op and will not be reclaimed. 
+ */ + function reclaimRewards( + bytes32 reason, + address allocationID, + bytes calldata data + ) external override returns (uint256) { + address rewardsIssuer = msg.sender; + require(rewardsIssuer == address(subgraphService), "Not a rewards issuer"); + + (uint256 rewards, address indexer, bytes32 subgraphDeploymentID) = _calcAllocationRewards( + rewardsIssuer, + allocationID + ); + + return _reclaimRewards(reason, rewards, indexer, allocationID, subgraphDeploymentID, data); + } } diff --git a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol index d78eb81ef..5cc134bf7 100644 --- a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol +++ b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol @@ -7,6 +7,8 @@ pragma solidity ^0.7.6 || 0.8.27; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; import { IRewardsIssuer } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsIssuer.sol"; import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsManager.sol"; import { Managed } from "../governance/Managed.sol"; @@ -76,3 +78,19 @@ contract RewardsManagerV5Storage is RewardsManagerV4Storage { /// @notice Address of the subgraph service IRewardsIssuer public subgraphService; } + +/** + * @title RewardsManagerV6Storage + * @author Edge & Node + * @notice Storage layout for RewardsManager V6 + * Includes support for Rewards Eligibility Oracle, Issuance Allocator, and reclaim addresses. 
+ */ +contract RewardsManagerV6Storage is RewardsManagerV5Storage { + /// @notice Address of the rewards eligibility oracle contract + IRewardsEligibility public rewardsEligibilityOracle; + /// @notice Address of the issuance allocator + IIssuanceAllocationDistribution public issuanceAllocator; + /// @notice Mapping of reclaim reason identifiers to reclaim addresses + /// @dev Uses bytes32 for extensibility. See RewardsReclaim library for canonical reasons. + mapping(bytes32 => address) public reclaimAddresses; +} diff --git a/packages/contracts/contracts/tests/MockERC165.sol b/packages/contracts/contracts/tests/MockERC165.sol new file mode 100644 index 000000000..056493fd3 --- /dev/null +++ b/packages/contracts/contracts/tests/MockERC165.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.7.6; + +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; + +/** + * @title MockERC165 + * @author Edge & Node + * @dev Minimal implementation of IERC165 for testing + * @notice Used to test interface validation - supports only ERC165, not specific interfaces + */ +contract MockERC165 is IERC165 { + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/contracts/contracts/tests/MockIssuanceAllocator.sol b/packages/contracts/contracts/tests/MockIssuanceAllocator.sol new file mode 100644 index 000000000..6113b8bc0 --- /dev/null +++ b/packages/contracts/contracts/tests/MockIssuanceAllocator.sol @@ -0,0 +1,76 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +// solhint-disable gas-increment-by-one, gas-indexed-events, named-parameters-mapping, use-natspec + +pragma solidity 0.7.6; +pragma abicoder v2; + +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; +import { TargetIssuancePerBlock } from 
"@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; + +/** + * @title MockIssuanceAllocator + * @dev A simple mock contract for the IssuanceAllocator interfaces used by RewardsManager. + */ +contract MockIssuanceAllocator is IERC165, IIssuanceAllocationDistribution { + /// @dev Mapping to store TargetIssuancePerBlock for each target + mapping(address => TargetIssuancePerBlock) private _targetIssuance; + + /** + * @dev Call beforeIssuanceAllocationChange on a target + * @param target The target contract address + */ + function callBeforeIssuanceAllocationChange(address target) external { + IIssuanceTarget(target).beforeIssuanceAllocationChange(); + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + */ + function getTargetIssuancePerBlock(address target) external view override returns (TargetIssuancePerBlock memory) { + return _targetIssuance[target]; + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + * @dev Mock always returns current block number + */ + function distributeIssuance() external view override returns (uint256) { + return block.number; + } + + /** + * @dev Set target issuance directly for testing + * @param target The target contract address + * @param allocatorIssuance The allocator issuance per block + * @param selfIssuance The self issuance per block + * @param callBefore Whether to call beforeIssuanceAllocationChange on the target + */ + function setTargetAllocation( + address target, + uint256 allocatorIssuance, + uint256 selfIssuance, + bool callBefore + ) external { + if (callBefore) { + IIssuanceTarget(target).beforeIssuanceAllocationChange(); + } + _targetIssuance[target] = TargetIssuancePerBlock({ + allocatorIssuanceRate: allocatorIssuance, + 
allocatorIssuanceBlockAppliedTo: block.number, + selfIssuanceRate: selfIssuance, + selfIssuanceBlockAppliedTo: block.number + }); + } + + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return + interfaceId == type(IIssuanceAllocationDistribution).interfaceId || + interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol b/packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol new file mode 100644 index 000000000..6b13d4d76 --- /dev/null +++ b/packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol @@ -0,0 +1,71 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +// solhint-disable named-parameters-mapping + +pragma solidity 0.7.6; + +import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; + +/** + * @title MockRewardsEligibilityOracle + * @author Edge & Node + * @notice A simple mock contract for the RewardsEligibilityOracle interface + * @dev A simple mock contract for the RewardsEligibilityOracle interface + */ +contract MockRewardsEligibilityOracle is IRewardsEligibility, IERC165 { + /// @dev Mapping to store eligibility status for each indexer + mapping(address => bool) private eligible; + + /// @dev Mapping to track which indexers have been explicitly set + mapping(address => bool) private isSet; + + /// @dev Default response for indexers not explicitly set + bool private defaultResponse; + + /** + * @notice Constructor + * @param newDefaultResponse Default response for isEligible + */ + constructor(bool newDefaultResponse) { + defaultResponse = newDefaultResponse; + } + + /** + * @notice Set whether a specific indexer is eligible + * @param indexer The indexer address + * @param eligibility Whether the indexer is eligible + */ + function 
setIndexerEligible(address indexer, bool eligibility) external { + eligible[indexer] = eligibility; + isSet[indexer] = true; + } + + /** + * @notice Set the default response for indexers not explicitly set + * @param newDefaultResponse The default response + */ + function setDefaultResponse(bool newDefaultResponse) external { + defaultResponse = newDefaultResponse; + } + + /** + * @inheritdoc IRewardsEligibility + */ + function isEligible(address indexer) external view override returns (bool) { + // If the indexer has been explicitly set, return that value + if (isSet[indexer]) { + return eligible[indexer]; + } + + // Otherwise return the default response + return defaultResponse; + } + + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return interfaceId == type(IRewardsEligibility).interfaceId || interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/contracts/contracts/tests/MockSubgraphService.sol b/packages/contracts/contracts/tests/MockSubgraphService.sol new file mode 100644 index 000000000..75049b399 --- /dev/null +++ b/packages/contracts/contracts/tests/MockSubgraphService.sol @@ -0,0 +1,129 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +// solhint-disable named-parameters-mapping + +pragma solidity 0.7.6; + +import { IRewardsIssuer } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsIssuer.sol"; + +/** + * @title MockSubgraphService + * @author Edge & Node + * @notice A mock contract for testing SubgraphService as a rewards issuer + * @dev Implements IRewardsIssuer interface to simulate SubgraphService behavior in tests + */ +contract MockSubgraphService is IRewardsIssuer { + /// @dev Struct to store allocation data + struct Allocation { + bool isActive; + address indexer; + bytes32 subgraphDeploymentId; + uint256 tokens; + uint256 accRewardsPerAllocatedToken; + uint256 accRewardsPending; + } + + /// @dev Mapping of allocation ID to allocation 
data + mapping(address => Allocation) private allocations; + + /// @dev Mapping of subgraph deployment ID to total allocated tokens + mapping(bytes32 => uint256) private subgraphAllocatedTokens; + + /** + * @notice Set allocation data for testing + * @param allocationId The allocation ID + * @param isActive Whether the allocation is active + * @param indexer The indexer address + * @param subgraphDeploymentId The subgraph deployment ID + * @param tokens Amount of allocated tokens + * @param accRewardsPerAllocatedToken Rewards snapshot + * @param accRewardsPending Accumulated rewards pending + */ + function setAllocation( + address allocationId, + bool isActive, + address indexer, + bytes32 subgraphDeploymentId, + uint256 tokens, + uint256 accRewardsPerAllocatedToken, + uint256 accRewardsPending + ) external { + allocations[allocationId] = Allocation({ + isActive: isActive, + indexer: indexer, + subgraphDeploymentId: subgraphDeploymentId, + tokens: tokens, + accRewardsPerAllocatedToken: accRewardsPerAllocatedToken, + accRewardsPending: accRewardsPending + }); + } + + /** + * @notice Set total allocated tokens for a subgraph + * @param subgraphDeploymentId The subgraph deployment ID + * @param tokens Total tokens allocated + */ + function setSubgraphAllocatedTokens(bytes32 subgraphDeploymentId, uint256 tokens) external { + subgraphAllocatedTokens[subgraphDeploymentId] = tokens; + } + + /** + * @inheritdoc IRewardsIssuer + */ + function getAllocationData( + address allocationId + ) + external + view + override + returns ( + bool isActive, + address indexer, + bytes32 subgraphDeploymentId, + uint256 tokens, + uint256 accRewardsPerAllocatedToken, + uint256 accRewardsPending + ) + { + Allocation memory allocation = allocations[allocationId]; + return ( + allocation.isActive, + allocation.indexer, + allocation.subgraphDeploymentId, + allocation.tokens, + allocation.accRewardsPerAllocatedToken, + allocation.accRewardsPending + ); + } + + /** + * @inheritdoc IRewardsIssuer 
+ */ + function getSubgraphAllocatedTokens(bytes32 subgraphDeploymentId) external view override returns (uint256) { + return subgraphAllocatedTokens[subgraphDeploymentId]; + } + + /** + * @notice Helper function to call reclaimRewards on RewardsManager for testing + * @param rewardsManager Address of the RewardsManager contract + * @param reason Reason identifier for reclaiming rewards + * @param allocationId The allocation ID + * @param contextData Additional context data for the reclaim + * @return Amount of rewards reclaimed + */ + function callReclaimRewards( + address rewardsManager, + bytes32 reason, + address allocationId, + bytes calldata contextData + ) external returns (uint256) { + // Call reclaimRewards on the RewardsManager + // solhint-disable-next-line avoid-low-level-calls + (bool success, bytes memory data) = rewardsManager.call( + // solhint-disable-next-line gas-small-strings + abi.encodeWithSignature("reclaimRewards(bytes32,address,bytes)", reason, allocationId, contextData) + ); + require(success, "reclaimRewards call failed"); + return abi.decode(data, (uint256)); + } +} diff --git a/packages/contracts/test/.solcover.js b/packages/contracts/test/.solcover.js index 7181b78fa..125581cd1 100644 --- a/packages/contracts/test/.solcover.js +++ b/packages/contracts/test/.solcover.js @@ -1,4 +1,4 @@ -const skipFiles = ['bancor', 'ens', 'erc1056', 'arbitrum', 'tests/arbitrum'] +const skipFiles = ['bancor', 'ens', 'erc1056', 'arbitrum', 'tests', '*Mock.sol'] module.exports = { providerOptions: { diff --git a/packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts new file mode 100644 index 000000000..b100905b0 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts @@ -0,0 +1,389 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from 
'@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { + deriveChannelKey, + formatGRT, + GraphNetworkContracts, + helpers, + randomHexBytes, + toBN, + toGRT, +} from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { BigNumber as BN } from 'bignumber.js' +import { expect } from 'chai' +import { BigNumber, constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const { HashZero, WeiPerEther } = constants + +const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] + +describe('Rewards - Calculations', () => { + const graph = hre.graph() + let governor: SignerWithAddress + let curator1: SignerWithAddress + let curator2: SignerWithAddress + let indexer1: SignerWithAddress + let indexer2: SignerWithAddress + let assetHolder: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive some channel keys for each indexer used to sign attestations + const channelKey1 = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + const subgraphDeploymentID2 = randomHexBytes() + + const allocationID1 = channelKey1.address + + const metadata = HashZero + + const ISSUANCE_RATE_PERIODS = 4 // blocks required to issue 800 GRT rewards + const ISSUANCE_PER_BLOCK = toBN('200000000000000000000') // 200 GRT every block + + // Core formula that gets accumulated rewards per signal for a period of time + const getRewardsPerSignal = (k: BN, t: BN, s: BN): string => { + if (s.eq(0)) { + return '0' + } + return k.times(t).div(s).toPrecision(18).toString() + } + + // Tracks the accumulated rewards as totalSignalled or supply changes across snapshots + class RewardsTracker { + 
totalSignalled = BigNumber.from(0) + lastUpdatedBlock = 0 + accumulated = BigNumber.from(0) + + static async create() { + const tracker = new RewardsTracker() + await tracker.snapshot() + return tracker + } + + async snapshot() { + this.accumulated = this.accumulated.add(await this.accrued()) + this.totalSignalled = await grt.balanceOf(curation.address) + this.lastUpdatedBlock = await helpers.latestBlock() + return this + } + + async elapsedBlocks() { + const currentBlock = await helpers.latestBlock() + return currentBlock - this.lastUpdatedBlock + } + + async accrued() { + const nBlocks = await this.elapsedBlocks() + return this.accruedByElapsed(nBlocks) + } + + accruedByElapsed(nBlocks: BigNumber | number) { + const n = getRewardsPerSignal( + new BN(ISSUANCE_PER_BLOCK.toString()), + new BN(nBlocks.toString()), + new BN(this.totalSignalled.toString()), + ) + return toGRT(n) + } + } + + // Test accumulated rewards per signal + const shouldGetNewRewardsPerSignal = async (nBlocks = ISSUANCE_RATE_PERIODS) => { + // -- t0 -- + const tracker = await RewardsTracker.create() + + // Jump + await helpers.mine(nBlocks) + + // -- t1 -- + + // Contract calculation + const contractAccrued = await rewardsManager.getNewRewardsPerSignal() + // Local calculation + const expectedAccrued = await tracker.accrued() + + // Check + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + return expectedAccrued + } + + before(async function () { + const testAccounts = await graph.getTestAccounts() + ;[indexer1, indexer2, curator1, curator2, assetHolder] = testAccounts + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await 
rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, indexer2, curator1, curator2, assetHolder]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + context('issuing rewards', function () { + beforeEach(async function () { + // 5% minute rate (4 blocks) + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + }) + + describe('getNewRewardsPerSignal', function () { + it('accrued per signal when no tokens signalled', async function () { + // When there is no tokens signalled no rewards are accrued + await helpers.mineEpoch(epochManager) + const accrued = await rewardsManager.getNewRewardsPerSignal() + expect(accrued).eq(0) + }) + + it('accrued per signal when tokens signalled', async function () { + // Update total signalled + const tokensToSignal = toGRT('1000') + await curation.connect(curator1).mint(subgraphDeploymentID1, tokensToSignal, 0) + + // Check + await shouldGetNewRewardsPerSignal() + }) + + it('accrued per signal when signalled tokens w/ many subgraphs', async function () { + // Update total signalled + await curation.connect(curator1).mint(subgraphDeploymentID1, toGRT('1000'), 0) + + // Check + await shouldGetNewRewardsPerSignal() + + // Update total signalled + await curation.connect(curator2).mint(subgraphDeploymentID2, toGRT('250'), 0) + + // Check + await shouldGetNewRewardsPerSignal() + }) + }) + + describe('updateAccRewardsPerSignal', function () { + it('update the accumulated rewards per signal state', async function () { + // Update total signalled + await curation.connect(curator1).mint(subgraphDeploymentID1, toGRT('1000'), 0) + // 
Snapshot + const tracker = await RewardsTracker.create() + + // Update + await rewardsManager.connect(governor).updateAccRewardsPerSignal() + const contractAccrued = await rewardsManager.accRewardsPerSignal() + + // Check + const expectedAccrued = await tracker.accrued() + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + }) + + it('update the accumulated rewards per signal state after many blocks', async function () { + // Update total signalled + await curation.connect(curator1).mint(subgraphDeploymentID1, toGRT('1000'), 0) + // Snapshot + const tracker = await RewardsTracker.create() + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Update + await rewardsManager.connect(governor).updateAccRewardsPerSignal() + const contractAccrued = await rewardsManager.accRewardsPerSignal() + + // Check + const expectedAccrued = await tracker.accrued() + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + }) + }) + + describe('getAccRewardsForSubgraph', function () { + it('accrued for each subgraph', async function () { + // Curator1 - Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + const tracker1 = await RewardsTracker.create() + + // Curator2 - Update total signalled + const signalled2 = toGRT('500') + await curation.connect(curator2).mint(subgraphDeploymentID2, signalled2, 0) + + // Snapshot + const tracker2 = await RewardsTracker.create() + await tracker1.snapshot() + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Snapshot + await tracker1.snapshot() + await tracker2.snapshot() + + // Calculate rewards + const rewardsPerSignal1 = tracker1.accumulated + const rewardsPerSignal2 = tracker2.accumulated + const expectedRewardsSG1 = rewardsPerSignal1.mul(signalled1).div(WeiPerEther) + const expectedRewardsSG2 = rewardsPerSignal2.mul(signalled2).div(WeiPerEther) + + // Get rewards from contract + const contractRewardsSG1 = await 
rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + const contractRewardsSG2 = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID2) + + // Check + expect(toRound(expectedRewardsSG1)).eq(toRound(contractRewardsSG1)) + expect(toRound(expectedRewardsSG2)).eq(toRound(contractRewardsSG2)) + }) + + it('should return zero rewards when subgraph signal is below minimum threshold', async function () { + // Set a high minimum signal threshold + const highMinimumSignal = toGRT('2000') + await rewardsManager.connect(governor).setMinimumSubgraphSignal(highMinimumSignal) + + // Signal less than the minimum threshold + const lowSignal = toGRT('1000') + await curation.connect(curator1).mint(subgraphDeploymentID1, lowSignal, 0) + + // Jump some blocks to potentially accrue rewards + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Check that no rewards are accrued due to minimum signal threshold + const contractRewards = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + expect(contractRewards).eq(0) + }) + }) + + describe('onSubgraphSignalUpdate', function () { + it('update the accumulated rewards for subgraph state', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + // Snapshot + const tracker1 = await RewardsTracker.create() + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Update + await rewardsManager.connect(governor).onSubgraphSignalUpdate(subgraphDeploymentID1) + + // Check + const contractRewardsSG1 = (await rewardsManager.subgraphs(subgraphDeploymentID1)).accRewardsForSubgraph + const rewardsPerSignal1 = await tracker1.accrued() + const expectedRewardsSG1 = rewardsPerSignal1.mul(signalled1).div(WeiPerEther) + expect(toRound(expectedRewardsSG1)).eq(toRound(contractRewardsSG1)) + + const contractAccrued = await rewardsManager.accRewardsPerSignal() + const expectedAccrued = await tracker1.accrued() + 
expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + + const contractBlockUpdated = await rewardsManager.accRewardsPerSignalLastBlockUpdated() + const expectedBlockUpdated = await helpers.latestBlock() + expect(expectedBlockUpdated).eq(contractBlockUpdated) + }) + }) + + describe('getAccRewardsPerAllocatedToken', function () { + it('accrued per allocated token', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Check + const sg1 = await rewardsManager.subgraphs(subgraphDeploymentID1) + // We trust this function because it was individually tested in previous test + const accRewardsForSubgraphSG1 = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + const accruedRewardsSG1 = accRewardsForSubgraphSG1.sub(sg1.accRewardsForSubgraphSnapshot) + const expectedRewardsAT1 = accruedRewardsSG1.mul(WeiPerEther).div(tokensToAllocate) + const contractRewardsAT1 = (await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1))[0] + expect(expectedRewardsAT1).eq(contractRewardsAT1) + }) + }) + + describe('onSubgraphAllocationUpdate', function () { + it('update the accumulated rewards for allocated tokens state', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + 
indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Prepare expected results + const expectedSubgraphRewards = toGRT('1400') // 7 blocks since signaling to when we do getAccRewardsForSubgraph + const expectedRewardsAT = toGRT('0.08') // allocated during 5 blocks: 1000 GRT, divided by 12500 allocated tokens + + // Update + await rewardsManager.connect(governor).onSubgraphAllocationUpdate(subgraphDeploymentID1) + + // Check on demand results saved + const subgraph = await rewardsManager.subgraphs(subgraphDeploymentID1) + const contractSubgraphRewards = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + const contractRewardsAT = subgraph.accRewardsPerAllocatedToken + + expect(toRound(expectedSubgraphRewards)).eq(toRound(contractSubgraphRewards)) + expect(toRound(expectedRewardsAT.mul(1000))).eq(toRound(contractRewardsAT.mul(1000))) + }) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-config.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-config.test.ts new file mode 100644 index 000000000..8edcbb113 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-config.test.ts @@ -0,0 +1,158 @@ +import { Curation } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { GraphNetworkContracts, helpers, randomHexBytes, toBN, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const ISSUANCE_PER_BLOCK = toBN('200000000000000000000') // 200 GRT every block + +describe('Rewards - Configuration', () => { + const graph = 
hre.graph() + let governor: SignerWithAddress + let indexer1: SignerWithAddress + let indexer2: SignerWithAddress + let curator1: SignerWithAddress + let curator2: SignerWithAddress + let oracle: SignerWithAddress + let assetHolder: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let staking: IStaking + let rewardsManager: RewardsManager + + const subgraphDeploymentID1 = randomHexBytes() + + before(async function () { + const testAccounts = await graph.getTestAccounts() + ;[indexer1, indexer2, curator1, curator2, oracle, assetHolder] = testAccounts + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, indexer2, curator1, curator2, assetHolder]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('configuration', function () { + describe('initialize', function () { + it('should revert when called on implementation contract', async function () { + // Try to call initialize on the implementation contract (should revert with onlyImpl) + const tx = rewardsManager.connect(governor).initialize(contracts.Controller.address) + await expect(tx).revertedWith('Only implementation') + }) + }) + + describe('issuance per block update', function () { + 
it('should reject set issuance per block if unauthorized', async function () { + const tx = rewardsManager.connect(indexer1).setIssuancePerBlock(toGRT('1.025')) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set issuance rate to minimum allowed (0)', async function () { + const newIssuancePerBlock = toGRT('0') + await rewardsManager.connect(governor).setIssuancePerBlock(newIssuancePerBlock) + expect(await rewardsManager.issuancePerBlock()).eq(newIssuancePerBlock) + }) + + it('should set issuance rate', async function () { + const newIssuancePerBlock = toGRT('100.025') + await rewardsManager.connect(governor).setIssuancePerBlock(newIssuancePerBlock) + expect(await rewardsManager.issuancePerBlock()).eq(newIssuancePerBlock) + expect(await rewardsManager.accRewardsPerSignalLastBlockUpdated()).eq(await helpers.latestBlock()) + }) + }) + + describe('subgraph availability service', function () { + it('should reject set subgraph oracle if unauthorized', async function () { + const tx = rewardsManager.connect(indexer1).setSubgraphAvailabilityOracle(oracle.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set subgraph oracle if governor', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + expect(await rewardsManager.subgraphAvailabilityOracle()).eq(oracle.address) + }) + + it('should reject to deny subgraph if not the oracle', async function () { + const tx = rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + await expect(tx).revertedWith('Caller must be the subgraph availability oracle') + }) + + it('should deny subgraph', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + + const tx = rewardsManager.connect(oracle).setDenied(subgraphDeploymentID1, true) + const blockNum = await helpers.latestBlock() + await expect(tx) + .emit(rewardsManager, 
'RewardsDenylistUpdated') + .withArgs(subgraphDeploymentID1, blockNum + 1) + expect(await rewardsManager.isDenied(subgraphDeploymentID1)).eq(true) + }) + + it('should allow removing subgraph from denylist', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + + // First deny the subgraph + await rewardsManager.connect(oracle).setDenied(subgraphDeploymentID1, true) + expect(await rewardsManager.isDenied(subgraphDeploymentID1)).eq(true) + + // Then remove from denylist + const tx = rewardsManager.connect(oracle).setDenied(subgraphDeploymentID1, false) + await expect(tx).emit(rewardsManager, 'RewardsDenylistUpdated').withArgs(subgraphDeploymentID1, 0) + expect(await rewardsManager.isDenied(subgraphDeploymentID1)).eq(false) + }) + + it('should reject setMinimumSubgraphSignal if unauthorized', async function () { + const tx = rewardsManager.connect(indexer1).setMinimumSubgraphSignal(toGRT('1000')) + await expect(tx).revertedWith('Not authorized') + }) + + it('should allow setMinimumSubgraphSignal from subgraph availability oracle', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + + const newMinimumSignal = toGRT('2000') + const tx = rewardsManager.connect(oracle).setMinimumSubgraphSignal(newMinimumSignal) + await expect(tx).emit(rewardsManager, 'ParameterUpdated').withArgs('minimumSubgraphSignal') + + expect(await rewardsManager.minimumSubgraphSignal()).eq(newMinimumSignal) + }) + + it('should allow setMinimumSubgraphSignal from governor', async function () { + const newMinimumSignal = toGRT('3000') + const tx = rewardsManager.connect(governor).setMinimumSubgraphSignal(newMinimumSignal) + await expect(tx).emit(rewardsManager, 'ParameterUpdated').withArgs('minimumSubgraphSignal') + + expect(await rewardsManager.minimumSubgraphSignal()).eq(newMinimumSignal) + }) + }) + }) +}) diff --git 
a/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts new file mode 100644 index 000000000..07a0ea0e2 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts @@ -0,0 +1,728 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { + deriveChannelKey, + formatGRT, + GraphNetworkContracts, + helpers, + randomHexBytes, + toBN, + toGRT, +} from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { BigNumber, constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const MAX_PPM = 1000000 + +// TODO: Behavior change - HorizonRewardsAssigned is no longer emitted when rewards == 0 +// Set to true if the old behavior is restored (emitting event for zero rewards) +const EMIT_EVENT_FOR_ZERO_REWARDS = false + +const { HashZero, WeiPerEther } = constants + +const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] + +describe('Rewards - Distribution', () => { + const graph = hre.graph() + let delegator: SignerWithAddress + let governor: SignerWithAddress + let curator1: SignerWithAddress + let curator2: SignerWithAddress + let indexer1: SignerWithAddress + let assetHolder: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive some channel keys for each indexer used to sign attestations + const channelKey1 = deriveChannelKey() + const channelKey2 = deriveChannelKey() + const 
channelKeyNull = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + const subgraphDeploymentID2 = randomHexBytes() + + const allocationID1 = channelKey1.address + const allocationID2 = channelKey2.address + const allocationIDNull = channelKeyNull.address + + const metadata = HashZero + + const ISSUANCE_RATE_PERIODS = 4 // blocks required to issue 800 GRT rewards + const ISSUANCE_PER_BLOCK = toBN('200000000000000000000') // 200 GRT every block + + before(async function () { + ;[delegator, curator1, curator2, indexer1, assetHolder] = await graph.getTestAccounts() + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1, curator2, assetHolder]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + context('issuing rewards', function () { + beforeEach(async function () { + // 5% minute rate (4 blocks) + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + }) + + describe('getRewards', function () { + it('calculate rewards using the subgraph signalled + allocated tokens', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const 
tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Rewards + const contractRewards = await rewardsManager.getRewards(staking.address, allocationID1) + + // We trust using this function in the test because we tested it + // standalone in a previous test + const contractRewardsAT1 = (await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1))[0] + + const expectedRewards = contractRewardsAT1.mul(tokensToAllocate).div(WeiPerEther) + expect(expectedRewards).eq(contractRewards) + }) + it('rewards should be zero if the allocation is closed', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + await helpers.mineEpoch(epochManager) + + // Close allocation + await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Rewards + const contractRewards = await rewardsManager.getRewards(staking.address, allocationID1) + expect(contractRewards).eq(BigNumber.from(0)) + }) + it('rewards should be zero if the allocation does not exist', async function () { + // Rewards + const contractRewards = await rewardsManager.getRewards(staking.address, allocationIDNull) + expect(contractRewards).eq(BigNumber.from(0)) + }) + }) + + describe('takeRewards', function () { 
+ interface DelegationParameters { + indexingRewardCut: BigNumber + queryFeeCut: BigNumber + cooldownBlocks: number + } + + async function setupIndexerAllocation() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + async function setupIndexerAllocationSignalingAfter() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + } + + async function setupIndexerAllocationWithDelegation( + tokensToDelegate: BigNumber, + delegationParams: DelegationParameters, + ) { + const tokensToAllocate = toGRT('12500') + + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Transfer some funds from the curator, I don't want to mint new tokens + await grt.connect(curator1).transfer(delegator.address, tokensToDelegate) + await grt.connect(delegator).approve(staking.address, tokensToDelegate) + + // Stake and set delegation parameters + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .setDelegationParameters(delegationParams.indexingRewardCut, 
delegationParams.queryFeeCut, 0) + + // Delegate + await staking.connect(delegator).delegate(indexer1.address, tokensToDelegate) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + it('should distribute rewards on closed allocation and stake', async function () { + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const beforeIndexer1Balance = await grt.balanceOf(indexer1.address) + const beforeStakingBalance = await grt.balanceOf(staking.address) + + // All the rewards in this subgraph go to this allocation. + // Rewards per token will be (issuancePerBlock * nBlocks) / allocatedTokens + // The first snapshot is after allocating, that is 2 blocks after the signal is minted. + // The final snapshot is when we close the allocation, that happens 9 blocks after signal is minted. + // So the rewards will be ((issuancePerBlock * 7) / allocatedTokens) * allocatedTokens + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. 
At this point rewards should be collected for that indexer + const tx = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt = await tx.wait() + const event = rewardsManager.interface.parseLog(receipt.logs[1]).args + expect(event.indexer).eq(indexer1.address) + expect(event.allocationID).eq(allocationID1) + expect(toRound(event.amount)).eq(toRound(expectedIndexingRewards)) + + // After state + const afterTokenSupply = await grt.totalSupply() + const afterIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const afterIndexer1Balance = await grt.balanceOf(indexer1.address) + const afterStakingBalance = await grt.balanceOf(staking.address) + + // Check that rewards are put into indexer stake + const expectedIndexerStake = beforeIndexer1Stake.add(expectedIndexingRewards) + const expectedTokenSupply = beforeTokenSupply.add(expectedIndexingRewards) + // Check stake should have increased with the rewards staked + expect(toRound(afterIndexer1Stake)).eq(toRound(expectedIndexerStake)) + // Check indexer balance remains the same + expect(afterIndexer1Balance).eq(beforeIndexer1Balance) + // Check indexing rewards are kept in the staking contract + expect(toRound(afterStakingBalance)).eq(toRound(beforeStakingBalance.add(expectedIndexingRewards))) + // Check that tokens have been minted + expect(toRound(afterTokenSupply)).eq(toRound(expectedTokenSupply)) + }) + + it('does not revert with an underflow if the minimum signal changes', async function () { + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + await rewardsManager.connect(governor).setMinimumSubgraphSignal(toGRT(14000)) + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocation. 
At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) + + it('does not revert with an underflow if the minimum signal changes, and signal came after allocation', async function () { + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocationSignalingAfter() + + await rewardsManager.connect(governor).setMinimumSubgraphSignal(toGRT(14000)) + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocation. At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) + + it('does not revert if signal was already under minimum', async function () { + await rewardsManager.connect(governor).setMinimumSubgraphSignal(toGRT(2000)) + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + // Close allocation. 
At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) + + it('should distribute rewards on closed allocation and send to destination', async function () { + const destinationAddress = randomHexBytes(20) + await staking.connect(indexer1).setRewardsDestination(destinationAddress) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const beforeDestinationBalance = await grt.balanceOf(destinationAddress) + const beforeStakingBalance = await grt.balanceOf(staking.address) + + // All the rewards in this subgraph go to this allocation. + // Rewards per token will be (issuancePerBlock * nBlocks) / allocatedTokens + // The first snapshot is after allocating, that is 2 blocks after the signal is minted. + // The final snapshot is when we close the allocation, that happens 9 blocks after signal is minted. + // So the rewards will be ((issuancePerBlock * 7) / allocatedTokens) * allocatedTokens + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. 
At this point rewards should be collected for that indexer + const tx = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt = await tx.wait() + const event = rewardsManager.interface.parseLog(receipt.logs[1]).args + expect(event.indexer).eq(indexer1.address) + expect(event.allocationID).eq(allocationID1) + expect(toRound(event.amount)).eq(toRound(expectedIndexingRewards)) + + // After state + const afterTokenSupply = await grt.totalSupply() + const afterIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const afterDestinationBalance = await grt.balanceOf(destinationAddress) + const afterStakingBalance = await grt.balanceOf(staking.address) + + // Check that rewards are properly assigned + const expectedIndexerStake = beforeIndexer1Stake + const expectedTokenSupply = beforeTokenSupply.add(expectedIndexingRewards) + // Check stake should not have changed + expect(toRound(afterIndexer1Stake)).eq(toRound(expectedIndexerStake)) + // Check indexing rewards are received by the rewards destination + expect(toRound(afterDestinationBalance)).eq(toRound(beforeDestinationBalance.add(expectedIndexingRewards))) + // Check indexing rewards were not sent to the staking contract + expect(afterStakingBalance).eq(beforeStakingBalance) + // Check that tokens have been minted + expect(toRound(afterTokenSupply)).eq(toRound(expectedTokenSupply)) + }) + + it('should distribute rewards on closed allocation w/delegators', async function () { + // Setup + const delegationParams = { + indexingRewardCut: toBN('823000'), // 82.30% + queryFeeCut: toBN('80000'), // 8% + cooldownBlocks: 0, + } + const tokensToDelegate = toGRT('2000') + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup the allocation and delegators + await setupIndexerAllocationWithDelegation(tokensToDelegate, delegationParams) + + // Jump + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = 
await grt.totalSupply() + const beforeDelegationPool = await staking.delegationPools(indexer1.address) + const beforeIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + + // Close allocation. At this point rewards should be collected for that indexer + await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // After state + const afterTokenSupply = await grt.totalSupply() + const afterDelegationPool = await staking.delegationPools(indexer1.address) + const afterIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + + // Check that rewards are put into indexer stake (only indexer cut) + // Check that rewards are put into delegators pool accordingly + + // All the rewards in this subgraph go to this allocation. + // Rewards per token will be (issuancePerBlock * nBlocks) / allocatedTokens + // The first snapshot is after allocating, that is 1 block after the signal is minted. + // The final snapshot is when we close the allocation, that happens 4 blocks after signal is minted. 
+ // So the rewards will be ((issuancePerBlock * 3) / allocatedTokens) * allocatedTokens + const expectedIndexingRewards = toGRT('600') + // Calculate delegators cut + const indexerRewards = delegationParams.indexingRewardCut.mul(expectedIndexingRewards).div(toBN(MAX_PPM)) + // Calculate indexer cut + const delegatorsRewards = expectedIndexingRewards.sub(indexerRewards) + // Check + const expectedIndexerStake = beforeIndexer1Stake.add(indexerRewards) + const expectedDelegatorsPoolTokens = beforeDelegationPool.tokens.add(delegatorsRewards) + const expectedTokenSupply = beforeTokenSupply.add(expectedIndexingRewards) + expect(toRound(afterIndexer1Stake)).eq(toRound(expectedIndexerStake)) + expect(toRound(afterDelegationPool.tokens)).eq(toRound(expectedDelegatorsPoolTokens)) + // Check that tokens have been minted + expect(toRound(afterTokenSupply)).eq(toRound(expectedTokenSupply)) + }) + + it('should deny rewards if subgraph on denylist', async function () { + // Setup + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocation. 
At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + }) + + it('should handle zero rewards scenario correctly', async function () { + // Setup allocation with zero issuance to create zero rewards scenario + await rewardsManager.connect(governor).setIssuancePerBlock(0) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeStakingBalance = await grt.balanceOf(staking.address) + + // Close allocation. At this point rewards should be zero + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + + // After state - should be unchanged since no rewards were minted + const afterTokenSupply = await grt.totalSupply() + const afterStakingBalance = await grt.balanceOf(staking.address) + + // Check that no tokens were minted (rewards were 0) + expect(afterTokenSupply).eq(beforeTokenSupply) + expect(afterStakingBalance).eq(beforeStakingBalance) + }) + }) + }) + + describe('edge scenarios', function () { + it('close allocation on a subgraph that no longer have signal', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + 
indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mineEpoch(epochManager) + + // Remove all signal from the subgraph + const curatorShares = await curation.getCuratorSignal(curator1.address, subgraphDeploymentID1) + await curation.connect(curator1).burn(subgraphDeploymentID1, curatorShares, 0) + + // Close allocation. At this point rewards should be collected for that indexer + await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + }) + }) + + describe('multiple allocations', function () { + it('two allocations in the same block with a GRT burn in the middle should succeed', async function () { + // If rewards are not monotonically increasing, this can trigger + // a subtraction overflow error as seen in mainnet tx: + // 0xb6bf7bbc446720a7409c482d714aebac239dd62e671c3c94f7e93dd3a61835ab + await helpers.mineEpoch(epochManager) + + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Stake + const tokensToStake = toGRT('12500') + await staking.connect(indexer1).stake(tokensToStake) + + // Allocate simultaneously, burning in the middle + const tokensToAlloc = toGRT('5000') + await helpers.setAutoMine(false) + const tx1 = await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + const tx2 = await grt.connect(indexer1).burn(toGRT(1)) + const tx3 = await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID2, + metadata, + await channelKey2.generateProof(indexer1.address), + ) + + await helpers.mine() + await helpers.setAutoMine(true) + + await 
expect(tx1).emit(staking, 'AllocationCreated') + await expect(tx2).emit(grt, 'Transfer') + await expect(tx3).emit(staking, 'AllocationCreated') + }) + it('two simultanous-similar allocations should get same amount of rewards', async function () { + await helpers.mineEpoch(epochManager) + + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Stake + const tokensToStake = toGRT('12500') + await staking.connect(indexer1).stake(tokensToStake) + + // Allocate simultaneously + const tokensToAlloc = toGRT('5000') + const tx1 = await staking.populateTransaction.allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + const tx2 = await staking.populateTransaction.allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID2, + metadata, + await channelKey2.generateProof(indexer1.address), + ) + await staking.connect(indexer1).multicall([tx1.data, tx2.data]) + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocations simultaneously + const tx3 = await staking.populateTransaction.closeAllocation(allocationID1, randomHexBytes()) + const tx4 = await staking.populateTransaction.closeAllocation(allocationID2, randomHexBytes()) + const tx5 = await staking.connect(indexer1).multicall([tx3.data, tx4.data]) + + // Both allocations should receive the same amount of rewards + const receipt = await tx5.wait() + const event1 = rewardsManager.interface.parseLog(receipt.logs[1]).args + const event2 = rewardsManager.interface.parseLog(receipt.logs[5]).args + expect(event1.amount).eq(event2.amount) + }) + }) + + describe('rewards progression when collecting query fees', function () { + it('collect query fees with two subgraphs and one allocation', async function () { + async 
function getRewardsAccrual(subgraphs) { + const [sg1, sg2] = await Promise.all(subgraphs.map((sg) => rewardsManager.getAccRewardsForSubgraph(sg))) + return { + sg1, + sg2, + all: sg1.add(sg2), + } + } + + // set curation percentage + await staking.connect(governor).setCurationPercentage(100000) + + // allow the asset holder + const tokensToCollect = toGRT('10000') + + // signal in two subgraphs in the same block + const subgraphs = [subgraphDeploymentID1, subgraphDeploymentID2] + for (const sub of subgraphs) { + await curation.connect(curator1).mint(sub, toGRT('1500'), 0) + } + + // snapshot block before any accrual (we substract 1 because accrual starts after the first mint happens) + const b1 = await epochManager.blockNum().then((x) => x.toNumber() - 1) + + // allocate + const tokensToAllocate = toGRT('12500') + await staking + .connect(indexer1) + .multicall([ + await staking.populateTransaction.stake(tokensToAllocate).then((tx) => tx.data), + await staking.populateTransaction + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + .then((tx) => tx.data), + ]) + + // move time fwd + await helpers.mineEpoch(epochManager) + + // collect funds into staking for that sub + await staking.connect(assetHolder).collect(tokensToCollect, allocationID1) + + // check rewards diff + await rewardsManager.getRewards(staking.address, allocationID1).then(formatGRT) + + await helpers.mine() + const accrual = await getRewardsAccrual(subgraphs) + const b2 = await epochManager.blockNum().then((x) => x.toNumber()) + + // round comparison because there is a small precision error due to dividing and accrual per signal + expect(toRound(accrual.all)).eq(toRound(ISSUANCE_PER_BLOCK.mul(b2 - b1))) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts 
new file mode 100644 index 000000000..57a742ec5 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts @@ -0,0 +1,501 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { deriveChannelKey, GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const { HashZero } = constants + +describe('Rewards - Eligibility Oracle', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive channel key for indexer used to sign attestations + const channelKey1 = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + + const allocationID1 = channelKey1.address + + const metadata = HashZero + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + async function setupIndexerAllocation() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + 
allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('rewards eligibility oracle', function () { + it('should reject setRewardsEligibilityOracle if unauthorized', async function () { + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + const tx = rewardsManager.connect(indexer1).setRewardsEligibilityOracle(mockOracle.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set rewards eligibility oracle if governor', async function () { + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await 
MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + await expect(tx) + .emit(rewardsManager, 'RewardsEligibilityOracleSet') + .withArgs(constants.AddressZero, mockOracle.address) + + expect(await rewardsManager.rewardsEligibilityOracle()).eq(mockOracle.address) + }) + + it('should allow setting rewards eligibility oracle to zero address', async function () { + // First set an oracle + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Then set to zero address to disable + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(constants.AddressZero) + await expect(tx) + .emit(rewardsManager, 'RewardsEligibilityOracleSet') + .withArgs(mockOracle.address, constants.AddressZero) + + expect(await rewardsManager.rewardsEligibilityOracle()).eq(constants.AddressZero) + }) + + it('should reject setting oracle that does not support interface', async function () { + // Try to set an EOA (externally owned account) as the rewards eligibility oracle + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(indexer1.address) + // EOA doesn't have code, so the call will revert (error message may vary by ethers version) + await expect(tx).to.be.reverted + }) + + it('should reject setting oracle that does not support IRewardsEligibility interface', async function () { + // Deploy a contract that supports ERC165 but not IRewardsEligibility + const MockERC165Factory = await hre.ethers.getContractFactory('contracts/tests/MockERC165.sol:MockERC165') + const mockERC165 = await MockERC165Factory.deploy() + await 
mockERC165.deployed() + + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(mockERC165.address) + await expect(tx).revertedWith('Contract does not support IRewardsEligibility interface') + }) + + it('should not emit event when setting same oracle address', async function () { + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Setting the same oracle again should not emit an event + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + await expect(tx).to.not.emit(rewardsManager, 'RewardsEligibilityOracleSet') + }) + }) + + describe('rewards eligibility in takeRewards', function () { + it('should deny rewards due to rewards eligibility oracle', async function () { + // Setup rewards eligibility oracle that denies rewards for indexer1 + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Default to deny + await mockOracle.deployed() + + // Set the rewards eligibility oracle + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards (for verification in the event) + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. 
At this point rewards should be denied due to eligibility + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should allow rewards when rewards eligibility oracle approves', async function () { + // Setup rewards eligibility oracle that allows rewards for indexer1 + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) // Default to allow + await mockOracle.deployed() + + // Set the rewards eligibility oracle + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. At this point rewards should be assigned normally + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + }) + + describe('rewards eligibility oracle and denylist interaction', function () { + it('should prioritize denylist over REO when both deny', async function () { + // Setup BOTH denial mechanisms + // 1. Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // 2. 
Setup REO that also denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Close allocation - both checks will be performed + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + const expectedIndexingRewards = toGRT('1400') + + // Verify: Both denial events are emitted (new "first successful reclaim" behavior) + // Since neither has a reclaim address configured, both checks run and both events emit + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + + // Rewards are dropped (no reclaim happens since neither has address configured) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + + it('should check REO when denylist allows but indexer ineligible', async function () { + // Setup: Subgraph is allowed (no denylist), but indexer is ineligible + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny indexer + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + 
+ // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedIndexingRewards = toGRT('1400') + + // Close allocation - REO should be checked + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should handle indexer becoming ineligible mid-allocation', async function () { + // Setup: Indexer starts eligible + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) // Start eligible + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation while indexer is eligible + await setupIndexerAllocation() + + // Jump to next epoch (rewards accrue) + await helpers.mineEpoch(epochManager) + + // Change eligibility AFTER allocation created but BEFORE closing + await mockOracle.setIndexerEligible(indexer1.address, false) + + const expectedIndexingRewards = toGRT('1600') + + // Close allocation - should be denied at close time (not creation time) + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should handle indexer becoming eligible mid-allocation', async function () { + // Setup: Indexer starts ineligible + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) 
+ const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Start ineligible + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation while indexer is ineligible + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Change eligibility before closing + await mockOracle.setIndexerEligible(indexer1.address, true) + + const expectedIndexingRewards = toGRT('1600') + + // Close allocation - should now be allowed + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should handle denylist being added mid-allocation', async function () { + // Setup: Start with subgraph NOT denied + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation when subgraph is allowed + await setupIndexerAllocation() + + // Jump to next epoch (rewards accrue) + await helpers.mineEpoch(epochManager) + + // Deny the subgraph before closing allocation + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Close allocation - should be denied even though it was created when allowed + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + }) + + it('should handle denylist being removed mid-allocation', async function () { + // Setup: Start with subgraph denied + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await 
rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation (can still allocate to denied subgraph) + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Remove from denylist before closing + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, false) + + const expectedIndexingRewards = toGRT('1600') + + // Close allocation - should now get rewards + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should allow rewards when REO is zero address (disabled)', async function () { + // Ensure REO is not set (zero address = disabled) + expect(await rewardsManager.rewardsEligibilityOracle()).eq(constants.AddressZero) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedIndexingRewards = toGRT('1400') + + // Close allocation - should get rewards (no eligibility check when REO is zero) + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should verify event structure differences between denial mechanisms', async function () { + // Test 1: Denylist denial - event WITHOUT amount + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + await helpers.mineEpoch(epochManager) + await setupIndexerAllocation() + await helpers.mineEpoch(epochManager) + + 
const tx1 = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt1 = await tx1.wait() + + // Find the RewardsDenied event - search in logs as events may be from different contracts + const rewardsDeniedEvent = receipt1.logs + .map((log) => { + try { + return rewardsManager.interface.parseLog(log) + } catch { + return null + } + }) + .find((event) => event?.name === 'RewardsDenied') + + expect(rewardsDeniedEvent).to.not.be.undefined + + // Verify it only has indexer and allocationID (no amount parameter) + expect(rewardsDeniedEvent?.args?.indexer).to.equal(indexer1.address) + expect(rewardsDeniedEvent?.args?.allocationID).to.equal(allocationID1) + // RewardsDenied has only 2 args, amount should not exist + expect(rewardsDeniedEvent?.args?.amount).to.be.undefined + + // Reset for test 2 + await fixture.tearDown() + await fixture.setUp() + + // Test 2: REO denial - event WITH amount + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + await helpers.mineEpoch(epochManager) + await setupIndexerAllocation() + await helpers.mineEpoch(epochManager) + + const tx2 = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt2 = await tx2.wait() + + // Find the RewardsDeniedDueToEligibility event + const eligibilityEvent = receipt2.logs + .map((log) => { + try { + return rewardsManager.interface.parseLog(log) + } catch { + return null + } + }) + .find((event) => event?.name === 'RewardsDeniedDueToEligibility') + + expect(eligibilityEvent).to.not.be.undefined + + // Verify it has indexer, allocationID, AND amount + expect(eligibilityEvent?.args?.indexer).to.equal(indexer1.address) + 
expect(eligibilityEvent?.args?.allocationID).to.equal(allocationID1) + expect(eligibilityEvent?.args?.amount).to.not.be.undefined + expect(eligibilityEvent?.args?.amount).to.be.gt(0) // Shows what they would have gotten + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts new file mode 100644 index 000000000..721deb45c --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts @@ -0,0 +1,116 @@ +import { RewardsManager } from '@graphprotocol/contracts' +import { IERC165__factory, IIssuanceTarget__factory, IRewardsManager__factory } from '@graphprotocol/interfaces/types' +import { GraphNetworkContracts, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +describe('RewardsManager interfaces', () => { + const graph = hre.graph() + let governor: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let rewardsManager: RewardsManager + + before(async function () { + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + rewardsManager = contracts.RewardsManager + + // Set a default issuance per block + await rewardsManager.connect(governor).setIssuancePerBlock(toGRT('200')) + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + /** + * Interface ID Stability Tests + * + * These tests verify that interface IDs remain stable across builds. + * Changes to these IDs indicate breaking changes to the interface definitions. + * + * If a test fails: + * 1. Verify the interface change was intentional + * 2. Understand the impact on deployed contracts + * 3. 
Update the expected ID if the change is correct + * 4. Document the breaking change in release notes + */ + describe('Interface ID Stability', () => { + it('IERC165 should have stable interface ID', () => { + expect(IERC165__factory.interfaceId).to.equal('0x01ffc9a7') + }) + + it('IIssuanceTarget should have stable interface ID', () => { + expect(IIssuanceTarget__factory.interfaceId).to.equal('0xaee4dc43') + }) + + it('IRewardsManager should have stable interface ID', () => { + expect(IRewardsManager__factory.interfaceId).to.equal('0x45dd0aa0') + }) + }) + + describe('supportsInterface', function () { + it('should support IIssuanceTarget interface', async function () { + const supports = await rewardsManager.supportsInterface(IIssuanceTarget__factory.interfaceId) + expect(supports).to.be.true + }) + + it('should support IRewardsManager interface', async function () { + const supports = await rewardsManager.supportsInterface(IRewardsManager__factory.interfaceId) + expect(supports).to.be.true + }) + + it('should support IERC165 interface', async function () { + const supports = await rewardsManager.supportsInterface(IERC165__factory.interfaceId) + expect(supports).to.be.true + }) + + it('should return false for unsupported interfaces', async function () { + // Test with an unknown interface ID + const unknownInterfaceId = '0x12345678' // Random interface ID + const supports = await rewardsManager.supportsInterface(unknownInterfaceId) + expect(supports).to.be.false + }) + }) + + describe('calcRewards', function () { + it('should calculate rewards correctly', async function () { + const tokens = toGRT('1000') + const accRewardsPerAllocatedToken = toGRT('0.5') + + // Expected: (1000 * 0.5 * 1e18) / 1e18 = 500 GRT + const expectedRewards = toGRT('500') + + const rewards = await rewardsManager.calcRewards(tokens, accRewardsPerAllocatedToken) + expect(rewards).to.equal(expectedRewards) + }) + + it('should return 0 when tokens is 0', async function () { + const tokens = 
toGRT('0') + const accRewardsPerAllocatedToken = toGRT('0.5') + + const rewards = await rewardsManager.calcRewards(tokens, accRewardsPerAllocatedToken) + expect(rewards).to.equal(0) + }) + + it('should return 0 when accRewardsPerAllocatedToken is 0', async function () { + const tokens = toGRT('1000') + const accRewardsPerAllocatedToken = toGRT('0') + + const rewards = await rewardsManager.calcRewards(tokens, accRewardsPerAllocatedToken) + expect(rewards).to.equal(0) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts new file mode 100644 index 000000000..c74679ad9 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts @@ -0,0 +1,416 @@ +import { Curation } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +describe('Rewards - Issuance Allocator', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let rewardsManager: RewardsManager + + const subgraphDeploymentID1 = randomHexBytes() + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) 
+ contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + rewardsManager = contracts.RewardsManager as RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + // Reset issuance allocator to ensure we use direct issuancePerBlock + await rewardsManager.connect(governor).setIssuanceAllocator(constants.AddressZero) + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('setIssuanceAllocator', function () { + describe('ERC-165 validation', function () { + it('should successfully set an issuance allocator that supports the interface', async function () { + // Deploy a mock issuance allocator that supports ERC-165 and IIssuanceAllocationDistribution + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + // Should succeed because MockIssuanceAllocator supports IIssuanceAllocationDistribution + await expect(rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address)) + .to.emit(rewardsManager, 'IssuanceAllocatorSet') + .withArgs(constants.AddressZero, mockAllocator.address) + + // Verify the allocator was set + expect(await rewardsManager.issuanceAllocator()).to.equal(mockAllocator.address) + }) + + it('should revert when setting to EOA address (no contract code)', async function () { + const eoaAddress = indexer1.address + + // Should revert because EOAs don't have contract code to call supportsInterface on + await 
expect(rewardsManager.connect(governor).setIssuanceAllocator(eoaAddress)).to.be.reverted + }) + + it('should revert when setting to contract that does not support IIssuanceAllocationDistribution', async function () { + // Deploy a contract that supports ERC-165 but not IIssuanceAllocationDistribution + const MockERC165Factory = await hre.ethers.getContractFactory('contracts/tests/MockERC165.sol:MockERC165') + const mockERC165 = await MockERC165Factory.deploy() + await mockERC165.deployed() + + // Should revert because the contract doesn't support IIssuanceAllocationDistribution + await expect(rewardsManager.connect(governor).setIssuanceAllocator(mockERC165.address)).to.be.revertedWith( + 'Contract does not support IIssuanceAllocationDistribution interface', + ) + }) + + it('should validate interface before updating rewards calculation', async function () { + // This test ensures that ERC165 validation happens before updateAccRewardsPerSignal + // Deploy a contract that supports ERC-165 but not IIssuanceAllocationDistribution + const MockERC165Factory = await hre.ethers.getContractFactory('contracts/tests/MockERC165.sol:MockERC165') + const mockERC165 = await MockERC165Factory.deploy() + await mockERC165.deployed() + + // Should revert with interface error, not with any rewards calculation error + await expect(rewardsManager.connect(governor).setIssuanceAllocator(mockERC165.address)).to.be.revertedWith( + 'Contract does not support IIssuanceAllocationDistribution interface', + ) + }) + }) + + describe('access control', function () { + it('should revert when called by non-governor', async function () { + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + // Should revert because indexer1 is not the governor + await 
expect(rewardsManager.connect(indexer1).setIssuanceAllocator(mockAllocator.address)).to.be.revertedWith( + 'Only Controller governor', + ) + }) + }) + + describe('state management', function () { + it('should allow setting issuance allocator to zero address (disable)', async function () { + // First set a valid allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + await rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address) + expect(await rewardsManager.issuanceAllocator()).to.equal(mockAllocator.address) + + // Now disable by setting to zero address + await expect(rewardsManager.connect(governor).setIssuanceAllocator(constants.AddressZero)) + .to.emit(rewardsManager, 'IssuanceAllocatorSet') + .withArgs(mockAllocator.address, constants.AddressZero) + + expect(await rewardsManager.issuanceAllocator()).to.equal(constants.AddressZero) + + // Should now use local issuancePerBlock again + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(ISSUANCE_PER_BLOCK) + }) + + it('should emit IssuanceAllocatorSet event when setting allocator', async function () { + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + const tx = rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + await expect(tx) + .emit(rewardsManager, 'IssuanceAllocatorSet') + .withArgs(constants.AddressZero, mockIssuanceAllocator.address) + }) + + it('should not emit event when setting to same allocator address', async function () { + // Deploy a mock issuance allocator + const MockIssuanceAllocatorFactory = await 
hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + // Set the allocator first time + await rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address) + + // Setting to same address should not emit event + const tx = await rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address) + const receipt = await tx.wait() + + // Filter for IssuanceAllocatorSet events + const events = receipt.events?.filter((e) => e.event === 'IssuanceAllocatorSet') || [] + expect(events.length).to.equal(0) + }) + + it('should update rewards before changing issuance allocator', async function () { + // This test verifies that updateAccRewardsPerSignal is called when setting allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + // Setting the allocator should trigger updateAccRewardsPerSignal + // We can't easily test this directly, but we can verify the allocator was set + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + expect(await rewardsManager.issuanceAllocator()).eq(mockIssuanceAllocator.address) + }) + }) + }) + + describe('getRewardsIssuancePerBlock', function () { + it('should return issuancePerBlock when no issuanceAllocator is set', async function () { + const expectedIssuance = toGRT('100.025') + await rewardsManager.connect(governor).setIssuancePerBlock(expectedIssuance) + + // Ensure no issuanceAllocator is set + expect(await rewardsManager.issuanceAllocator()).eq(constants.AddressZero) + + // Should return the direct issuancePerBlock value + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(expectedIssuance) + 
}) + + it('should return value from issuanceAllocator when set', async function () { + // Create a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + // Set the mock allocator on RewardsManager + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Verify the allocator was set + expect(await rewardsManager.issuanceAllocator()).eq(mockIssuanceAllocator.address) + + // Set RewardsManager as a self-minting target with 25 GRT per block + const expectedIssuance = toGRT('25') + await mockIssuanceAllocator['setTargetAllocation(address,uint256,uint256,bool)']( + rewardsManager.address, + 0, // allocator issuance + expectedIssuance, // self issuance + true, + ) + + // Should return the value from the allocator, not the local issuancePerBlock + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(expectedIssuance) + }) + + it('should return 0 when issuanceAllocator is set but target not registered as self-minter', async function () { + // Create a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + // Set the mock allocator on RewardsManager + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Set RewardsManager as an allocator-minting target (only allocator issuance) + await mockIssuanceAllocator['setTargetAllocation(address,uint256,uint256,bool)']( + rewardsManager.address, + toGRT('25'), // allocator issuance + 0, // self issuance + false, + ) + + // Should return 0 because it's not a self-minting 
target + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(0) + }) + }) + + describe('setIssuancePerBlock', function () { + it('should allow setIssuancePerBlock when issuanceAllocator is set', async function () { + // Create and set a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Should allow setting issuancePerBlock even when allocator is set + const newIssuancePerBlock = toGRT('100') + await rewardsManager.connect(governor).setIssuancePerBlock(newIssuancePerBlock) + + // The local issuancePerBlock should be updated + expect(await rewardsManager.issuancePerBlock()).eq(newIssuancePerBlock) + + // But the effective issuance should still come from the allocator + // (assuming the allocator returns a different value) + expect(await rewardsManager.getRewardsIssuancePerBlock()).not.eq(newIssuancePerBlock) + }) + }) + + describe('beforeIssuanceAllocationChange', function () { + it('should handle beforeIssuanceAllocationChange correctly', async function () { + // Create and set a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Anyone should be able to call this function + await rewardsManager.connect(governor).beforeIssuanceAllocationChange() + + // Should also succeed when called by the allocator + await mockIssuanceAllocator.callBeforeIssuanceAllocationChange(rewardsManager.address) + }) 
+ }) + + describe('issuance allocator integration', function () { + let mockIssuanceAllocator: any + + beforeEach(async function () { + // Create and setup mock allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + }) + + it('should accumulate rewards using allocator rate over time', async function () { + // Setup: Create signal + const totalSignal = toGRT('1000') + await curation.connect(curator1).mint(subgraphDeploymentID1, totalSignal, 0) + + // Set allocator with specific rate (50 GRT per block, different from local 200 GRT) + const allocatorRate = toGRT('50') + await mockIssuanceAllocator.setTargetAllocation(rewardsManager.address, 0, allocatorRate, false) + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Snapshot state after setting allocator + const rewardsAfterSet = await rewardsManager.getAccRewardsPerSignal() + + // Mine blocks to accrue rewards at allocator rate + const blocksToMine = 10 + await helpers.mine(blocksToMine) + + // Get accumulated rewards + const rewardsAfterMining = await rewardsManager.getAccRewardsPerSignal() + const actualAccrued = rewardsAfterMining.sub(rewardsAfterSet) + + // Calculate expected rewards: (rate × blocks) / totalSignal + // Expected = (50 GRT × 10 blocks) / 1000 GRT signal = 0.5 GRT per signal + const expectedAccrued = allocatorRate.mul(blocksToMine).mul(toGRT('1')).div(totalSignal) + + // Verify rewards accumulated at allocator rate (not local rate of 200 GRT/block) + expect(actualAccrued).to.eq(expectedAccrued) + + // Verify NOT using local rate (would be 4x higher: 200 vs 50) + const wrongExpected = ISSUANCE_PER_BLOCK.mul(blocksToMine).mul(toGRT('1')).div(totalSignal) + expect(actualAccrued).to.not.eq(wrongExpected) + }) + + it('should maintain reward 
consistency when switching between rates', async function () { + // Setup: Create signal + const totalSignal = toGRT('2000') + await curation.connect(curator1).mint(subgraphDeploymentID1, totalSignal, 0) + + // Snapshot initial state + const block0 = await helpers.latestBlock() + const rewards0 = await rewardsManager.getAccRewardsPerSignal() + + // Phase 1: Accrue at local rate (200 GRT/block) + await helpers.mine(5) + const block1 = await helpers.latestBlock() + const rewards1 = await rewardsManager.getAccRewardsPerSignal() + + // Calculate phase 1 accrual + const blocksPhase1 = block1 - block0 + const phase1Accrued = rewards1.sub(rewards0) + const expectedPhase1 = ISSUANCE_PER_BLOCK.mul(blocksPhase1).mul(toGRT('1')).div(totalSignal) + expect(phase1Accrued).to.eq(expectedPhase1) + + // Phase 2: Switch to allocator with different rate (100 GRT/block) + const allocatorRate = toGRT('100') + await mockIssuanceAllocator.setTargetAllocation(rewardsManager.address, 0, allocatorRate, false) + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + const block2 = await helpers.latestBlock() + const rewards2 = await rewardsManager.getAccRewardsPerSignal() + + await helpers.mine(8) + const block3 = await helpers.latestBlock() + const rewards3 = await rewardsManager.getAccRewardsPerSignal() + + // Calculate phase 2 accrual (includes the setIssuanceAllocator block at local rate) + const blocksPhase2 = block3 - block2 + const phase2Accrued = rewards3.sub(rewards2) + const expectedPhase2 = allocatorRate.mul(blocksPhase2).mul(toGRT('1')).div(totalSignal) + expect(phase2Accrued).to.eq(expectedPhase2) + + // Phase 3: Switch back to local rate (200 GRT/block) + await rewardsManager.connect(governor).setIssuanceAllocator(constants.AddressZero) + + const block4 = await helpers.latestBlock() + const rewards4 = await rewardsManager.getAccRewardsPerSignal() + + await helpers.mine(4) + const block5 = await helpers.latestBlock() + const rewards5 = 
await rewardsManager.getAccRewardsPerSignal() + + // Calculate phase 3 accrual + const blocksPhase3 = block5 - block4 + const phase3Accrued = rewards5.sub(rewards4) + const expectedPhase3 = ISSUANCE_PER_BLOCK.mul(blocksPhase3).mul(toGRT('1')).div(totalSignal) + expect(phase3Accrued).to.eq(expectedPhase3) + + // Verify total consistency: all rewards from start to end must equal sum of all phases + // including the transition blocks (setIssuanceAllocator calls mine blocks too) + const transitionPhase1to2 = rewards2.sub(rewards1) // Block mined by setIssuanceAllocator + const transitionPhase2to3 = rewards4.sub(rewards3) // Block mined by removing allocator + const totalExpected = phase1Accrued + .add(transitionPhase1to2) + .add(phase2Accrued) + .add(transitionPhase2to3) + .add(phase3Accrued) + const totalActual = rewards5.sub(rewards0) + expect(totalActual).to.eq(totalExpected) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts new file mode 100644 index 000000000..b5bd11413 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts @@ -0,0 +1,588 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { deriveChannelKey, GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants, utils } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const { HashZero } = constants + +// Reclaim reason identifiers (matching RewardsReclaim.sol) +const INDEXER_INELIGIBLE = utils.id('INDEXER_INELIGIBLE') +const SUBGRAPH_DENIED 
= utils.id('SUBGRAPH_DENIED') +const CLOSE_ALLOCATION = utils.id('CLOSE_ALLOCATION') + +describe('Rewards - Reclaim Addresses', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + let reclaimWallet: SignerWithAddress + let otherWallet: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive channel key for indexer used to sign attestations + const channelKey1 = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + + const allocationID1 = channelKey1.address + + const metadata = HashZero + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + async function setupIndexerAllocation() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + reclaimWallet = testAccounts[2] + otherWallet = testAccounts[3] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = 
contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('setReclaimAddress', function () { + it('should reject if not governor', async function () { + const tx = rewardsManager.connect(indexer1).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should reject setting reclaim address for bytes32(0)', async function () { + const tx = rewardsManager.connect(governor).setReclaimAddress(HashZero, reclaimWallet.address) + await expect(tx).revertedWith('Cannot set reclaim address for (bytes32(0))') + }) + + it('should set eligibility reclaim address if governor', async function () { + const tx = rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + await expect(tx) + .emit(rewardsManager, 'ReclaimAddressSet') + .withArgs(INDEXER_INELIGIBLE, constants.AddressZero, reclaimWallet.address) + + expect(await rewardsManager.reclaimAddresses(INDEXER_INELIGIBLE)).eq(reclaimWallet.address) + }) + + it('should set subgraph denied reclaim address if governor', async function () { + const tx = rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) + await expect(tx) + .emit(rewardsManager, 'ReclaimAddressSet') + .withArgs(SUBGRAPH_DENIED, constants.AddressZero, reclaimWallet.address) + + expect(await rewardsManager.reclaimAddresses(SUBGRAPH_DENIED)).eq(reclaimWallet.address) + }) + + it('should allow setting to zero 
address', async function () { + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + + const tx = rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, constants.AddressZero) + await expect(tx) + .emit(rewardsManager, 'ReclaimAddressSet') + .withArgs(INDEXER_INELIGIBLE, reclaimWallet.address, constants.AddressZero) + + expect(await rewardsManager.reclaimAddresses(INDEXER_INELIGIBLE)).eq(constants.AddressZero) + }) + + it('should not emit event when setting same address', async function () { + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + + const tx = rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + await expect(tx).to.not.emit(rewardsManager, 'ReclaimAddressSet') + }) + }) + + describe('reclaim denied rewards - subgraph denylist', function () { + it('should mint to reclaim address when subgraph denied and reclaim address set', async function () { + // Setup reclaim address + await rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards + const expectedRewards = toGRT('1400') + + // Check reclaim wallet balance before + const balanceBefore = await grt.balanceOf(reclaimWallet.address) + + // Close allocation - should emit both denial and reclaim events + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await 
expect(tx) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(SUBGRAPH_DENIED, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') + + // Check reclaim wallet received the rewards + const balanceAfter = await grt.balanceOf(reclaimWallet.address) + expect(balanceAfter.sub(balanceBefore)).eq(expectedRewards) + }) + + it('should not mint to reclaim address when reclaim address not set', async function () { + // Do NOT set reclaim address (defaults to zero address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Close allocation - should only emit denial event, not reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + }) + + describe('reclaim denied rewards - eligibility', function () { + it('should mint to reclaim address when eligibility denied and reclaim address set', async function () { + // Setup reclaim address + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await 
helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards + const expectedRewards = toGRT('1400') + + // Check reclaim wallet balance before + const balanceBefore = await grt.balanceOf(reclaimWallet.address) + + // Close allocation - should emit both denial and reclaim events + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(INDEXER_INELIGIBLE, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') + + // Check reclaim wallet received the rewards + const balanceAfter = await grt.balanceOf(reclaimWallet.address) + expect(balanceAfter.sub(balanceBefore)).eq(expectedRewards) + }) + + it('should not mint to reclaim address when reclaim address not set', async function () { + // Do NOT set reclaim address (defaults to zero address) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Close allocation - should only emit denial event, not reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + 
.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + }) + + describe('reclaim precedence - first successful reclaim wins', function () { + it('should reclaim to SUBGRAPH_DENIED when both fail and both addresses configured', async function () { + // Setup BOTH reclaim addresses + await rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, otherWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Check balances before + const subgraphDeniedBalanceBefore = await grt.balanceOf(reclaimWallet.address) + const indexerIneligibleBalanceBefore = await grt.balanceOf(otherWallet.address) + + // Close allocation - should reclaim to SUBGRAPH_DENIED address (first check) + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimed') + 
.withArgs(SUBGRAPH_DENIED, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') + + // Only SUBGRAPH_DENIED wallet should receive rewards (first successful reclaim wins) + const subgraphDeniedBalanceAfter = await grt.balanceOf(reclaimWallet.address) + const indexerIneligibleBalanceAfter = await grt.balanceOf(otherWallet.address) + + expect(subgraphDeniedBalanceAfter.sub(subgraphDeniedBalanceBefore)).eq(expectedRewards) + expect(indexerIneligibleBalanceAfter.sub(indexerIneligibleBalanceBefore)).eq(0) + }) + + it('should reclaim to INDEXER_INELIGIBLE when both fail but only second address configured', async function () { + // Setup ONLY INDEXER_INELIGIBLE reclaim address (not SUBGRAPH_DENIED) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, otherWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Check balance before + const balanceBefore = await grt.balanceOf(otherWallet.address) + + // Close allocation - should emit both denial events, but only reclaim to INDEXER_INELIGIBLE + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 
'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(INDEXER_INELIGIBLE, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') + + // INDEXER_INELIGIBLE wallet should receive rewards + const balanceAfter = await grt.balanceOf(otherWallet.address) + expect(balanceAfter.sub(balanceBefore)).eq(expectedRewards) + }) + + it('should drop rewards when both fail and neither address configured', async function () { + // Do NOT set any reclaim addresses + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Close allocation - should emit both denial events but NO reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx).to.not.emit(rewardsManager, 
'RewardsReclaimed') + }) + + it('should drop rewards when subgraph denied without address even if indexer eligible', async function () { + // Do NOT set SUBGRAPH_DENIED reclaim address + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that ALLOWS (indexer is eligible) + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) // Allow + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Close allocation - should emit denied event but NO eligibility event, NO reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx).to.not.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + }) + }) + + describe('reclaimRewards - force close allocation', function () { + let mockSubgraphService: any + + beforeEach(async function () { + // Deploy mock subgraph service + const MockSubgraphServiceFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockSubgraphService.sol:MockSubgraphService', + ) + mockSubgraphService = await MockSubgraphServiceFactory.deploy() + await mockSubgraphService.deployed() + + // Set it as the subgraph service in rewards manager + 
await rewardsManager.connect(governor).setSubgraphService(mockSubgraphService.address) + }) + + it('should reclaim rewards when reclaim address is set', async function () { + // Set reclaim address for ForceCloseAllocation + await rewardsManager.connect(governor).setReclaimAddress(CLOSE_ALLOCATION, reclaimWallet.address) + + // Setup allocation in real staking contract + await setupIndexerAllocation() + + // Also set allocation data in mock so RewardsManager can query it + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, // isActive + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, // accRewardsPerAllocatedToken starts at 0 + 0, // accRewardsPending + ) + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Jump to next epoch to accrue rewards + await helpers.mineEpoch(epochManager) + + // Check balance before + const balanceBefore = await grt.balanceOf(reclaimWallet.address) + + // Call reclaimRewards via mock subgraph service + const tx = await mockSubgraphService.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + + // Verify event was emitted (don't check exact amount, it depends on rewards calculation) + await expect(tx).emit(rewardsManager, 'RewardsReclaimed') + + // Check balance after - should have increased + const balanceAfter = await grt.balanceOf(reclaimWallet.address) + const rewardsClaimed = balanceAfter.sub(balanceBefore) + expect(rewardsClaimed).to.be.gt(0) + }) + + it('should not reclaim when reclaim address is not set', async function () { + // Do NOT set reclaim address (defaults to zero) + + // Setup allocation in real staking contract + await setupIndexerAllocation() + + // Also set allocation data in mock + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 
0, + 0, + ) + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Jump to next epoch to accrue rewards + await helpers.mineEpoch(epochManager) + + // Call reclaimRewards via mock subgraph service - should not emit RewardsReclaimed + const tx = await mockSubgraphService.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + + it('should return 0 and not emit when reclaim address is not set and no rewards', async function () { + // Do NOT set reclaim address (zero address) + + // Setup allocation but mark it as inactive (no rewards) + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + false, // NOT active - this will return 0 rewards + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, + 0, + ) + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Call reclaimRewards - should return 0 and not emit + const result = await mockSubgraphService.callStatic.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + expect(result).eq(0) + + const tx = await mockSubgraphService.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + + it('should reject when called by unauthorized address', async function () { + // Try to call reclaimRewards directly from indexer1 (not the subgraph service) + // Note: Contract types need to be regenerated after interface changes + // Using manual encoding for now + const abiCoder = hre.ethers.utils.defaultAbiCoder + const selector = hre.ethers.utils.id('reclaimRewards(bytes32,address,bytes)').slice(0, 10) + const params = abiCoder.encode(['bytes32', 'address', 'bytes'], [CLOSE_ALLOCATION, allocationID1, '0x']) + const data = 
selector + params.slice(2) + + const tx = indexer1.sendTransaction({ + to: rewardsManager.address, + data: data, + }) + await expect(tx).revertedWith('Not a rewards issuer') + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts new file mode 100644 index 000000000..a8c3b0c08 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts @@ -0,0 +1,482 @@ +import { Curation } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { GraphNetworkContracts, helpers, randomAddress, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' +import { network } from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +// TODO: Behavior change - HorizonRewardsAssigned is no longer emitted when rewards == 0 +// Set to true if the old behavior is restored (emitting event for zero rewards) +const EMIT_EVENT_FOR_ZERO_REWARDS = false + +describe('Rewards - SubgraphService', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let rewardsManager: RewardsManager + + const subgraphDeploymentID1 = randomHexBytes() + const allocationID1 = randomAddress() + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new 
NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('subgraph service configuration', function () { + it('should reject setSubgraphService if unauthorized', async function () { + const newService = randomAddress() + const tx = rewardsManager.connect(indexer1).setSubgraphService(newService) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set subgraph service if governor', async function () { + const newService = randomAddress() + const tx = rewardsManager.connect(governor).setSubgraphService(newService) + + await expect(tx).emit(rewardsManager, 'SubgraphServiceSet').withArgs(constants.AddressZero, newService) + + expect(await rewardsManager.subgraphService()).eq(newService) + }) + + it('should allow setting to zero address', async function () { + const service = randomAddress() + await rewardsManager.connect(governor).setSubgraphService(service) + + const tx = rewardsManager.connect(governor).setSubgraphService(constants.AddressZero) + await expect(tx).emit(rewardsManager, 'SubgraphServiceSet').withArgs(service, constants.AddressZero) + + expect(await rewardsManager.subgraphService()).eq(constants.AddressZero) + }) + + it('should emit event when setting different address', async function () { + const service1 = randomAddress() + const service2 = randomAddress() + + await 
rewardsManager.connect(governor).setSubgraphService(service1) + + // Setting a different address should emit event + const tx = await rewardsManager.connect(governor).setSubgraphService(service2) + await expect(tx).emit(rewardsManager, 'SubgraphServiceSet').withArgs(service1, service2) + }) + }) + + describe('subgraph service as rewards issuer', function () { + let mockSubgraphService: any + + beforeEach(async function () { + // Deploy mock SubgraphService + const MockSubgraphServiceFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockSubgraphService.sol:MockSubgraphService', + ) + mockSubgraphService = await MockSubgraphServiceFactory.deploy() + await mockSubgraphService.deployed() + + // Set it on RewardsManager + await rewardsManager.connect(governor).setSubgraphService(mockSubgraphService.address) + }) + + describe('getRewards from subgraph service', function () { + it('should calculate rewards for subgraph service allocations', async function () { + // Setup: Create signal for rewards calculation + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup allocation data in mock + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, // isActive + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, // accRewardsPerAllocatedToken + 0, // accRewardsPending + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Mine some blocks to accrue rewards + await helpers.mine(10) + + // Get rewards - should return calculated amount + const rewards = await rewardsManager.getRewards(mockSubgraphService.address, allocationID1) + expect(rewards).to.be.gt(0) + }) + + it('should return zero for inactive allocation', async function () { + // Setup allocation as inactive + await mockSubgraphService.setAllocation( + allocationID1, + false, // isActive = false + indexer1.address, + 
subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + const rewards = await rewardsManager.getRewards(mockSubgraphService.address, allocationID1) + expect(rewards).to.equal(0) + }) + + it('should reject getRewards from non-rewards-issuer contract', async function () { + const randomContract = randomAddress() + const tx = rewardsManager.getRewards(randomContract, allocationID1) + await expect(tx).revertedWith('Not a rewards issuer') + }) + }) + + describe('takeRewards from subgraph service', function () { + it('should take rewards through subgraph service', async function () { + // Setup: Create signal for rewards calculation + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup allocation data in mock + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, // isActive + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, // accRewardsPerAllocatedToken + 0, // accRewardsPending + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Mine some blocks to accrue rewards + await helpers.mine(10) + + // Before state + const beforeSubgraphServiceBalance = await grt.balanceOf(mockSubgraphService.address) + const beforeTotalSupply = await grt.totalSupply() + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards (called by subgraph service) + const tx = await rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + const receipt = await tx.wait() + + // Stop impersonating + await network.provider.request({ + method: 
'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + + // Parse the event + const event = receipt.logs + .map((log: any) => { + try { + return rewardsManager.interface.parseLog(log) + } catch { + return null + } + }) + .find((e: any) => e?.name === 'HorizonRewardsAssigned') + + expect(event).to.not.be.undefined + expect(event?.args.indexer).to.equal(indexer1.address) + expect(event?.args.allocationID).to.equal(allocationID1) + expect(event?.args.amount).to.be.gt(0) + + // After state - verify tokens minted to subgraph service + const afterSubgraphServiceBalance = await grt.balanceOf(mockSubgraphService.address) + const afterTotalSupply = await grt.totalSupply() + + expect(afterSubgraphServiceBalance).to.be.gt(beforeSubgraphServiceBalance) + expect(afterTotalSupply).to.be.gt(beforeTotalSupply) + }) + + it('should return zero rewards for inactive allocation', async function () { + // Setup allocation as inactive + await mockSubgraphService.setAllocation( + allocationID1, + false, // isActive = false + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should return 0 + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: 
[mockSubgraphService.address], + }) + }) + + it('should reject takeRewards from non-rewards-issuer contract', async function () { + const tx = rewardsManager.connect(indexer1).takeRewards(allocationID1) + await expect(tx).revertedWith('Caller must be a rewards issuer') + }) + + it('should handle zero rewards scenario', async function () { + // Setup with zero issuance + await rewardsManager.connect(governor).setIssuancePerBlock(0) + + // Setup allocation + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, toGRT('12500')) + + // Mine blocks + await helpers.mine(10) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should succeed with 0 amount + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + }) + + describe('mixed allocations from staking and subgraph service', function () { + it('should account for both staking and subgraph service allocations in getAccRewardsPerAllocatedToken', async function () { + // This test verifies that getSubgraphAllocatedTokens is called for both issuers + // and rewards are distributed proportionally 
+ + // Setup: Create signal + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup subgraph service allocation + const tokensFromSubgraphService = toGRT('5000') + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensFromSubgraphService) + + // Note: We can't easily create a real staking allocation in this test + // but the contract code at lines 381-388 loops through both issuers + // and sums their allocated tokens. This test verifies the subgraph service path. + + // Mine some blocks + await helpers.mine(5) + + // Get accumulated rewards per allocated token + const [accRewardsPerAllocatedToken, accRewardsForSubgraph] = + await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1) + + // Should have calculated rewards based on subgraph service allocations + expect(accRewardsPerAllocatedToken).to.be.gt(0) + expect(accRewardsForSubgraph).to.be.gt(0) + }) + + it('should handle case where only subgraph service has allocations', async function () { + // Setup: Create signal + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Only subgraph service has allocations + const tokensFromSubgraphService = toGRT('10000') + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensFromSubgraphService) + + // Mine blocks + await helpers.mine(5) + + // Get rewards + const [accRewardsPerAllocatedToken] = await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1) + + expect(accRewardsPerAllocatedToken).to.be.gt(0) + }) + + it('should return zero when neither issuer has allocations', async function () { + // Setup: Create signal but no allocations + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // No allocations from either issuer + await 
mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, 0) + + // Mine blocks + await helpers.mine(5) + + // Get rewards - should return 0 when no allocations + const [accRewardsPerAllocatedToken, accRewardsForSubgraph] = + await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1) + + expect(accRewardsPerAllocatedToken).to.equal(0) + expect(accRewardsForSubgraph).to.be.gt(0) // Subgraph still accrues, but no per-token rewards + }) + }) + + describe('subgraph service with denylist and eligibility', function () { + it('should deny rewards from subgraph service when subgraph is on denylist', async function () { + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup allocation with some pending rewards so rewards > 0 + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + toGRT('100'), // accRewardsPending > 0 so rewards will be calculated + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, toGRT('12500')) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should be denied + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + + it('should deny rewards 
from subgraph service when indexer is ineligible', async function () { + // Setup REO that denies indexer1 + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockREO = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny by default + await mockREO.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockREO.address) + + // Setup: Create signal + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup allocation + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, + 0, + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Mine blocks to accrue rewards + await helpers.mine(5) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should be denied due to eligibility + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + await expect(tx).emit(rewardsManager, 'RewardsDeniedDueToEligibility') + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards.test.ts b/packages/contracts/test/tests/unit/rewards/rewards.test.ts index e6171cc13..b4f9e68c2 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards.test.ts +++ 
b/packages/contracts/test/tests/unit/rewards/rewards.test.ts @@ -15,15 +15,23 @@ import { import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' import { BigNumber as BN } from 'bignumber.js' import { expect } from 'chai' -import { BigNumber, constants } from 'ethers' +import { BigNumber, constants, utils } from 'ethers' import hre from 'hardhat' import { NetworkFixture } from '../lib/fixtures' const MAX_PPM = 1000000 +// TODO: Behavior change - HorizonRewardsAssigned is no longer emitted when rewards == 0 +// Set to true if the old behavior is restored (emitting event for zero rewards) +const EMIT_EVENT_FOR_ZERO_REWARDS = false + const { HashZero, WeiPerEther } = constants +// Reclaim reason identifiers (matching RewardsReclaim.sol) +const INDEXER_INELIGIBLE = utils.id('INDEXER_INELIGIBLE') +const SUBGRAPH_DENIED = utils.id('SUBGRAPH_DENIED') + const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] describe('Rewards', () => { @@ -711,9 +719,13 @@ describe('Rewards', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('does not revert with an underflow if the minimum signal changes, and signal came after allocation', async function () { @@ -729,9 +741,13 @@ describe('Rewards', () => { // Close allocation. 
At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('does not revert if signal was already under minimum', async function () { @@ -746,9 +762,13 @@ describe('Rewards', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('should distribute rewards on closed allocation and send to destination', async function () { @@ -889,7 +909,11 @@ describe('Rewards', () => { // Close allocation. 
At this point rewards should be zero const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } // After state - should be unchanged since no rewards were minted const afterTokenSupply = await grt.totalSupply() @@ -899,6 +923,88 @@ describe('Rewards', () => { expect(afterTokenSupply).eq(beforeTokenSupply) expect(afterStakingBalance).eq(beforeStakingBalance) }) + + it('should handle zero rewards with denylist and reclaim address', async function () { + // Setup reclaim address for SubgraphDenied + const reclaimWallet = assetHolder + await rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation with zero rewards (no signal) + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Close allocation immediately (same epoch) - should have zero rewards + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Should not emit events for zero rewards + await expect(tx).to.not.emit(rewardsManager, 'RewardsDenied') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + if 
(EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) + + it('should handle zero rewards with eligibility oracle and reclaim address', async function () { + // Setup reclaim address for IndexerIneligible + const reclaimWallet = assetHolder + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation with zero rewards (no signal) + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Close allocation immediately (same epoch) - should have zero rewards + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Should not emit events for zero rewards + await expect(tx).to.not.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) }) }) diff --git 
a/packages/horizon/test/unit/escrow/getters.t.sol b/packages/horizon/test/unit/escrow/getters.t.sol index 262192125..ded655b39 100644 --- a/packages/horizon/test/unit/escrow/getters.t.sol +++ b/packages/horizon/test/unit/escrow/getters.t.sol @@ -34,12 +34,21 @@ contract GraphEscrowGettersTest is GraphEscrowTest { uint256 amountDeposit, uint256 amountThawing, uint256 amountCollected - ) public useGateway useDeposit(amountDeposit) { - vm.assume(amountThawing > 0); - vm.assume(amountDeposit > 0); - vm.assume(amountDeposit >= amountThawing); - vm.assume(amountDeposit >= amountCollected); - vm.assume(amountDeposit - amountCollected < amountThawing); + ) public useGateway { + // Limit thawing and collected to half of MAX_STAKING_TOKENS to ensure valid deposit range + amountThawing = bound(amountThawing, 1, MAX_STAKING_TOKENS / 2); + amountCollected = bound(amountCollected, 1, MAX_STAKING_TOKENS / 2); + + // amountDeposit must be: + // - >= amountThawing (so we can thaw that amount) + // - >= amountCollected (so we can collect that amount) + // - < amountThawing + amountCollected (so that after collecting, balance < thawing) + // With the above bounds, this range is guaranteed to be valid + uint256 minDeposit = amountThawing > amountCollected ? 
amountThawing : amountCollected; + uint256 maxDeposit = amountThawing + amountCollected - 1; + amountDeposit = bound(amountDeposit, minDeposit, maxDeposit); + + _depositTokens(users.verifier, users.indexer, amountDeposit); // thaw some funds _thawEscrow(users.verifier, users.indexer, amountThawing); diff --git a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol index 72a73e19b..9c297203a 100644 --- a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol +++ b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol @@ -43,6 +43,25 @@ interface IRewardsManager { */ function setSubgraphService(address subgraphService) external; + /** + * @notice Set the rewards eligibility oracle address + * @param newRewardsEligibilityOracle The address of the rewards eligibility oracle + */ + function setRewardsEligibilityOracle(address newRewardsEligibilityOracle) external; + + /** + * @notice Set the reclaim address for a specific reason + * @dev Address to mint tokens for denied/reclaimed rewards. Set to zero to disable. + * + * IMPORTANT: Changes take effect immediately and retroactively. All unclaimed rewards from + * previous periods will be sent to the new reclaim address when they are eventually reclaimed, + * regardless of which address was configured when the rewards were originally accrued. 
+ * + * @param reason The reclaim reason identifier (see RewardsReclaim library for canonical reasons) + * @param newReclaimAddress The address to receive tokens + */ + function setReclaimAddress(bytes32 reason, address newReclaimAddress) external; + // -- Denylist -- /** @@ -67,6 +86,13 @@ interface IRewardsManager { // -- Getters -- + /** + * @notice Gets the effective issuance per block for rewards + * @dev Takes into account the issuance allocator if set + * @return The effective issuance per block + */ + function getRewardsIssuancePerBlock() external view returns (uint256); + /** * @notice Gets the issuance of rewards per signal since last updated * @return newly accrued rewards per signal since last update @@ -129,6 +155,17 @@ interface IRewardsManager { */ function takeRewards(address allocationID) external returns (uint256); + /** + * @notice Reclaim rewards for an allocation + * @dev This function can only be called by an authorized rewards issuer. + * Calculates pending rewards and mints them to the configured reclaim address. 
+ * @param reason The reclaim reason identifier (see RewardsReclaim library for canonical reasons) + * @param allocationID Address of the allocation whose rewards are being reclaimed + * @param data Arbitrary data to include in the RewardsReclaimed event for additional context + * @return The amount of rewards that were reclaimed (0 if no reclaim address set) + */ + function reclaimRewards(bytes32 reason, address allocationID, bytes calldata data) external returns (uint256); + + // -- Hooks -- + + /** diff --git a/packages/interfaces/contracts/contracts/rewards/RewardsReclaim.sol b/packages/interfaces/contracts/contracts/rewards/RewardsReclaim.sol new file mode 100644 index 000000000..dab4eed71 --- /dev/null +++ b/packages/interfaces/contracts/contracts/rewards/RewardsReclaim.sol @@ -0,0 +1,63 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title RewardsReclaim + * @author Edge & Node + * @notice Canonical definitions for rewards reclaim reasons + * @dev Uses bytes32 identifiers (like OpenZeppelin roles) to allow decentralized extension. + * New reasons can be defined by any contract without modifying this library. + * These constants provide standard reasons used across The Graph Protocol. + * + * Note: bytes32(0) is reserved and cannot be used as a reclaim reason. This design prevents: + * 1. Accidental misconfiguration from setting a reclaim address for an invalid/uninitialized reason + * 2. Invalid reclaim operations when a reason identifier was not properly set + * The zero value serves as a sentinel to catch configuration errors at the protocol level. + * + * How reclaim reasons are used depends on the specific implementation. Different contracts + * may handle multiple applicable reclaim reasons differently. 
+ */ +library RewardsReclaim { + /** + * @notice Reclaim rewards - indexer failed eligibility check + * @dev Indexer is not eligible to receive rewards according to eligibility oracle + */ + bytes32 public constant INDEXER_INELIGIBLE = keccak256("INDEXER_INELIGIBLE"); + + /** + * @notice Reclaim rewards - subgraph is on denylist + * @dev Subgraph deployment has been denied rewards by availability oracle + */ + bytes32 public constant SUBGRAPH_DENIED = keccak256("SUBGRAPH_DENIED"); + + /** + * @notice Reclaim rewards - POI submitted too late + * @dev Proof of Indexing was submitted after the staleness deadline + */ + bytes32 public constant STALE_POI = keccak256("STALE_POI"); + + /** + * @notice Reclaim rewards - allocation has no tokens + * @dev Altruistic allocation (zero tokens) is not eligible for rewards + */ + bytes32 public constant ALTRUISTIC_ALLOCATION = keccak256("ALTRUISTIC_ALLOCATION"); + + /** + * @notice Reclaim rewards - no POI provided + * @dev Allocation closed without providing a Proof of Indexing + */ + bytes32 public constant ZERO_POI = keccak256("ZERO_POI"); + + /** + * @notice Reclaim rewards - allocation created in current epoch + * @dev Allocation must exist for at least one full epoch to earn rewards + */ + bytes32 public constant ALLOCATION_TOO_YOUNG = keccak256("ALLOCATION_TOO_YOUNG"); + + /** + * @notice Reclaim rewards - allocation closed without POI + * @dev Allocation was closed without providing a Proof of Indexing + */ + bytes32 public constant CLOSE_ALLOCATION = keccak256("CLOSE_ALLOCATION"); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol new file mode 100644 index 000000000..31c7779dd --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol @@ -0,0 +1,155 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + 
+import { IIssuanceTarget } from "./IIssuanceTarget.sol"; +import { SelfMintingEventMode } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationAdministration + * @author Edge & Node + * @notice Interface for administrative operations on the issuance allocator. + * These functions are typically restricted to the governor role. + */ +interface IIssuanceAllocationAdministration { + /** + * @notice Set the issuance per block. + * @param newIssuancePerBlock New issuance per block + * @return applied True if the value is applied (including if already the case) + * @dev Requires distribution to have reached block.number + */ + function setIssuancePerBlock(uint256 newIssuancePerBlock) external returns (bool applied); + + /** + * @notice Set the issuance per block, requiring distribution has reached at least the specified block + * @param newIssuancePerBlock New issuance per block + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return applied True if the value is applied (including if already the case), false if distribution hasn't reached minDistributedBlock + * @dev Governance should explicitly call + * distributePendingIssuance(blockNumber) first if distribution is behind minDistributedBlock. + * @dev This allows configuration changes while paused: first call distributePendingIssuance(blockNumber), + * then call this function with the same or lower blockNumber. 
+ */ + function setIssuancePerBlock( + uint256 newIssuancePerBlock, + uint256 minDistributedBlock + ) external returns (bool applied); + + /** + * @notice Set the allocation for a target with only allocator minting + * @param target The target contract to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @return applied True if the value is applied (including if already the case), false if not applied + * @dev This variant sets selfMintingRate to 0 and evenIfDistributionPending to false + */ + function setTargetAllocation(IIssuanceTarget target, uint256 allocatorMintingRate) external returns (bool applied); + + /** + * @notice Set the allocation for a target with both allocator and self minting + * @param target The target contract to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @return applied True if the value is applied (including if already the case), false if not applied + * @dev This variant sets evenIfDistributionPending to false + */ + function setTargetAllocation( + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) external returns (bool applied); + + /** + * @notice Set the allocation for a target, provided distribution has reached at least the specified block + * @param target The target contract to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return applied True if the value is applied (including if already the case), false if distribution hasn't reached minDistributedBlock and therefore the change was not applied + * @dev Governance should explicitly call + * distributePendingIssuance(blockNumber) first if paused and 
not distributed up to minDistributedBlock block. + * @dev This allows configuration changes while paused: first call distributePendingIssuance(blockNumber), + * then call this function with the same or lower blockNumber. + */ + function setTargetAllocation( + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate, + uint256 minDistributedBlock + ) external returns (bool applied); + + /** + * @notice Notify a specific target about an upcoming allocation change + * @param target Address of the target to notify + * @return notified True if notification was sent or already sent this block, false otherwise + */ + function notifyTarget(address target) external returns (bool notified); + + /** + * @notice Force set the lastChangeNotifiedBlock for a target to a specific block number + * @param target Address of the target to update + * @param blockNumber Block number to set as the lastChangeNotifiedBlock + * @return notificationBlock The block number that was set + * @dev This can be used to enable notification to be sent again (by setting to a past block) + * @dev or to prevent notification until a future block (by setting to current or future block). 
+ */ + function forceTargetNoChangeNotificationBlock( + address target, + uint256 blockNumber + ) external returns (uint256 notificationBlock); + + /** + * @notice Set the address that receives the default portion of issuance not allocated to other targets + * @param newAddress The new default target address (can be address(0)) + * @return applied True if applied + */ + function setDefaultTarget(address newAddress) external returns (bool applied); + + /** + * @notice Set the address that receives the default portion of issuance not allocated to other targets + * @param newAddress The new default target address (can be address(0)) + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return applied True if applied, false if distribution has not reached minDistributedBlock and therefore the change was not applied + * @dev Governance should explicitly call + * distributePendingIssuance(blockNumber) first if paused and distribution has not reached minDistributedBlock, + * then call this function with the same or a lower blockNumber. + */ + function setDefaultTarget(address newAddress, uint256 minDistributedBlock) external returns (bool applied); + + /** + * @notice Distribute pending accumulated allocator-minting issuance + * @dev Distributes accumulated allocator-minting issuance using current rates + * (retroactively applied to the period from lastDistributionBlock to current block). + * Prioritizes non-default targets getting full rates; default gets remainder. + * @dev Finalizes self-minting accumulation for the period being distributed. 
+ * @return distributedBlock Block number that issuance was distributed up to + */ + function distributePendingIssuance() external returns (uint256 distributedBlock); + + /** + * @notice Distribute pending accumulated allocator-minting issuance up to specified block + * @param toBlockNumber Block number to distribute up to (must be <= block.number and >= lastDistributionBlock) + * @dev Distributes accumulated allocator-minting issuance using current rates + * (retroactively applied to the period from lastDistributionBlock to toBlockNumber). + * Prioritizes non-default targets getting full rates; default gets remainder. + * @dev Finalizes self-minting accumulation for the period being distributed. + * @return distributedBlock Block number that issuance was distributed up to + */ + function distributePendingIssuance(uint256 toBlockNumber) external returns (uint256 distributedBlock); + + /** + * @notice Set the self-minting event emission mode + * @param newMode The new emission mode (None, Aggregate, or PerTarget) + * @return applied True if the mode was set (including if already set to that mode) + * @dev None: Skip event emission entirely (lowest gas) + * @dev Aggregate: Emit single aggregated event for all self-minting (medium gas) + * @dev PerTarget: Emit events for each target with self-minting (highest gas) + * @dev Self-minting targets should call getTargetIssuancePerBlock() rather than relying on events + */ + function setSelfMintingEventMode(SelfMintingEventMode newMode) external returns (bool applied); + + /** + * @notice Get the current self-minting event emission mode + * @return mode The current emission mode + */ + function getSelfMintingEventMode() external view returns (SelfMintingEventMode mode); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol new file mode 100644 index 000000000..f1e35d91d --- /dev/null +++ 
b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +import { AllocationTarget } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationData + * @author Edge & Node + * @notice Interface for querying issuance allocation target data + * @dev This interface provides access to internal allocation target information, + * primarily useful for operators and off-chain monitoring systems. + */ +interface IIssuanceAllocationData { + /** + * @notice Get target data for a specific target + * @param target Address of the target + * @return AllocationTarget struct containing target information including lastChangeNotifiedBlock + */ + function getTargetData(address target) external view returns (AllocationTarget memory); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol new file mode 100644 index 000000000..affbb84e4 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +import { TargetIssuancePerBlock } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationDistribution + * @author Edge & Node + * @notice Interface for distribution and target interaction with the issuance allocator. + * This is the minimal interface that targets need to interact with the allocator. + */ +interface IIssuanceAllocationDistribution { + /** + * @notice Distribute issuance to allocated non-self-minting targets. + * @return Block number that issuance has been distributed to. That will normally be the current block number, unless the contract is paused. 
+ * + * @dev When the contract is paused, no issuance is distributed and lastIssuanceBlock is not updated. + * @dev This function is permissionless and can be called by anyone, including targets as part of their normal flow. + */ + function distributeIssuance() external returns (uint256); + + /** + * @notice Target issuance per block information + * @param target Address of the target + * @return targetIssuance TargetIssuancePerBlock struct containing allocatorIssuanceBlockAppliedTo, selfIssuanceBlockAppliedTo, allocatorIssuanceRate, and selfIssuanceRate + * @dev This function does not revert when paused, instead the caller is expected to correctly read and apply the information provided. + * @dev Targets should check allocatorIssuanceBlockAppliedTo and selfIssuanceBlockAppliedTo - if either is not the current block, that type of issuance is paused for that target. + * @dev Targets should not check the allocator's pause state directly, but rely on the blockAppliedTo fields to determine if issuance is paused. + */ + function getTargetIssuancePerBlock( + address target + ) external view returns (TargetIssuancePerBlock memory targetIssuance); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol new file mode 100644 index 000000000..ca0ca3129 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol @@ -0,0 +1,59 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +import { Allocation, DistributionState } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationStatus + * @author Edge & Node + * @notice Interface for read-only status and query operations on the issuance allocator. 
+ * All functions in this interface are view functions that provide information about + * the current state of the allocator, including allocations and system status. + */ +interface IIssuanceAllocationStatus { + /** + * @notice Get the current allocation for a target + * @param target Address of the target + * @return allocation Allocation struct containing total, allocator-minting, and self-minting allocations + */ + function getTargetAllocation(address target) external view returns (Allocation memory allocation); + + /** + * @notice Get the current global allocation totals + * @return allocation Allocation struct containing total, allocator-minting, and self-minting allocations across all targets + */ + function getTotalAllocation() external view returns (Allocation memory allocation); + + /** + * @notice Get all allocated target addresses + * @return targets Array of target addresses + */ + function getTargets() external view returns (address[] memory targets); + + /** + * @notice Get a specific allocated target address by index + * @param index The index of the target address to retrieve + * @return target The target address at the specified index + */ + function getTargetAt(uint256 index) external view returns (address target); + + /** + * @notice Get the number of allocated targets + * @return count The total number of allocated targets + */ + function getTargetCount() external view returns (uint256 count); + + /** + * @notice Get the current issuance per block + * @return issuancePerBlock The current issuance per block + */ + function getIssuancePerBlock() external view returns (uint256 issuancePerBlock); + + /** + * @notice Get pending issuance distribution state + * @return distributionState DistributionState struct containing block tracking and accumulation info + */ + function getDistributionState() external view returns (DistributionState memory distributionState); +} diff --git 
a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol new file mode 100644 index 000000000..2d24dba1d --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol @@ -0,0 +1,66 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +/** + * @notice Controls self-minting event emission behavior to manage gas costs + * @dev None skips event emission entirely (lowest gas) + * @dev Aggregate emits a single aggregated event for all self-minting + * @dev PerTarget emits events for each target with self-minting (highest gas) + */ +enum SelfMintingEventMode { + None, + Aggregate, + PerTarget +} + +/** + * @notice Target issuance per block information + * @param allocatorIssuanceRate Issuance rate for allocator-minting (tokens per block) + * @param allocatorIssuanceBlockAppliedTo The block up to which allocator issuance has been applied + * @param selfIssuanceRate Issuance rate for self-minting (tokens per block) + * @param selfIssuanceBlockAppliedTo The block up to which self issuance has been applied + */ +struct TargetIssuancePerBlock { + uint256 allocatorIssuanceRate; + uint256 allocatorIssuanceBlockAppliedTo; + uint256 selfIssuanceRate; + uint256 selfIssuanceBlockAppliedTo; +} + +/** + * @notice Allocation information + * @param totalAllocationRate Total allocation rate (tokens per block: allocatorMintingRate + selfMintingRate) + * @param allocatorMintingRate Allocator-minting allocation rate (tokens per block) + * @param selfMintingRate Self-minting allocation rate (tokens per block) + */ +struct Allocation { + uint256 totalAllocationRate; + uint256 allocatorMintingRate; + uint256 selfMintingRate; +} + +/** + * @notice Allocation target information + * @param allocatorMintingRate The allocator-minting allocation rate (tokens per block) + * @param selfMintingRate The self-minting 
allocation rate (tokens per block) + * @param lastChangeNotifiedBlock Last block when this target was notified of changes + */ +struct AllocationTarget { + uint256 allocatorMintingRate; + uint256 selfMintingRate; + uint256 lastChangeNotifiedBlock; +} + +/** + * @notice Distribution state information + * @param lastDistributionBlock Last block where allocator-minting issuance was distributed + * @param lastSelfMintingBlock Last block where self-minting issuance was applied + * @param selfMintingOffset Self-minting that offsets allocator-minting budget (starts during pause, clears on distribution) + */ +struct DistributionState { + uint256 lastDistributionBlock; + uint256 lastSelfMintingBlock; + uint256 selfMintingOffset; +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol new file mode 100644 index 000000000..3fe539b95 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IIssuanceTarget + * @author Edge & Node + * @notice Interface for contracts that receive issuance from an issuance allocator + */ +interface IIssuanceTarget { + /** + * @notice Called by the issuance allocator before the target's issuance allocation changes + * @dev The target should ensure that all issuance related calculations are up-to-date + * with the current block so that an allocation change can be applied correctly. + * Note that the allocation could change multiple times in the same block after + * this function has been called, only the final allocation is relevant. + */ + function beforeIssuanceAllocationChange() external; + + /** + * @notice Sets the issuance allocator for this target + * @dev This function facilitates upgrades by providing a standard way for targets + * to change their allocator. 
Implementations can define their own access control. + * @param newIssuanceAllocator Address of the issuance allocator + */ + function setIssuanceAllocator(address newIssuanceAllocator) external; +} diff --git a/packages/interfaces/contracts/issuance/allocate/ISendTokens.sol b/packages/interfaces/contracts/issuance/allocate/ISendTokens.sol new file mode 100644 index 000000000..3f67358ae --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/ISendTokens.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title ISendTokens + * @author Edge & Node + * @notice Interface for contracts that can send tokens to arbitrary addresses + * @dev This interface provides a simple token transfer capability for contracts + * that need to distribute or send tokens programmatically. + */ +interface ISendTokens { + /** + * @notice Send tokens to a specified address + * @param to The address to send tokens to + * @param amount The amount of tokens to send + */ + function sendTokens(address to, uint256 amount) external; +} diff --git a/packages/interfaces/contracts/issuance/common/IPausableControl.sol b/packages/interfaces/contracts/issuance/common/IPausableControl.sol new file mode 100644 index 000000000..83cfbc364 --- /dev/null +++ b/packages/interfaces/contracts/issuance/common/IPausableControl.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IPausableControl + * @author Edge & Node + * @notice Interface for contracts that support pause/unpause functionality + * @dev This interface extends standard pausable functionality with explicit + * pause and unpause functions. Contracts implementing this interface allow + * authorized accounts to pause and unpause contract operations. + * Events (Paused, Unpaused) are inherited from OpenZeppelin's PausableUpgradeable. 
+ */ +interface IPausableControl { + /** + * @notice Pause the contract + * @dev Pauses contract operations. Only functions using whenNotPaused + * modifier will be affected. + */ + function pause() external; + + /** + * @notice Unpause the contract + * @dev Resumes contract operations. Only functions using whenPaused + * modifier will be affected. + */ + function unpause() external; + + /** + * @notice Check if the contract is currently paused + * @return True if the contract is paused, false otherwise + */ + function paused() external view returns (bool); +} diff --git a/packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol b/packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol new file mode 100644 index 000000000..53c8acf85 --- /dev/null +++ b/packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IRewardsEligibility + * @author Edge & Node + * @notice Minimal interface for checking indexer rewards eligibility + * @dev This is the interface that consumers (e.g., RewardsManager) need to check + * if an indexer is eligible to receive rewards + */ +interface IRewardsEligibility { + /** + * @notice Check if an indexer is eligible to receive rewards + * @param indexer Address of the indexer + * @return True if the indexer is eligible to receive rewards, false otherwise + */ + function isEligible(address indexer) external view returns (bool); +} diff --git a/packages/issuance/.markdownlint.json b/packages/issuance/.markdownlint.json new file mode 100644 index 000000000..18947b0be --- /dev/null +++ b/packages/issuance/.markdownlint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../.markdownlint.json" +} diff --git a/packages/issuance/.solcover.js b/packages/issuance/.solcover.js new file mode 100644 index 000000000..d8bbec4bb --- /dev/null +++ b/packages/issuance/.solcover.js @@ -0,0 +1,15 @@ 
+module.exports = { + skipFiles: ['test/'], + providerOptions: { + mnemonic: 'myth like bonus scare over problem client lizard pioneer submit female collect', + network_id: 1337, + }, + // Use default istanbulFolder: './coverage' + // Exclude 'html' to avoid duplicate HTML files (lcov already generates HTML in lcov-report/) + istanbulReporter: ['lcov', 'text', 'json'], + configureYulOptimizer: true, + mocha: { + grep: '@skip-on-coverage', + invert: true, + }, +} diff --git a/packages/issuance/.solhint.json b/packages/issuance/.solhint.json new file mode 100644 index 000000000..d30847305 --- /dev/null +++ b/packages/issuance/.solhint.json @@ -0,0 +1,3 @@ +{ + "extends": ["solhint:recommended", "./../../.solhint.json"] +} diff --git a/packages/issuance/README.md b/packages/issuance/README.md new file mode 100644 index 000000000..16e2520b6 --- /dev/null +++ b/packages/issuance/README.md @@ -0,0 +1,62 @@ +# The Graph Issuance Contracts + +This package contains smart contracts for The Graph's issuance functionality. + +## Overview + +The issuance contracts handle token issuance mechanisms for The Graph protocol. 
+ +### Contracts + +- **[IssuanceAllocator](contracts/allocate/IssuanceAllocator.md)** - Central distribution hub for token issuance, allocating tokens to different protocol components based on configured proportions +- **[RewardsEligibilityOracle](contracts/eligibility/RewardsEligibilityOracle.md)** - Oracle-based eligibility system for indexer rewards with time-based expiration +- **DirectAllocation** - Simple target contract for receiving and distributing allocated tokens + +## Development + +### Setup + +```bash +# Install dependencies +pnpm install + +# Build +pnpm build + +# Test +pnpm test +``` + +### Testing + +To run the tests: + +```bash +pnpm test +``` + +For coverage: + +```bash +pnpm test:coverage +``` + +### Linting + +To lint the contracts and tests: + +```bash +pnpm lint +``` + +### Contract Size + +To check contract sizes: + +```bash +pnpm size +``` + +## License + +GPL-2.0-or-later diff --git a/packages/issuance/contracts/allocate/DirectAllocation.sol b/packages/issuance/contracts/allocate/DirectAllocation.sol new file mode 100644 index 000000000..cbc042c14 --- /dev/null +++ b/packages/issuance/contracts/allocate/DirectAllocation.sol @@ -0,0 +1,101 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ISendTokens } from "@graphprotocol/interfaces/contracts/issuance/allocate/ISendTokens.sol"; +import { BaseUpgradeable } from "../common/BaseUpgradeable.sol"; + +// solhint-disable-next-line no-unused-import +import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol"; // Used by @inheritdoc + +/** + * @title DirectAllocation + * @author Edge & Node + * @notice A simple contract that receives tokens from the IssuanceAllocator and allows + * an authorized operator to withdraw them. 
+ * + * @dev This contract is designed to be an allocator-minting target in the IssuanceAllocator. + * The IssuanceAllocator will mint tokens directly to this contract, and the authorized + * operator can send them to individual addresses as needed. + * + * This contract is pausable by the PAUSE_ROLE. When paused, tokens cannot be sent. + * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. + */ +contract DirectAllocation is BaseUpgradeable, IIssuanceTarget, ISendTokens { + // -- Custom Errors -- + + /// @notice Thrown when token transfer fails + /// @param to The address to which the transfer was attempted + /// @param amount The amount of tokens that failed to transfer + error SendTokensFailed(address to, uint256 amount); + + // -- Events -- + + /// @notice Emitted when tokens are sent + /// @param to The address that received the tokens + /// @param amount The amount of tokens sent + event TokensSent(address indexed to, uint256 indexed amount); + // NOTE(review): `amount` is declared indexed above although this note suggests indexing it is not needed — confirm intent against the gas-indexed-events rule. + + /// @notice Emitted before the issuance allocation changes + event BeforeIssuanceAllocationChange(); + + // -- Constructor -- + + /** + * @notice Constructor for the DirectAllocation contract + * @dev This contract is upgradeable, but we use the constructor to pass the Graph Token address + * to the base contract.
+ * @param graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address graphToken) BaseUpgradeable(graphToken) {} + + // -- Initialization -- + + /** + * @notice Initialize the DirectAllocation contract + * @param governor Address that will have the GOVERNOR_ROLE + */ + function initialize(address governor) external virtual initializer { + __BaseUpgradeable_init(governor); + } + + // -- ERC165 -- + + /** + * @inheritdoc ERC165Upgradeable + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return + interfaceId == type(IIssuanceTarget).interfaceId || + interfaceId == type(ISendTokens).interfaceId || + super.supportsInterface(interfaceId); + } + + // -- External Functions -- + + /** + * @inheritdoc ISendTokens + */ + function sendTokens(address to, uint256 amount) external override onlyRole(OPERATOR_ROLE) whenNotPaused { + require(GRAPH_TOKEN.transfer(to, amount), SendTokensFailed(to, amount)); + emit TokensSent(to, amount); + } + + /** + * @dev For DirectAllocation, this is a no-op since we don't need to perform any calculations + * before an allocation change. We simply receive tokens from the IssuanceAllocator. + * @inheritdoc IIssuanceTarget + */ + function beforeIssuanceAllocationChange() external virtual override { + emit BeforeIssuanceAllocationChange(); + } + + /** + * @dev No-op for DirectAllocation; issuanceAllocator is not stored. 
+ * @inheritdoc IIssuanceTarget + */ + function setIssuanceAllocator(address issuanceAllocator) external virtual override onlyRole(GOVERNOR_ROLE) {} +} diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.md b/packages/issuance/contracts/allocate/IssuanceAllocator.md new file mode 100644 index 000000000..47ff7233d --- /dev/null +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.md @@ -0,0 +1,252 @@ +# IssuanceAllocator + +The IssuanceAllocator is a smart contract responsible for allocating token issuance to different components of The Graph protocol. It calculates issuance for all targets based on their configured rates (tokens per block) and handles minting for allocator-minting targets. + +## Overview + +The contract operates as a central distribution hub for newly minted Graph tokens, ensuring that different protocol components receive their allocated share of token issuance according to configured rates. It maintains a 100% allocation invariant through a default target mechanism, where any unallocated portion automatically goes to the default target. It supports both allocator-minting targets (recommended for new targets) and self-minting targets (for backwards compatibility), with the ability to have mixed allocations primarily for migration scenarios. + +## Architecture + +### Allocation Types + +The contract supports two types of allocation: + +1. **Allocator-minting allocation**: The IssuanceAllocator calculates and mints tokens directly to targets. This is the recommended approach for new targets as it provides robust control over token issuance through the IssuanceAllocator. + +2. **Self-minting allocation**: The IssuanceAllocator calculates issuance but does not mint tokens directly. Instead, targets call `getTargetIssuancePerBlock()` to determine their allocation and mint tokens themselves. This feature exists primarily for backwards compatibility with existing contracts like the RewardsManager. 
+ +While targets can technically have both types of allocation simultaneously, this is not the expected configuration. (It could be useful for migration scenarios where a self-minting target is gradually transitioning to allocator-minting allocation.) + +### Roles + +The contract uses role-based access control: + +- **GOVERNOR_ROLE**: Can set issuance rates, manage target allocations, notify targets, and perform all governance actions +- **PAUSE_ROLE**: Can pause contract operations (inherited from BaseUpgradeable) + +### Pause and Accumulation System + +The IssuanceAllocator includes a pause and accumulation system designed to respond to operational issues while preserving issuance integrity: + +#### Pause Behavior + +When the contract is paused: + +- **Distribution stops**: `distributeIssuance()` returns early without minting any tokens, returning the last block when issuance was distributed. +- **Accumulation begins**: Self-minting allowances accumulate in `selfMintingOffset`, reducing the allocator-minting budget. When distribution resumes, current rates are applied retroactively to the entire undistributed period. +- **Self-minting continues**: Self-minting targets can still query their allocation, but should check the `blockAppliedTo` fields to respect pause state. Because RewardsManager does not check `blockAppliedTo` and will mint tokens even when the allocator is paused, the initial implementation does not pause self-minting targets. (This behavior is subject to change in future versions, and new targets should check `blockAppliedTo`.) Note that RewardsManager is independently pausable. +- **Configuration allowed**: Governance functions like `setIssuancePerBlock()` and `setTargetAllocation()` still work. Rate changes apply immediately. 
When distribution resumes (either automatically when unpaused or manually via `distributePendingIssuance()`), the current rates are used retroactively for the entire undistributed period from `lastDistributionBlock` to the distribution block. +- **Notifications continue**: Targets are still notified of allocation changes even when paused, and should check the `blockAppliedTo` fields to correctly apply changes. + +#### Accumulation Logic + +During pause periods, the contract tracks self-minting allowances that reduce the allocator-minting budget: + +- `lastSelfMintingBlock`: Updated to current block whenever self-minting advances (continuously, even when paused) +- `selfMintingOffset`: Accumulates self-minting amounts that will reduce the allocator-minting budget when distribution resumes +- Calculation: `totalSelfMintingRate * blocksSinceLastSelfMinting` +- **Conservative accumulation**: Once accumulation starts (during pause), it continues through any unpaused periods until distribution clears it. + +#### Recovery Process + +When distribution resumes: + +1. **Automatic distribution**: `distributeIssuance()` detects accumulated self-minting and triggers retroactive distribution +2. **Manual distribution**: `distributePendingIssuance()` can be called directly by governance, even while paused +3. **Retroactive application**: Current rates are applied retroactively to the entire undistributed period +4. **Budget reduction**: Accumulated self-minting reduces the allocator-minting budget for the period +5. **Priority distribution**: Non-default targets receive their full rates first (if budget allows), default target receives remainder +6. 
**Clean slate**: After distribution to current block, `selfMintingOffset` is reset to 0 + +#### Use Cases + +This system enables: + +- **Rapid response**: Pause immediately during operational issues without losing track of issuance +- **Investigation time**: Allow time to investigate and resolve issues while maintaining issuance accounting +- **Gradual recovery**: Distribute accumulated issuance manually or automatically when ready +- **Target changes**: Modify allocations during pause periods, with accumulated issuance distributed according to updated allocations + +## Allocation Logic + +### Rate-Based System + +The contract uses absolute rates (tokens per block) rather than proportional allocations: + +- Each target has an `allocatorMintingRate` (tokens per block for allocator-minting) +- Each target has a `selfMintingRate` (tokens per block for self-minting) +- The default target automatically receives: `issuancePerBlock - sum(all other targets' rates)` + +### Distribution Calculation + +For each target during normal distribution, only the allocator-minting portion is distributed: + +```solidity +targetIssuance = targetAllocatorMintingRate * blocksSinceLastDistribution +``` + +For self-minting targets, they query their rate via `getTargetIssuancePerBlock()`: + +```solidity +selfIssuanceRate = targetSelfMintingRate +``` + +### Allocation Constraints and Invariants + +- **100% Invariant**: `sum(all allocatorMintingRates) + sum(all selfMintingRates) == issuancePerBlock` (always) +- **Default Target**: Automatically adjusted to maintain the 100% invariant when other allocations change +- **Available Budget**: When setting a target's allocation, available budget = default target's allocator rate + target's current total rate +- **Removing Targets**: Setting both rates to 0 removes the target from the active list (except default target) +- **Rounding**: Small rounding losses may occur during proportional distribution (when budget is insufficient) +- **Mixed 
Allocations**: Each target can have both allocator-minting and self-minting rates, though typically only one is used + +## Change Notification System + +Before any allocation changes, targets are notified via the `IIssuanceTarget.beforeIssuanceAllocationChange()` function. This allows targets to: + +- Update their internal state to the current block +- Prepare for the allocation change +- Ensure consistency in their reward calculations + +### Notification Rules + +- Each target is notified at most once per block (unless overridden via `forceTargetNoChangeNotificationBlock()`) +- Notifications are tracked per target using `lastChangeNotifiedBlock` +- Failed notifications cause the entire transaction to revert +- Use `forceTargetNoChangeNotificationBlock()` to skip notification for malfunctioning targets before removing them +- Notifications always occur when allocations change (even when paused) +- Manual notification is available for gas limit recovery via `notifyTarget()` + +## Gas Limit Recovery + +The contract includes several mechanisms to handle potential gas limit issues: + +### Potential Issues + +1. **Large target arrays**: Many targets could exceed gas limits during distribution +2. **Expensive notifications**: Target notification calls could consume too much gas +3. **Malfunctioning targets**: Target contracts that revert when notified + +### Recovery Mechanisms + +1. **Pause functionality**: Contract can be paused to stop operations during recovery +2. **Individual target notification**: `notifyTarget()` allows notifying targets one by one (will revert if target notification reverts) +3. **Force notification override**: `forceTargetNoChangeNotificationBlock()` can skip problematic targets +4. **Controlled distribution**: Functions accept `minDistributedBlock` parameter to allow configuration changes while paused (after calling `distributePendingIssuance(blockNumber)`) +5. 
**Target removal**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove malfunctioning targets by setting both rates to 0 +6. **Pending issuance distribution**: `distributePendingIssuance()` can be called manually to distribute accumulated issuance + +## Usage Patterns + +### Initial Setup + +**Note: This section is a work-in-progress discussion document for planning deployment, not finalized implementation documentation.** + +**The verification steps documented here are minimal deployment verification checks. These should be complemented by appropriate functional testing and verification as needed for production deployment.** + +**Prerequisites:** + +- GraphToken contract deployed +- RewardsManager upgraded with `setIssuanceAllocator()` function +- GraphIssuanceProxyAdmin deployed with protocol governance as owner + +To safely replicate existing issuance configuration during RewardsManager migration: + +- Default target starts as `address(0)` (that will not be minted to), allowing initial configuration without minting to any targets +- Deployment uses atomic initialization via proxy constructor (prevents front-running) +- Deployment account performs initial configuration, then transfers control to governance +- Granting of minter role can be delayed until replication of initial configuration with upgraded RewardsManager is verified to allow seamless transition to use of IssuanceAllocator +- **Governance control**: This contract uses OpenZeppelin's TransparentUpgradeableProxy pattern (not custom GraphProxy). GraphIssuanceProxyAdmin (owned by protocol governance) controls upgrades, while GOVERNOR_ROLE controls operations. The same governance address should have both roles. + +**Deployment sequence:** + +1. 
**Deploy and initialize** (deployment account) + - Deploy IssuanceAllocator implementation with GraphToken address + - Deploy TransparentUpgradeableProxy with implementation, GraphIssuanceProxyAdmin, and initialization data + - **Atomic initialization**: `initialize(deploymentAccountAddress)` called via proxy constructor + - Deployment account receives GOVERNOR_ROLE (temporary, for configuration) + - Automatically creates default target at `targetAddresses[0] = address(0)` + - Sets `lastDistributionBlock = block.number` + - **Security**: Front-running prevented by atomic deployment + initialization +2. **Set issuance rate** (deployment account) + - Query current rate from RewardsManager: `rate = rewardsManager.issuancePerBlock()` + - Call `setIssuancePerBlock(rate)` to replicate existing rate + - All issuance allocated to default target (`address(0)`) + - No tokens minted (default target cannot receive mints) +3. **Assign RewardsManager allocation** (deployment account) + - Call `setTargetAllocation(rewardsManagerAddress, 0, issuancePerBlock)` + - `allocatorMintingRate = 0` (RewardsManager will self-mint) + - `selfMintingRate = issuancePerBlock` (RewardsManager receives 100% allocation) + - Default target automatically adjusts to zero allocation +4. **Verify configuration before transfer** (deployment account) + - Verify contract is not paused (`paused()` returns false) + - Verify `getIssuancePerBlock()` returns expected rate (matches RewardsManager) + - Verify `getTargetAllocation(rewardsManager)` shows correct self-minting configuration + - Verify only two targets exist: `targetAddresses[0] = address(0)` and `targetAddresses[1] = rewardsManager` + - Verify default target is `address(0)` with zero allocation + - Contract is ready to transfer control to governance +5. 
**Distribute issuance** (anyone - no role required) + - Call `distributeIssuance()` to bring contract to fully current state + - Updates `lastDistributionBlock` to current block + - Verifies distribution mechanism is functioning correctly + - No tokens minted (no minter role yet, all allocation to self-minting RM) +6. **Set pause controls and transfer governance** (deployment account) + - Grant PAUSE_ROLE to pause guardian (same account as used for RewardsManager pause control) + - Grant GOVERNOR_ROLE to actual governor address (protocol governance multisig) + - Revoke GOVERNOR_ROLE from deployment account (MUST grant to governance first, then revoke) + - **Note**: Upgrade control (via GraphIssuanceProxyAdmin) is separate from GOVERNOR_ROLE +7. **Verify deployment and configuration** (governor) + - **Bytecode verification**: Verify deployed implementation bytecode matches expected contract + - **Access control**: + - Verify governance address has GOVERNOR_ROLE + - Verify deployment account does NOT have GOVERNOR_ROLE + - Verify pause guardian has PAUSE_ROLE + - **Off-chain**: Review all RoleGranted events since deployment to verify no other addresses have GOVERNOR_ROLE or PAUSE_ROLE + - **Pause state**: Verify contract is not paused (`paused()` returns false) + - **Issuance rate**: Verify `getIssuancePerBlock()` matches RewardsManager rate exactly + - **Target configuration**: + - Verify only two targets exist: `targetAddresses[0] = address(0)` and `targetAddresses[1] = rewardsManager` + - Verify default target is `address(0)` with zero allocation + - Verify `getTargetAllocation(rewardsManager)` shows correct self-minting allocation (100%) + - **Proxy configuration**: + - Verify GraphIssuanceProxyAdmin controls the proxy + - Verify GraphIssuanceProxyAdmin owner is protocol governance +8. 
**Configure RewardsManager** (governor) + - Call `rewardsManager.setIssuanceAllocator(issuanceAllocatorAddress)` + - RewardsManager will now query IssuanceAllocator for its issuance rate + - RewardsManager continues to mint tokens itself (self-minting) +9. **Grant minter role** (governor, only when configuration verified) + - Grant minter role to IssuanceAllocator on Graph Token +10. **Set default target** (governor, optional but recommended) + + - Call `setDefaultTarget()` to receive future unallocated issuance + +### Normal Operation + +1. Targets or external actors call `distributeIssuance()` periodically +2. Governor adjusts issuance rates as needed via `setIssuancePerBlock()` +3. Governor adds/removes/modifies targets via `setTargetAllocation()` overloads +4. Self-minting targets query their allocation via `getTargetIssuancePerBlock()` + +### Emergency Scenarios + +- **Gas limit issues**: Use pause, individual notifications, and `minDistributedBlock` parameters with `distributePendingIssuance()` +- **Target failures**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove problematic targets by setting both rates to 0 +- **Configuration while paused**: Call `distributePendingIssuance(blockNumber)` first, then use `minDistributedBlock` parameter in setter functions + +### For L1 Bridge Integration + +When `setIssuancePerBlock()` is called, the L1GraphTokenGateway's `updateL2MintAllowance()` function must be called to ensure the bridge can mint the correct amount of tokens on L2.
+ +## Security Considerations + +- Only governor can modify allocations and issuance rates +- Interface validation prevents adding incompatible targets +- 100% allocation invariant maintained automatically through default target mechanism +- Budget validation prevents over-allocation +- Pause functionality provides emergency stop capability +- Notification system ensures targets can prepare for changes +- Self-minting targets should respect paused state (check `blockAppliedTo` fields) +- Reentrancy guards protect governance functions +- Default target mechanism ensures total issuance never exceeds configured rate diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol new file mode 100644 index 000000000..8e5fbeeb4 --- /dev/null +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -0,0 +1,1216 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { + TargetIssuancePerBlock, + Allocation, + AllocationTarget, + DistributionState, + SelfMintingEventMode +} from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceAllocationAdministration } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol"; +import { IIssuanceAllocationStatus } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol"; +import { IIssuanceAllocationData } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol"; +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { BaseUpgradeable } from "../common/BaseUpgradeable.sol"; +import { ReentrancyGuardTransient } from 
"@openzeppelin/contracts/utils/ReentrancyGuardTransient.sol"; +import { IERC165 } from "@openzeppelin/contracts/utils/introspection/IERC165.sol"; + +// solhint-disable-next-line no-unused-import +import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol"; // Used by @inheritdoc + +/** + * @title IssuanceAllocator + * @author Edge & Node + * @notice This contract is responsible for allocating token issuance to different components + * of the protocol. It calculates issuance for all targets based on their configured rates + * (tokens per block) and handles minting for allocator-minting targets. + * + * @dev The contract maintains a 100% allocation invariant through a default target mechanism: + * - A default target exists at targetAddresses[0] (initialized to address(0)) + * - The default target automatically receives any unallocated portion of issuance + * - Total allocation across all targets always equals issuancePerBlock (tracked as absolute rates) + * - The default target address can be changed via setDefaultTarget() + * - When the default address is address(0), this 'unallocated' portion is not minted + * - Regular targets cannot be set as the default target address + * + * @dev The contract supports two types of allocation for each target: + * 1. Allocator-minting allocation: The IssuanceAllocator calculates and mints tokens directly to targets + * for this portion of their allocation. + * + * 2. Self-minting allocation: The IssuanceAllocator calculates issuance but does not mint tokens directly. + * Instead, targets are expected to call `getTargetIssuancePerBlock` to determine their self-minting + * issuance amount and mint tokens themselves. This feature is primarily intended for backwards + * compatibility with existing contracts like the RewardsManager. + * + * Each target can have both allocator-minting and self-minting allocations. 
New targets are expected + * to use allocator-minting allocation to provide more robust control over token issuance through + * the IssuanceAllocator. The self-minting allocation is intended only for backwards compatibility + * with existing contracts. + * + * @dev Pause Behavior: + * - Allocator-minting: Completely suspended during pause. No tokens minted, lastDistributionBlock frozen. + * When unpaused, distributes retroactively using current rates for entire undistributed period. (Distribution will be triggered by calling distributeIssuance() when not paused.) + * - Self-minting: Continues tracking via events and accumulation during pause. Accumulated self-minting + * reduces allocator-minting budget when distribution resumes, ensuring total issuance conservation. + * - Ongoing accumulation: Once accumulation starts (during pause), continues through any unpaused + * periods until distribution clears it, preventing loss of self-minting allowances across pause cycles. + * - Tracking divergence: lastSelfMintingBlock advances during pause (for allowance tracking) while + * lastDistributionBlock stays frozen (no allocator-minting). This is intentional and correct. + * + * @dev Issuance Accounting Invariants: + * The contract maintains strict accounting to ensure total token issuance never exceeds the configured + * issuancePerBlock rate over any time period. This section provides the mathematical foundation for + * understanding the relationship between self-minting and allocator-minting. + * + * Key Invariants: + * 1. Allocation Completeness: For all blocks b, totalAllocatorRate_b + totalSelfMintingRate_b = issuancePerBlock_b + * This ensures 100% of issuance is always allocated across all targets. + * + * 2. Self-Minting Accumulation: For any undistributed block range [fromBlock, toBlock]: + * selfMintingOffset = Σ(totalSelfMintingRate_b) for all b in range + * where totalSelfMintingRate_b is the end-state rate for block b. + * + * 3. 
Rate Constraint: For all blocks b, totalSelfMintingRate_b ≤ issuancePerBlock_b + * This follows from invariant (1) since 0 ≤ totalAllocatorRate_b. + * + * 4. Issuance Upper Bound: For any distribution period with blocks = toBlock - fromBlock + 1: + * Let issuancePerBlock_final = current issuancePerBlock at distribution time + * + * From invariants (2) and (3): + * selfMintingOffset ≤ Σ(issuancePerBlock_b) + * + * Allocator-minting budget for period: + * available = max(0, issuancePerBlock_final * blocks - selfMintingOffset) + * + * Total minted (self + allocator) for period: + * ≤ max(selfMintingOffset, issuancePerBlock_final * blocks) + * ≤ Σ(issuancePerBlock_b) + * + * Therefore, total issuance never exceeds the sum of configured rates during the period. + * + * 5. Offset Reconciliation: During pending distribution, selfMintingOffset is adjusted to account for + * the period's issuance budget. When distribution catches up to current block, the offset is cleared. + * Any remaining offset when cleared represents self-minting that occurred beyond what the final + * issuancePerBlock rate would allow for the period. This is acceptable because: + * a) Self-minting targets were operating under rates that were valid at the time + * b) The total minted still respects the Σ(issuancePerBlock_b) bound (invariant 4) + * c) Clearing the offset prevents it from affecting future distributions + * d) The SelfMintingOffsetReconciled event provides visibility into all offset adjustments + * + * This design ensures that even when issuancePerBlock or allocation rates change over time, and even + * when self-minting targets mint independently, the total tokens minted never exceeds the sum of + * configured issuance rates during the period. + * + * @dev There are a number of scenarios where the IssuanceAllocator could run into issues, including: + * 1. The targetAddresses array could grow large enough that it exceeds the gas limit when calling distributeIssuance. + * 2. 
When notifying targets of allocation changes the calls to `beforeIssuanceAllocationChange` could exceed the gas limit. + * 3. Target contracts could revert when notifying them of changes via `beforeIssuanceAllocationChange`. + * While in practice the IssuanceAllocator is expected to have a relatively small number of trusted targets, and the + * gas limit is expected to be high enough to handle the above scenarios, the following would allow recovery: + * 1. The contract can be paused, which can help make the recovery process easier to manage. + * 2. The GOVERNOR_ROLE can directly trigger change notification to individual targets. As there is per target + * tracking of the lastChangeNotifiedBlock, this can reduce the gas cost of other operations and allow + * for graceful recovery. + * 3. If a target reverts when notifying it of changes or notifying it is too expensive, the GOVERNOR_ROLE can use `forceTargetNoChangeNotificationBlock()` + * to skip notifying that particular target of changes. + * + * In combination these should allow recovery from gas limit issues or malfunctioning targets, with fine-grained control over + * which targets are notified of changes and when. + * + * @dev Reentrancy Protection: + * The contract code is designed to be reentrant-safe and should be carefully reviewed and maintained + * to preserve this property. However, reentrancy guards (using transient storage per EIP-1153) are + * applied to governance functions that modify configuration or state as an additional layer of defense. + * This provides protection against potential issues if the multi-sig governor role were to have known + * signatures that could be exploited by malicious actors to trigger reentrant calls. + * + * The `distributeIssuance()` function intentionally does NOT have a reentrancy guard to allow + * legitimate use cases where targets call it during notifications (e.g., to claim pending issuance + * before allocation changes). 
This is safe because distributeIssuance() has built-in block-tracking + * protection (preventing double-distribution in the same block), makes no external calls that could + * expose inconsistent state, and does not modify allocations. + * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. + */ +contract IssuanceAllocator is + BaseUpgradeable, + ReentrancyGuardTransient, + IIssuanceAllocationDistribution, + IIssuanceAllocationAdministration, + IIssuanceAllocationStatus, + IIssuanceAllocationData +{ + // -- Namespaced Storage -- + + /// @notice ERC-7201 storage location for IssuanceAllocator + bytes32 private constant ISSUANCE_ALLOCATOR_STORAGE_LOCATION = + // solhint-disable-next-line gas-small-strings + keccak256(abi.encode(uint256(keccak256("graphprotocol.storage.IssuanceAllocator")) - 1)) & + ~bytes32(uint256(0xff)); + + /// @notice Main storage structure for IssuanceAllocator using ERC-7201 namespaced storage + /// @param issuancePerBlock Total issuance per block across all targets + /// @param lastDistributionBlock Last block when allocator-minting issuance was distributed + /// @param lastSelfMintingBlock Last block when self-minting was advanced + /// @param selfMintingOffset Self-minting that offsets allocator-minting budget (accumulates during pause, clears on distribution) + /// @param allocationTargets Mapping of target addresses to their allocation data + /// @param targetAddresses Array of all target addresses (including default target at index 0) + /// @param totalSelfMintingRate Total self-minting rate (tokens per block) across all targets + /// @param selfMintingEventMode Controls self-minting event emission behavior (PerTarget, Aggregate, or None) + /// @dev Design invariant: totalAllocatorRate + totalSelfMintingRate == issuancePerBlock (always 100% allocated) + /// @dev Design invariant: targetAddresses[0] is always the default target address + /// @dev Design 
invariant: 1 <= targetAddresses.length (default target always exists) + /// @dev Design invariant: default target (targetAddresses[0]) is automatically adjusted to maintain 100% total + /// @custom:storage-location erc7201:graphprotocol.storage.IssuanceAllocator + struct IssuanceAllocatorData { + uint256 issuancePerBlock; + uint256 lastDistributionBlock; + uint256 lastSelfMintingBlock; + uint256 selfMintingOffset; + mapping(address => AllocationTarget) allocationTargets; + address[] targetAddresses; + uint256 totalSelfMintingRate; + SelfMintingEventMode selfMintingEventMode; + } + + /** + * @notice Returns the storage struct for IssuanceAllocator + * @return $ contract storage + */ + function _getIssuanceAllocatorStorage() private pure returns (IssuanceAllocatorData storage $) { + // solhint-disable-previous-line use-natspec + // Solhint does not support $ return variable in natspec + + bytes32 slot = ISSUANCE_ALLOCATOR_STORAGE_LOCATION; + // solhint-disable-next-line no-inline-assembly + assembly { + $.slot := slot + } + } + + // -- Custom Errors -- + + /// @notice Thrown when attempting to add a target with zero address + error TargetAddressCannotBeZero(); + + /// @notice Thrown when the total allocation would exceed available budget + /// @param requested The total requested allocation (allocator + self minting) + /// @param available The available budget for this target + error InsufficientAllocationAvailable(uint256 requested, uint256 available); + + /// @notice Thrown when attempting to decrease issuance rate without sufficient unallocated budget + /// @param oldRate The current issuance rate + /// @param newRate The proposed new issuance rate + /// @param unallocated The unallocated budget available to absorb the decrease + error InsufficientUnallocatedForRateDecrease(uint256 oldRate, uint256 newRate, uint256 unallocated); + + /// @notice Thrown when a target does not support the IIssuanceTarget interface + /// @param target The target address that doesn't 
support the interface + error TargetDoesNotSupportIIssuanceTarget(address target); + + /// @notice Thrown when toBlockNumber is out of valid range for accumulation + /// @param toBlock The invalid block number provided + /// @param minBlock The minimum valid block number (lastDistributionBlock) + /// @param maxBlock The maximum valid block number (current block) + error ToBlockOutOfRange(uint256 toBlock, uint256 minBlock, uint256 maxBlock); + + /// @notice Thrown when attempting to set allocation for the default target + /// @param defaultTarget The address of the default target + error CannotSetAllocationForDefaultTarget(address defaultTarget); + + /// @notice Thrown when attempting to set default target address to a normally allocated target + /// @param target The target address that already has an allocation + error CannotSetDefaultToAllocatedTarget(address target); + + // -- Events -- + + /// @notice Emitted when issuance is distributed to a target + /// @param target The address of the target that received issuance + /// @param amount The amount of tokens distributed + /// @param fromBlock First block included in this distribution (inclusive) + /// @param toBlock Last block included in this distribution (inclusive). 
Range is [fromBlock, toBlock] + event IssuanceDistributed( + address indexed target, + uint256 amount, + uint256 indexed fromBlock, + uint256 indexed toBlock + ); // solhint-disable-line gas-indexed-events + + /// @notice Emitted when a target's allocation is updated + /// @param target The address of the target whose allocation was updated + /// @param newAllocatorMintingRate The new allocator-minting rate (tokens per block) for the target + /// @param newSelfMintingRate The new self-minting rate (tokens per block) for the target + event TargetAllocationUpdated(address indexed target, uint256 newAllocatorMintingRate, uint256 newSelfMintingRate); // solhint-disable-line gas-indexed-events + // Do not need to index rate values + + /// @notice Emitted when the issuance per block is updated + /// @param oldIssuancePerBlock The previous issuance per block amount + /// @param newIssuancePerBlock The new issuance per block amount + event IssuancePerBlockUpdated(uint256 oldIssuancePerBlock, uint256 newIssuancePerBlock); // solhint-disable-line gas-indexed-events + // Do not need to index issuance per block values + + /// @notice Emitted when the default target is updated + /// @param oldAddress The previous default target address + /// @param newAddress The new default target address + event DefaultTargetUpdated(address indexed oldAddress, address indexed newAddress); + + /// @notice Emitted when self-minting allowance is calculated for a target + /// @param target The address of the target with self-minting allocation + /// @param amount The amount of tokens available for self-minting + /// @param fromBlock First block included in this allowance period (inclusive) + /// @param toBlock Last block included in this allowance period (inclusive). 
Range is [fromBlock, toBlock] + event IssuanceSelfMintAllowance( + address indexed target, + uint256 amount, + uint256 indexed fromBlock, + uint256 indexed toBlock + ); // solhint-disable-line gas-indexed-events + + /* solhint-disable gas-indexed-events */ + /// @notice Emitted when self-minting offset is reconciled during pending distribution + /// @param offsetBefore The self-minting offset before reconciliation + /// @param offsetAfter The self-minting offset after reconciliation (0 when caught up to current block) + /// @param totalForPeriod The total issuance budget for the distributed period + /// @param fromBlock First block in the distribution period (inclusive) + /// @param toBlock Last block in the distribution period (inclusive) + /// @dev This event provides visibility into the accounting reconciliation between self-minting + /// and allocator-minting budgets during pending distribution. When offsetAfter is 0, the contract + /// has fully caught up with distribution. When offsetAfter > 0, there remains accumulated offset + /// that will be applied to future distributions. + event SelfMintingOffsetReconciled( + uint256 offsetBefore, + uint256 offsetAfter, + uint256 totalForPeriod, + uint256 indexed fromBlock, + uint256 indexed toBlock + ); + /* solhint-enable gas-indexed-events */ + + /* solhint-disable gas-indexed-events */ + /// @notice Emitted when self-minting offset accumulates during pause or catch-up + /// @param offsetBefore The self-minting offset before accumulation + /// @param offsetAfter The self-minting offset after accumulation + /// @param fromBlock First block in the accumulation period (inclusive) + /// @param toBlock Last block in the accumulation period (inclusive) + /// @dev This event provides visibility into offset growth during pause periods or while catching up + /// after unpause. Together with SelfMintingOffsetReconciled, provides complete accounting of all + /// offset changes. 
+ event SelfMintingOffsetAccumulated( + uint256 offsetBefore, + uint256 offsetAfter, + uint256 indexed fromBlock, + uint256 indexed toBlock + ); + /* solhint-enable gas-indexed-events */ + + /// @notice Emitted when self-minting allowance is calculated in aggregate mode + /// @param totalAmount The total amount of tokens available for self-minting across all targets + /// @param fromBlock First block included in this allowance period (inclusive) + /// @param toBlock Last block included in this allowance period (inclusive) + /// @dev This event is emitted when selfMintingEventMode is Aggregate, providing a single event + /// instead of per-target events to reduce gas costs + event IssuanceSelfMintAllowanceAggregate(uint256 totalAmount, uint256 indexed fromBlock, uint256 indexed toBlock); // solhint-disable-line gas-indexed-events + + /// @notice Emitted when self-minting event mode is changed + /// @param oldMode The previous event emission mode + /// @param newMode The new event emission mode + event SelfMintingEventModeUpdated(SelfMintingEventMode oldMode, SelfMintingEventMode newMode); + + // -- Constructor -- + + /** + * @notice Constructor for the IssuanceAllocator contract + * @dev This contract is upgradeable, but we use the constructor to pass the Graph Token address + * to the base contract. + * @param _graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address _graphToken) BaseUpgradeable(_graphToken) {} + + // -- Initialization -- + + /** + * @notice Initialize the IssuanceAllocator contract + * @param _governor Address that will have the GOVERNOR_ROLE + * @dev Initializes with a default target at index 0 set to address(0) + * @dev Default target will receive all unallocated issuance (initially 0 until rate is set) + * @dev lastDistributionBlock is set to block.number as a safety guard against pausing before + * configuration. lastSelfMintingBlock defaults to 0. issuancePerBlock is 0. 
Once + * setIssuancePerBlock() is called, it triggers _distributeIssuance() which updates + * lastDistributionBlock to current block, establishing the starting point for issuance tracking. + * @dev selfMintingEventMode is initialized to PerTarget + */ + function initialize(address _governor) external virtual initializer { + __BaseUpgradeable_init(_governor); + + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + // Initialize default target at index 0 with address(0) + // Rates are 0 initially; default gets remainder when issuancePerBlock is set + $.targetAddresses.push(address(0)); + + $.selfMintingEventMode = SelfMintingEventMode.PerTarget; + + // To guard against extreme edge case of pausing before setting issuancePerBlock, we initialize + // lastDistributionBlock to block.number. This should be updated to the correct starting block + // during configuration by governance. + $.lastDistributionBlock = block.number; + } + + // -- Core Functionality -- + + /** + * @inheritdoc ERC165Upgradeable + * @dev Supports the four IssuanceAllocator sub-interfaces + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return + interfaceId == type(IIssuanceAllocationDistribution).interfaceId || + interfaceId == type(IIssuanceAllocationAdministration).interfaceId || + interfaceId == type(IIssuanceAllocationStatus).interfaceId || + interfaceId == type(IIssuanceAllocationData).interfaceId || + super.supportsInterface(interfaceId); + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + * @dev Implementation details: + * - For allocator-minting targets, tokens are minted and transferred directly to targets based on their allocation rate + * - For self-minting targets (like the legacy RewardsManager), it does not mint tokens directly. 
Instead, these contracts are expected to handle minting themselves + * - The self-minting allocation is intended only for backwards compatibility with existing contracts and should not be used for new targets. New targets should use allocator-minting allocation to ensure robust control of token issuance by the IssuanceAllocator + * @dev Pause behavior: + * - When paused: Self-minting allowances tracked via events/accumulation, but no allocator-minting tokens distributed. + * Returns lastDistributionBlock (frozen at pause point). lastSelfMintingBlock advances to current block. + * - When unpaused: Normal distribution if no accumulated self-minting, otherwise retroactive distribution + * using current rates for entire undistributed period, with accumulated self-minting reducing allocator budget. + * - Unless paused, always advances lastDistributionBlock to block.number, even if no issuance to distribute. + * @dev Reentrancy: This function intentionally does NOT have a reentrancy guard to allow targets to + * legitimately call it during notifications (e.g., to claim pending issuance before their allocation changes). + * This is safe because the function has built-in block-tracking protection that prevents double-distribution + * within the same block, makes no external calls that could expose inconsistent state, and does not modify allocations. + */ + function distributeIssuance() external override returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + // Optimize common case: if already distributed this block, return immediately (~60% gas savings). + // Multiple targets may call this in the same block; first call distributes, rest are no-ops. + return $.lastDistributionBlock == block.number ? block.number : _distributeIssuance(); + } + + /** + * @notice Advances self-minting block and emits allowance events + * @dev When paused, accumulates self-minting amounts. 
This accumulation reduces the allocator-minting + * budget when distribution resumes, ensuring total issuance stays within bounds. + * Emits self-minting allowance events based on selfMintingEventMode (also while paused, so allowance tracking continues). + * Called by _distributeIssuance() which anyone can call. + * Optimized for no-op cases: very cheap when already at current block. + */ + function _advanceSelfMintingBlock() private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 previousBlock = $.lastSelfMintingBlock; + if (previousBlock == block.number) return; + + uint256 blocks = block.number - previousBlock; + uint256 fromBlock = previousBlock + 1; + + // Accumulate if currently paused OR if there's existing accumulated balance. + // Once accumulation starts (during pause), continue through any unpaused periods + // until distribution clears the accumulation. This is conservative and allows + // better recovery when distribution is delayed through pause/unpause cycles. + uint256 offsetBefore = $.selfMintingOffset; + if (paused() || 0 < offsetBefore) { + $.selfMintingOffset += $.totalSelfMintingRate * blocks; + + // Emit accumulation event whenever offset changes + if (offsetBefore != $.selfMintingOffset) { + emit SelfMintingOffsetAccumulated(offsetBefore, $.selfMintingOffset, fromBlock, block.number); + } + } + $.lastSelfMintingBlock = block.number; + + // Emit self-minting allowance events based on mode + if (0 < $.totalSelfMintingRate) { + if ($.selfMintingEventMode == SelfMintingEventMode.PerTarget) { + // Emit per-target events (highest gas cost) + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.selfMintingRate) { + uint256 amount = targetData.selfMintingRate * blocks; + emit IssuanceSelfMintAllowance(target, amount, fromBlock, block.number); + } + } + } else if ($.selfMintingEventMode ==
SelfMintingEventMode.Aggregate) { + // Emit single aggregated event (lower gas cost) + uint256 totalAmount = $.totalSelfMintingRate * blocks; + emit IssuanceSelfMintAllowanceAggregate(totalAmount, fromBlock, block.number); + } + // else None: skip event emission entirely (lowest gas cost) + } + } + + /** + * @notice Internal implementation for `distributeIssuance` + * @dev Handles the actual distribution logic. + * @dev Always calls _advanceSelfMintingBlock() first (advances lastSelfMintingBlock, tracks self-minting). + * @dev If paused: Returns lastDistributionBlock without distributing allocator-minting (frozen state). + * @dev If unpaused: Chooses distribution path based on accumulated self-minting: + * - With accumulation: retroactive distribution path (current rates, reduced allocator budget) + * - Without accumulation: normal distribution path (simple per-block minting) + * @return Block number distributed to + */ + function _distributeIssuance() private returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + _advanceSelfMintingBlock(); + + if (paused()) return $.lastDistributionBlock; + + return 0 < $.selfMintingOffset ? 
_distributePendingIssuance(block.number) : _performNormalDistribution(); + } + + /** + * @notice Performs normal (non-pending) issuance distribution + * @dev Distributes allocator-minting issuance to all targets based on their rates + * @dev Assumes contract is not paused and pending issuance has already been distributed + * @return Block number distributed to + */ + function _performNormalDistribution() private returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 blocks = block.number - $.lastDistributionBlock; + if (blocks == 0) return $.lastDistributionBlock; + + uint256 fromBlock = $.lastDistributionBlock + 1; + + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + if (target == address(0)) continue; + + AllocationTarget storage targetData = $.allocationTargets[target]; + if (0 < targetData.allocatorMintingRate) { + uint256 amount = targetData.allocatorMintingRate * blocks; + GRAPH_TOKEN.mint(target, amount); + emit IssuanceDistributed(target, amount, fromBlock, block.number); + } + } + + $.lastDistributionBlock = block.number; + return block.number; + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function distributePendingIssuance() external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (uint256) { + return _distributePendingIssuance(block.number); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function distributePendingIssuance( + uint256 toBlockNumber + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (uint256) { + return _distributePendingIssuance(toBlockNumber); + } + + /** + * @notice Internal implementation for distributing pending accumulated allocator-minting issuance + * @param toBlockNumber Block number to distribute up to + * @dev Distributes allocator-minting issuance for undistributed period using current rates, + * retroactively applied from lastDistributionBlock to toBlockNumber 
(inclusive). + * Called when 0 < selfMintingOffset, which occurs after pause periods or delayed distribution. + * @dev Available budget = max(0, issuancePerBlock * blocks - selfMintingOffset). + * Distribution cases: + * (1) available < allocatedTotal: proportional distribution to non-default, default gets zero + * (2) allocatedTotal <= available: full rates to non-default, remainder to default + * Where allocatedTotal is sum of non-default allocator rates * blocks. + * @return Block number that issuance was distributed up to + */ + function _distributePendingIssuance(uint256 toBlockNumber) private returns (uint256) { + _advanceSelfMintingBlock(); + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + require( + $.lastDistributionBlock <= toBlockNumber && toBlockNumber <= block.number, // solhint-disable-line gas-strict-inequalities + ToBlockOutOfRange(toBlockNumber, $.lastDistributionBlock, block.number) + ); + + uint256 blocks = toBlockNumber - $.lastDistributionBlock; + if (blocks == 0) return toBlockNumber; + + // Overflow is not possible with reasonable parameters. For example, with issuancePerBlock + // at 1e24 (1 million GRT with 18 decimals) and blocks at 1e9 (hundreds of years), the product is + // ~1e33, well below uint256 max (~1e77). Similar multiplications throughout this contract operate + // under the same range assumptions. + uint256 totalForPeriod = $.issuancePerBlock * blocks; + uint256 selfMintingOffset = $.selfMintingOffset; + + uint256 available = selfMintingOffset < totalForPeriod ? totalForPeriod - selfMintingOffset : 0; + + if (0 < available) { + // Calculate non-default allocated rate using the allocation invariant. 
+ // Since totalAllocatorRate + totalSelfMintingRate == issuancePerBlock (100% invariant), + // and default target is part of totalAllocatorRate, we can derive: + // allocatedRate = issuancePerBlock - totalSelfMintingRate - defaultAllocatorRate + address defaultAddress = $.targetAddresses[0]; + AllocationTarget storage defaultTarget = $.allocationTargets[defaultAddress]; + uint256 allocatedRate = $.issuancePerBlock - $.totalSelfMintingRate - defaultTarget.allocatorMintingRate; + + uint256 allocatedTotal = allocatedRate * blocks; + + if (available < allocatedTotal) _distributePendingProportionally(available, allocatedRate, toBlockNumber); + else _distributePendingWithFullRate(blocks, available, allocatedTotal, toBlockNumber); + } + + $.lastDistributionBlock = toBlockNumber; + _reconcileSelfMintingOffset(toBlockNumber, blocks, totalForPeriod, selfMintingOffset); + return toBlockNumber; + } + + /** + * @notice Reconciles self-minting offset after distribution and emits event if changed + * @param toBlockNumber Block number distributed to + * @param blocks Number of blocks in the distribution period + * @param totalForPeriod Total issuance budget for the period + * @param selfMintingOffset Self-minting offset before reconciliation + * @dev Updates accumulated self-minting after distribution. + * Subtracts the period budget used (min of accumulated and totalForPeriod). + * When caught up to current block, clears all since nothing remains to distribute. + */ + function _reconcileSelfMintingOffset( + uint256 toBlockNumber, + uint256 blocks, + uint256 totalForPeriod, + uint256 selfMintingOffset + ) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 newOffset = toBlockNumber == block.number + ? 0 + : (totalForPeriod < selfMintingOffset ? 
selfMintingOffset - totalForPeriod : 0); + + // Emit reconciliation event whenever offset changes during pending distribution + if (selfMintingOffset != newOffset) { + emit SelfMintingOffsetReconciled( + selfMintingOffset, + newOffset, + totalForPeriod, + toBlockNumber - blocks + 1, + toBlockNumber + ); + } + + $.selfMintingOffset = newOffset; + } + + /** + * @notice Distribute pending issuance with full rates to non-default targets + * @param blocks Number of blocks in the distribution period + * @param available Total available allocator-minting budget for the period + * @param allocatedTotal Total amount allocated to non-default targets at full rate + * @param toBlockNumber Block number distributing to + * @dev Sufficient budget: non-default targets get full rates, default gets remainder + */ + function _distributePendingWithFullRate( + uint256 blocks, + uint256 available, + uint256 allocatedTotal, + uint256 toBlockNumber + ) internal { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 fromBlock = $.lastDistributionBlock + 1; + + // Give non-default targets their full rates + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.allocatorMintingRate) { + uint256 amount = targetData.allocatorMintingRate * blocks; + GRAPH_TOKEN.mint(target, amount); + emit IssuanceDistributed(target, amount, fromBlock, toBlockNumber); + } + } + + // Default target gets remainder (may be 0 if exactly matched) + uint256 remainingForDefault = available - allocatedTotal; + if (0 < remainingForDefault) { + address defaultAddress = $.targetAddresses[0]; + if (defaultAddress != address(0)) { + GRAPH_TOKEN.mint(defaultAddress, remainingForDefault); + emit IssuanceDistributed(defaultAddress, remainingForDefault, fromBlock, toBlockNumber); + } + } + } + + /** + * @notice Distribute pending issuance proportionally among 
non-default targets + * @param available Total available allocator-minting budget for the period + * @param allocatedRate Total rate allocated to non-default targets + * @param toBlockNumber Block number distributing to + * @dev Insufficient budget: non-default targets get proportional shares, default gets zero + * @dev Proportional distribution may result in rounding loss (dust), which is acceptable + */ + function _distributePendingProportionally( + uint256 available, + uint256 allocatedRate, + uint256 toBlockNumber + ) internal { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + // Defensive: prevent division by zero and handle edge cases. Should not be reachable based on + // caller logic (only called when available < allocatedTotal and both available > 0, blocks > 0). + if (allocatedRate == 0 || available == 0) return; + + uint256 fromBlock = $.lastDistributionBlock + 1; + + // Non-default targets get proportional shares (reduced amounts) + // Default is excluded (receives zero) + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.allocatorMintingRate) { + // Proportional distribution using integer division causes rounding loss. + // Since Solidity division always floors (truncates toward zero), this can ONLY lose tokens, + // never over-distribute. The lost tokens (dust) remain unallocated. + // This is acceptable because: + // 1. The amount is negligible (< number of targets) + // 2. It maintains safety (never over-mint) + // 3. 
Alternative of tracking and distributing dust adds complexity without significant benefit + uint256 amount = (available * targetData.allocatorMintingRate) / allocatedRate; + GRAPH_TOKEN.mint(target, amount); + emit IssuanceDistributed(target, amount, fromBlock, toBlockNumber); + } + } + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setIssuancePerBlock( + uint256 newIssuancePerBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setIssuancePerBlock(newIssuancePerBlock, block.number); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - Requires distribution to have reached at least minDistributedBlock + * - This allows configuration changes after calling distributePendingIssuance(blockNumber) while paused + * - Only the default target is notified (target rates don't change, only default target changes) + * - Target rates stay fixed; default target absorbs the change + * - Whenever the rate is changed, the updateL2MintAllowance function _must_ be called on the L1GraphTokenGateway in L1, to ensure the bridge can mint the right amount of tokens + * @dev Rate changes while paused: The new rate applies retroactively to the entire undistributed + * period when distribution resumes. Governance must exercise caution to ensure rates are applied + * to the correct block range. Use distributePendingIssuance(blockNumber) to control precisely + * which block the new rate applies from. 
+ */ + function setIssuancePerBlock( + uint256 newIssuancePerBlock, + uint256 minDistributedBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setIssuancePerBlock(newIssuancePerBlock, minDistributedBlock); + } + + /** + * @notice Internal implementation for setting issuance per block + * @param newIssuancePerBlock New issuance per block + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return True if the value is applied + */ + function _setIssuancePerBlock(uint256 newIssuancePerBlock, uint256 minDistributedBlock) private returns (bool) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + uint256 oldIssuancePerBlock = $.issuancePerBlock; + if (newIssuancePerBlock == oldIssuancePerBlock) return true; + + if (_distributeIssuance() < minDistributedBlock) return false; + + _notifyTarget($.targetAddresses[0]); + + AllocationTarget storage defaultTarget = $.allocationTargets[$.targetAddresses[0]]; + uint256 unallocated = defaultTarget.allocatorMintingRate; + + require( + oldIssuancePerBlock <= newIssuancePerBlock + unallocated, // solhint-disable-line gas-strict-inequalities + InsufficientUnallocatedForRateDecrease(oldIssuancePerBlock, newIssuancePerBlock, unallocated) + ); + + defaultTarget.allocatorMintingRate = unallocated + newIssuancePerBlock - oldIssuancePerBlock; + $.issuancePerBlock = newIssuancePerBlock; + + emit IssuancePerBlockUpdated(oldIssuancePerBlock, newIssuancePerBlock); + + return true; + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setSelfMintingEventMode(SelfMintingEventMode newMode) external onlyRole(GOVERNOR_ROLE) returns (bool) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + SelfMintingEventMode oldMode = $.selfMintingEventMode; + + if (newMode == oldMode) return true; + + $.selfMintingEventMode = newMode; + emit SelfMintingEventModeUpdated(oldMode, newMode); + + return true; + } + + /** + * 
@inheritdoc IIssuanceAllocationAdministration + */ + function getSelfMintingEventMode() external view override returns (SelfMintingEventMode) { + return _getIssuanceAllocatorStorage().selfMintingEventMode; + } + + // -- Target Management -- + + /** + * @notice Internal function to notify a target about an upcoming allocation change + * @dev Uses per-target lastChangeNotifiedBlock to prevent reentrancy and duplicate notifications. + * + * Will revert if the target's beforeIssuanceAllocationChange call fails. + * Use forceTargetNoChangeNotificationBlock to skip notification for malfunctioning targets. + * + * @param target Address of the target to notify + * @return True if notification was sent or already sent for this block. Always returns true for address(0) without notifying. + */ + function _notifyTarget(address target) private returns (bool) { + // Skip notification for zero address (default target when unset) + if (target == address(0)) return true; + + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Check-effects-interactions pattern: check if already notified this block + // solhint-disable-next-line gas-strict-inequalities + if (block.number <= targetData.lastChangeNotifiedBlock) return true; + + // Effect: update the notification block before external calls + targetData.lastChangeNotifiedBlock = block.number; + + // Interactions: make external call after state changes + // This will revert if the target's notification fails + IIssuanceTarget(target).beforeIssuanceAllocationChange(); + return true; + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - The target will be notified at most once per block to prevent reentrancy looping + * - Will revert if target notification reverts + */ + function notifyTarget(address target) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return 
_notifyTarget(target); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - This can be used to enable notification to be sent again (by setting to a past block) or to prevent notification until a future block (by setting to current or future block) + * - Returns the block number that was set, always equal to blockNumber in current implementation + */ + function forceTargetNoChangeNotificationBlock( + address target, + uint256 blockNumber + ) external override onlyRole(GOVERNOR_ROLE) returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Note: No bounds checking on blockNumber is intentional. Governance might need to set + // very high values in unanticipated edge cases or for recovery scenarios. Constraining + // governance flexibility is deemed unnecessary and perhaps counterproductive. + targetData.lastChangeNotifiedBlock = blockNumber; + return blockNumber; + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setTargetAllocation( + IIssuanceTarget target, + uint256 allocatorMintingRate + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setTargetAllocation(address(target), allocatorMintingRate, 0, block.number); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setTargetAllocation( + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setTargetAllocation(address(target), allocatorMintingRate, selfMintingRate, block.number); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - Requires distribution has reached at least minDistributedBlock issuance to change allocation + * - This allows configuration changes while paused by being deliberate about which block to 
distribute to + * - If the new allocations are the same as the current allocations, this function is a no-op + * - If both allocations are 0 and the target doesn't exist, this function is a no-op + * - If both allocations are 0 and the target exists, the target will be removed + * - If any allocation is non-zero and the target doesn't exist, the target will be added + * - Will revert if the total allocation would exceed available capacity (default target + current target allocation) + * - Will revert if attempting to add a target that doesn't support IIssuanceTarget + * @dev Self-minting targets must call getTargetIssuancePerBlock to determine their issuance and mint + * accordingly. See contract header for details on self-minting vs allocator-minting allocation. + */ + function setTargetAllocation( + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate, + uint256 minDistributedBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setTargetAllocation(address(target), allocatorMintingRate, selfMintingRate, minDistributedBlock); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setDefaultTarget( + address newAddress + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setDefaultTarget(newAddress, block.number); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setDefaultTarget( + address newAddress, + uint256 minDistributedBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setDefaultTarget(newAddress, minDistributedBlock); + } + + /** + * @notice Internal implementation for setting default target + * @param newAddress The address to set as the new default target + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return True if the value is applied (including if already the case), false if not applied due to paused state + * @dev The 
default target automatically receives the portion of issuance not allocated to other targets + * @dev This maintains the invariant that total allocation always equals issuancePerBlock + * @dev Reverts if attempting to set to an address that has a normal (non-default) allocation + * @dev Allocation data is copied from the old default to the new default, including lastChangeNotifiedBlock + * @dev No-op if setting to the same address + */ + function _setDefaultTarget(address newAddress, uint256 minDistributedBlock) internal returns (bool) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + address oldAddress = $.targetAddresses[0]; + if (newAddress == oldAddress) return true; + + // Cannot set default target to a normally allocated target + // Check if newAddress is in targetAddresses (excluding index 0 which is the default) + // Note: This is O(n) for the number of targets, which could become expensive as targets increase. + // However, distribution operations already loop through all targets and + // would encounter gas issues first. Recovery mechanisms exist. 
+ for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + require($.targetAddresses[i] != newAddress, CannotSetDefaultToAllocatedTarget(newAddress)); + } + + if (_distributeIssuance() < minDistributedBlock) return false; + + // Notify both old and new addresses of the allocation change + _notifyTarget(oldAddress); + _notifyTarget(newAddress); + + // Preserve the notification block of newAddress before copying old address data + uint256 newAddressNotificationBlock = $.allocationTargets[newAddress].lastChangeNotifiedBlock; + + // Update the default target at index 0 + // This copies allocation data from old to new, including allocatorMintingRate and selfMintingRate + $.targetAddresses[0] = newAddress; + $.allocationTargets[newAddress] = $.allocationTargets[oldAddress]; + delete $.allocationTargets[oldAddress]; + + // Restore the notification block for newAddress (regarded as target-specific, not tied to default status) + $.allocationTargets[newAddress].lastChangeNotifiedBlock = newAddressNotificationBlock; + + emit DefaultTargetUpdated(oldAddress, newAddress); + return true; + } + + /** + * @notice Internal implementation for setting target allocation + * @param target Address of the target to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return True if the value is applied (including if already the case), false if not applied due to paused state + */ + function _setTargetAllocation( + address target, + uint256 allocatorMintingRate, + uint256 selfMintingRate, + uint256 minDistributedBlock + ) internal returns (bool) { + if (!_validateAllocationChange(target, allocatorMintingRate, selfMintingRate)) return true; + + if (_distributeIssuance() < minDistributedBlock) return false; + + _notifyTarget(target); + 
_notifyTarget(_getIssuanceAllocatorStorage().targetAddresses[0]); + + // Total allocation calculation and check is delayed until after notifications. + // Distributing and notifying unnecessarily is harmless, but we need to prevent + // reentrancy from looping and changing allocations mid-calculation. + // (Would not be likely to be exploitable due to only governor being able to + // make a call to set target allocation, but better to be paranoid.) + // Validate totals and auto-adjust default allocation BEFORE updating target data + // so we can read the old allocation values + _validateAndUpdateTotalAllocations(target, allocatorMintingRate, selfMintingRate); + + // Then update the target's allocation data + _updateTargetAllocationData(target, allocatorMintingRate, selfMintingRate); + + emit TargetAllocationUpdated(target, allocatorMintingRate, selfMintingRate); + return true; + } + + /** + * @notice Validates allocation change for a target + * @param target Address of the target to validate + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @return True if validation passes and allocation change is needed, false if allocation is already set to these values + * @dev Reverts if target is address(0), default target, or doesn't support IIssuanceTarget (for non-zero rates) + */ + function _validateAllocationChange( + address target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) private view returns (bool) { + require(target != address(0), TargetAddressCannotBeZero()); + + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + require(target != $.targetAddresses[0], CannotSetAllocationForDefaultTarget($.targetAddresses[0])); + + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (targetData.allocatorMintingRate == allocatorMintingRate && targetData.selfMintingRate == selfMintingRate) + return 
false; // No change needed + + if (allocatorMintingRate != 0 || selfMintingRate != 0) + require( + IERC165(target).supportsInterface(type(IIssuanceTarget).interfaceId), + TargetDoesNotSupportIIssuanceTarget(target) + ); + + return true; + } + + /** + * @notice Updates global allocation totals and auto-adjusts default target to maintain 100% invariant + * @param target Address of the target being updated + * @param allocatorMintingRate New allocator-minting rate for the target (tokens per block) + * @param selfMintingRate New self-minting rate for the target (tokens per block) + * @dev The default target (at targetAddresses[0]) is automatically adjusted to ensure total allocation equals issuancePerBlock + * @dev This function is called BEFORE the target's allocation data has been updated so we can read old values + */ + function _validateAndUpdateTotalAllocations( + address target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + AllocationTarget storage defaultTarget = $.allocationTargets[$.targetAddresses[0]]; + + // Calculations occur after notifications in the caller to prevent reentrancy issues + + // availableRate comprises the default target's current allocator-minting rate, + // the target's current allocator-minting rate, and the target's current self-minting rate. + // This maintains the 100% allocation invariant by calculating how much can be reallocated + // to the target without exceeding total available allocation. 
+ uint256 availableRate = defaultTarget.allocatorMintingRate + + targetData.allocatorMintingRate + + targetData.selfMintingRate; + require( + allocatorMintingRate + selfMintingRate <= availableRate, // solhint-disable-line gas-strict-inequalities + InsufficientAllocationAvailable(allocatorMintingRate + selfMintingRate, availableRate) + ); + + defaultTarget.allocatorMintingRate = availableRate - allocatorMintingRate - selfMintingRate; + $.totalSelfMintingRate = $.totalSelfMintingRate - targetData.selfMintingRate + selfMintingRate; + } + + /** + * @notice Sets target allocation values and adds/removes target from active list + * @param target Address of the target being updated + * @param allocatorMintingRate New allocator-minting rate for the target (tokens per block) + * @param selfMintingRate New self-minting rate for the target (tokens per block) + * @dev This function is never called for the default target (at index 0), which is handled separately + */ + function _updateTargetAllocationData( + address target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Internal design invariants: + // - targetAddresses[0] is always the default target and is never removed + // - targetAddresses[1..] contains all non-default targets with explicitly set non-zero allocations + // - targetAddresses does not contain duplicates + // - allocationTargets mapping contains allocation data for all targets in targetAddresses + // - default target is automatically adjusted by _validateAndUpdateTotalAllocations + // - Governance actions can create allocationTarget mappings with lastChangeNotifiedBlock set for targets not in targetAddresses. This is valid. 
+ // Therefore: + // - Only add a non-default target to the list if it previously had no allocation + // - Remove a non-default target from the list when setting both allocations to 0 + // - Delete allocationTargets mapping entry when removing a target from targetAddresses + // - Do not set lastChangeNotifiedBlock in this function + if (allocatorMintingRate != 0 || selfMintingRate != 0) { + // Add to list if previously had no allocation + if (targetData.allocatorMintingRate == 0 && targetData.selfMintingRate == 0) $.targetAddresses.push(target); + + targetData.allocatorMintingRate = allocatorMintingRate; + targetData.selfMintingRate = selfMintingRate; + } else { + // Remove target completely (from list and mapping) + _removeTarget(target); + } + } + + /** + * @notice Removes target from targetAddresses array and deletes its allocation data + * @param target Address of the target to remove + * @dev Starts at index 1 since index 0 is always the default target and should never be removed + * @dev Uses swap-and-pop for gas efficiency + */ + function _removeTarget(address target) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + if ($.targetAddresses[i] == target) { + $.targetAddresses[i] = $.targetAddresses[$.targetAddresses.length - 1]; + $.targetAddresses.pop(); + delete $.allocationTargets[target]; + break; + } + } + } + + // -- View Functions -- + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getIssuancePerBlock() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().issuancePerBlock; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getDistributionState() external view override returns (DistributionState memory) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + return + DistributionState({ + lastDistributionBlock: $.lastDistributionBlock, + lastSelfMintingBlock: $.lastSelfMintingBlock, + 
selfMintingOffset: $.selfMintingOffset + }); + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargetCount() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().targetAddresses.length; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargets() external view override returns (address[] memory) { + return _getIssuanceAllocatorStorage().targetAddresses; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargetAt(uint256 index) external view override returns (address) { + return _getIssuanceAllocatorStorage().targetAddresses[index]; + } + + /** + * @inheritdoc IIssuanceAllocationData + */ + function getTargetData(address target) external view override returns (AllocationTarget memory) { + return _getIssuanceAllocatorStorage().allocationTargets[target]; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + * @dev Returns assigned allocation regardless of whether target is address(0) or the default. + * @dev For address(0), no minting occurs but the allocation represents the unallocated portion. + * @dev For effective allocations excluding unmintable portion, use getTotalAllocation(). + */ + function getTargetAllocation(address target) external view override returns (Allocation memory) { + AllocationTarget storage targetData = _getIssuanceAllocatorStorage().allocationTargets[target]; + return + Allocation({ + totalAllocationRate: targetData.allocatorMintingRate + targetData.selfMintingRate, + allocatorMintingRate: targetData.allocatorMintingRate, + selfMintingRate: targetData.selfMintingRate + }); + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + * @dev Returns assigned issuance rates regardless of whether target is address(0) or the default. + * @dev For address(0), no minting occurs but rates reflect what would be issued if mintable. 
+ * @dev selfIssuanceBlockAppliedTo reflects the last block for which self-minting allowances have been + * calculated and emitted (lastSelfMintingBlock). This advances continuously, unaffected by pause state. + */ + function getTargetIssuancePerBlock(address target) external view override returns (TargetIssuancePerBlock memory) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + return + TargetIssuancePerBlock({ + allocatorIssuanceRate: targetData.allocatorMintingRate, + allocatorIssuanceBlockAppliedTo: $.lastDistributionBlock, + selfIssuanceRate: targetData.selfMintingRate, + selfIssuanceBlockAppliedTo: $.lastSelfMintingBlock + }); + } + + /** + * @inheritdoc IIssuanceAllocationStatus + * @dev For reporting purposes, if the default target is address(0), its allocation + * @dev is treated as "unallocated" since address(0) cannot receive minting. + * @dev When default is address(0): returns actual allocated amounts (may be less than issuancePerBlock) + * @dev When default is a real address: returns issuancePerBlock + * @dev Note: Internally, the contract always maintains 100% allocation invariant, even when default is address(0) + */ + function getTotalAllocation() external view override returns (Allocation memory allocation) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + // If default is address(0), exclude its allocation from reported totals + // since it does not receive minting (so it is considered unallocated). + // Address(0) will only have non-zero allocation when it is the default target, + // so we can directly subtract zero address allocation. 
+ allocation.totalAllocationRate = $.issuancePerBlock - $.allocationTargets[address(0)].allocatorMintingRate; + allocation.selfMintingRate = $.totalSelfMintingRate; + allocation.allocatorMintingRate = allocation.totalAllocationRate - allocation.selfMintingRate; + } +} diff --git a/packages/issuance/contracts/common/BaseUpgradeable.sol b/packages/issuance/contracts/common/BaseUpgradeable.sol new file mode 100644 index 000000000..ead4f6a4f --- /dev/null +++ b/packages/issuance/contracts/common/BaseUpgradeable.sol @@ -0,0 +1,159 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { Initializable } from "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; +import { PausableUpgradeable } from "@openzeppelin/contracts-upgradeable/utils/PausableUpgradeable.sol"; +import { AccessControlUpgradeable } from "@openzeppelin/contracts-upgradeable/access/AccessControlUpgradeable.sol"; +import { IGraphToken } from "@graphprotocol/interfaces/contracts/contracts/token/IGraphToken.sol"; +import { IPausableControl } from "@graphprotocol/interfaces/contracts/issuance/common/IPausableControl.sol"; + +/** + * @title BaseUpgradeable + * @author Edge & Node + * @notice A base contract that provides role-based access control and pausability. + * + * @dev This contract combines OpenZeppelin's AccessControl and Pausable + * to provide a standardized way to manage access control and pausing functionality. + * It uses ERC-7201 namespaced storage pattern for better storage isolation. + * This contract is abstract and meant to be inherited by other contracts. + * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. 
+ */ +abstract contract BaseUpgradeable is Initializable, AccessControlUpgradeable, PausableUpgradeable, IPausableControl { + // -- Constants -- + + /// @notice One million - used as the denominator for values provided as Parts Per Million (PPM) + /// @dev This constant represents 1,000,000 and serves as the denominator when working with + /// PPM values. For example, 50% would be represented as 500,000 PPM, calculated as + /// (500,000 / MILLION) = 0.5 = 50% + uint256 public constant MILLION = 1_000_000; + + // -- Role Constants -- + + /** + * @notice Role identifier for governor accounts + * @dev Governors have the highest level of access and can: + * - Grant and revoke roles within the established hierarchy + * - Perform administrative functions and system configuration + * - Set critical parameters and upgrade contracts + * Admin of: GOVERNOR_ROLE, PAUSE_ROLE, OPERATOR_ROLE + */ + bytes32 public constant GOVERNOR_ROLE = keccak256("GOVERNOR_ROLE"); + + /** + * @notice Role identifier for pause accounts + * @dev Pause role holders can: + * - Pause and unpause contract operations for emergency situations + * Typically granted to automated monitoring systems or emergency responders. + * Pausing is intended for quick response to potential threats, and giving time for investigation and resolution (potentially with governance intervention). 
+ * Admin: GOVERNOR_ROLE + */ + bytes32 public constant PAUSE_ROLE = keccak256("PAUSE_ROLE"); + + /** + * @notice Role identifier for operator accounts + * @dev Operators can: + * - Perform operational tasks as defined by inheriting contracts + * - Manage roles that are designated as operator-administered + * Admin: GOVERNOR_ROLE + */ + bytes32 public constant OPERATOR_ROLE = keccak256("OPERATOR_ROLE"); + + // -- Immutable Variables -- + + /// @notice The Graph Token contract + /// @custom:oz-upgrades-unsafe-allow state-variable-immutable + IGraphToken internal immutable GRAPH_TOKEN; + + // -- Custom Errors -- + + /// @notice Thrown when attempting to set the Graph Token to the zero address + error GraphTokenCannotBeZeroAddress(); + + /// @notice Thrown when attempting to set the governor to the zero address + error GovernorCannotBeZeroAddress(); + + // -- Constructor -- + + /** + * @notice Constructor for the BaseUpgradeable contract + * @dev This contract is upgradeable, but we use the constructor to set immutable variables + * and disable initializers to prevent the implementation contract from being initialized. 
+ * @param graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address graphToken) { + require(graphToken != address(0), GraphTokenCannotBeZeroAddress()); + GRAPH_TOKEN = IGraphToken(graphToken); + _disableInitializers(); + } + + // -- Initialization -- + + /** + * @notice Internal function to initialize the BaseUpgradeable contract + * @dev This function is used by child contracts to initialize the BaseUpgradeable contract + * @param governor Address that will have the GOVERNOR_ROLE + */ + function __BaseUpgradeable_init(address governor) internal { + // solhint-disable-previous-line func-name-mixedcase + + __AccessControl_init(); + __Pausable_init(); + + __BaseUpgradeable_init_unchained(governor); + } + + /** + * @notice Internal unchained initialization function for BaseUpgradeable + * @dev This function sets up the governor role and role admin hierarchy + * @param governor Address that will have the GOVERNOR_ROLE + */ + function __BaseUpgradeable_init_unchained(address governor) internal { + // solhint-disable-previous-line func-name-mixedcase + + require(governor != address(0), GovernorCannotBeZeroAddress()); + + // Set up role admin hierarchy: + // GOVERNOR is admin of GOVERNOR, PAUSE, and OPERATOR roles + _setRoleAdmin(GOVERNOR_ROLE, GOVERNOR_ROLE); + _setRoleAdmin(PAUSE_ROLE, GOVERNOR_ROLE); + _setRoleAdmin(OPERATOR_ROLE, GOVERNOR_ROLE); + + // Grant initial governor role + _grantRole(GOVERNOR_ROLE, governor); + } + + // -- External Functions -- + + /** + * @inheritdoc IPausableControl + */ + function pause() external override onlyRole(PAUSE_ROLE) { + _pause(); + } + + /** + * @inheritdoc IPausableControl + */ + function unpause() external override onlyRole(PAUSE_ROLE) { + _unpause(); + } + + /** + * @inheritdoc IPausableControl + */ + function paused() public view virtual override(PausableUpgradeable, IPausableControl) returns (bool) { + return super.paused(); + } + + /** + * @notice Check if 
this contract supports a given interface + * @dev Adds support for IPausableControl interface + * @param interfaceId The interface identifier to check + * @return True if the contract supports the interface, false otherwise + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IPausableControl).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/contracts/test/allocate/IssuanceAllocatorTestHarness.sol b/packages/issuance/contracts/test/allocate/IssuanceAllocatorTestHarness.sol new file mode 100644 index 000000000..15d589c6c --- /dev/null +++ b/packages/issuance/contracts/test/allocate/IssuanceAllocatorTestHarness.sol @@ -0,0 +1,52 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { IssuanceAllocator } from "../../allocate/IssuanceAllocator.sol"; + +/** + * @title IssuanceAllocatorTestHarness + * @author Edge & Node + * @notice Test harness to expose internal functions for white-box testing + * @dev This contract allows direct testing of internal distribution functions to achieve 100% coverage + */ +contract IssuanceAllocatorTestHarness is IssuanceAllocator { + /** + * @notice Constructor for the test harness + * @param _graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address _graphToken) IssuanceAllocator(_graphToken) {} + + /** + * @notice Exposes _distributePendingProportionally for testing + * @dev Allows testing of defensive checks and edge cases + * @param available Total available allocator-minting budget for the period + * @param allocatedRate Total rate allocated to non-default targets + * @param toBlockNumber Block number distributing to + */ + function exposed_distributePendingProportionally( + uint256 available, + uint256 allocatedRate, + uint256 toBlockNumber + ) external { + _distributePendingProportionally(available, allocatedRate, 
toBlockNumber); + } + + /** + * @notice Exposes _distributePendingWithFullRate for testing + * @dev Allows testing of edge cases in full rate distribution + * @param blocks Number of blocks in the distribution period + * @param available Total available allocator-minting budget for the period + * @param allocatedTotal Total amount allocated to non-default targets at full rate + * @param toBlockNumber Block number distributing to + */ + function exposed_distributePendingWithFullRate( + uint256 blocks, + uint256 available, + uint256 allocatedTotal, + uint256 toBlockNumber + ) external { + _distributePendingWithFullRate(blocks, available, allocatedTotal, toBlockNumber); + } +} diff --git a/packages/issuance/contracts/test/allocate/MockERC165.sol b/packages/issuance/contracts/test/allocate/MockERC165.sol new file mode 100644 index 000000000..461e0409b --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockERC165.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { IERC165 } from "@openzeppelin/contracts/utils/introspection/IERC165.sol"; + +/** + * @title MockERC165 + * @author Edge & Node + * @dev Minimal implementation of IERC165 for testing + * @notice Used to test interface validation - supports only ERC165, not specific interfaces + */ +contract MockERC165 is IERC165 { + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/issuance/contracts/test/allocate/MockNotificationTracker.sol b/packages/issuance/contracts/test/allocate/MockNotificationTracker.sol new file mode 100644 index 000000000..a33212282 --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockNotificationTracker.sol @@ -0,0 +1,45 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from 
"@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockNotificationTracker + * @author Edge & Node + * @notice A mock contract that tracks notification calls for testing + * @dev Records when beforeIssuanceAllocationChange is called + */ +contract MockNotificationTracker is IIssuanceTarget, ERC165 { + /// @notice Number of times the contract has been notified + uint256 public notificationCount; + + /// @notice Block number of the last notification received + uint256 public lastNotificationBlock; + + /// @notice Emitted when a notification is received + /// @param blockNumber The block number when notification was received + /// @param count The total notification count after this notification + event NotificationReceived(uint256 indexed blockNumber, uint256 indexed count); // solhint-disable-line gas-indexed-events + + /// @inheritdoc IIssuanceTarget + function beforeIssuanceAllocationChange() external override { + ++notificationCount; + lastNotificationBlock = block.number; + emit NotificationReceived(block.number, notificationCount); + } + + /// @inheritdoc IIssuanceTarget + function setIssuanceAllocator(address _issuanceAllocator) external pure override {} + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } + + /// @notice Resets the notification counter and last block to zero + function resetNotificationCount() external { + notificationCount = 0; + lastNotificationBlock = 0; + } +} diff --git a/packages/issuance/contracts/test/allocate/MockReentrantTarget.sol b/packages/issuance/contracts/test/allocate/MockReentrantTarget.sol new file mode 100644 index 000000000..484648805 --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockReentrantTarget.sol @@ -0,0 
+1,96 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceAllocationAdministration } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockReentrantTarget + * @author Edge & Node + * @notice A malicious mock contract that attempts reentrancy attacks for testing + * @dev Used for testing reentrancy protection in IssuanceAllocator + */ +contract MockReentrantTarget is IIssuanceTarget, ERC165 { + /// @notice The issuance allocator to target for reentrancy attacks + address public issuanceAllocator; + /// @notice The configured reentrancy action to perform + ReentrantAction public actionToPerform; + /// @notice Whether reentrancy should be attempted + bool public shouldAttemptReentrancy; + + enum ReentrantAction { + None, + DistributeIssuance, + SetTargetAllocation1Param, + SetTargetAllocation2Param, + SetTargetAllocation3Param, + SetIssuancePerBlock, + SetIssuancePerBlock2Param, + NotifyTarget, + SetDefaultTarget1Param, + SetDefaultTarget2Param, + DistributePendingIssuance0Param, + DistributePendingIssuance1Param + } + + /// @notice Sets the action to perform during reentrancy attempt + /// @param _action The reentrancy action to configure + function setReentrantAction(ReentrantAction _action) external { + actionToPerform = _action; + shouldAttemptReentrancy = _action != ReentrantAction.None; + } + + /// @inheritdoc IIssuanceTarget + function beforeIssuanceAllocationChange() external override { + if (!shouldAttemptReentrancy) return; + + // Attempt reentrancy based on configured action + if (actionToPerform == 
ReentrantAction.DistributeIssuance) { + IIssuanceAllocationDistribution(issuanceAllocator).distributeIssuance(); + } else if (actionToPerform == ReentrantAction.SetTargetAllocation1Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setTargetAllocation( + IIssuanceTarget(address(this)), + 1000 + ); + } else if (actionToPerform == ReentrantAction.SetTargetAllocation2Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setTargetAllocation( + IIssuanceTarget(address(this)), + 1000, + 0 + ); + } else if (actionToPerform == ReentrantAction.SetTargetAllocation3Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setTargetAllocation( + IIssuanceTarget(address(this)), + 1000, + 0, + block.number + ); + } else if (actionToPerform == ReentrantAction.SetIssuancePerBlock) { + IIssuanceAllocationAdministration(issuanceAllocator).setIssuancePerBlock(1000); + } else if (actionToPerform == ReentrantAction.SetIssuancePerBlock2Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setIssuancePerBlock(1000, block.number); + } else if (actionToPerform == ReentrantAction.NotifyTarget) { + IIssuanceAllocationAdministration(issuanceAllocator).notifyTarget(address(this)); + } else if (actionToPerform == ReentrantAction.SetDefaultTarget1Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setDefaultTarget(address(this)); + } else if (actionToPerform == ReentrantAction.SetDefaultTarget2Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setDefaultTarget(address(this), block.number); + } else if (actionToPerform == ReentrantAction.DistributePendingIssuance0Param) { + IIssuanceAllocationAdministration(issuanceAllocator).distributePendingIssuance(); + } else if (actionToPerform == ReentrantAction.DistributePendingIssuance1Param) { + IIssuanceAllocationAdministration(issuanceAllocator).distributePendingIssuance(block.number); + } + } + + /// @inheritdoc IIssuanceTarget + function setIssuanceAllocator(address _issuanceAllocator) 
external override { + issuanceAllocator = _issuanceAllocator; + } + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/contracts/test/allocate/MockRevertingTarget.sol b/packages/issuance/contracts/test/allocate/MockRevertingTarget.sol new file mode 100644 index 000000000..27522e5a4 --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockRevertingTarget.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockRevertingTarget + * @author Edge & Node + * @notice A mock contract that reverts when beforeIssuanceAllocationChange is called + * @dev Used for testing error handling in IssuanceAllocator + */ +contract MockRevertingTarget is IIssuanceTarget, ERC165 { + /// @notice Error thrown when the target reverts intentionally + error TargetRevertsIntentionally(); + /** + * @inheritdoc IIssuanceTarget + */ + function beforeIssuanceAllocationChange() external pure override { + revert TargetRevertsIntentionally(); + } + + /** + * @inheritdoc IIssuanceTarget + */ + function setIssuanceAllocator(address _issuanceAllocator) external pure override { + // No-op + } + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/contracts/test/allocate/MockSimpleTarget.sol b/packages/issuance/contracts/test/allocate/MockSimpleTarget.sol new file mode 100644 index 000000000..311e1f03c --- /dev/null +++ 
b/packages/issuance/contracts/test/allocate/MockSimpleTarget.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockSimpleTarget + * @author Edge & Node + * @notice A simple mock contract that implements IIssuanceTarget for testing + * @dev Used for testing basic functionality in IssuanceAllocator + */ +contract MockSimpleTarget is IIssuanceTarget, ERC165 { + /// @inheritdoc IIssuanceTarget + function beforeIssuanceAllocationChange() external pure override {} + + /// @inheritdoc IIssuanceTarget + function setIssuanceAllocator(address _issuanceAllocator) external pure override {} + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/hardhat.base.config.ts b/packages/issuance/hardhat.base.config.ts new file mode 100644 index 000000000..e4d0cc8bb --- /dev/null +++ b/packages/issuance/hardhat.base.config.ts @@ -0,0 +1,24 @@ +import { hardhatBaseConfig } from '@graphprotocol/toolshed/hardhat' +import type { HardhatUserConfig } from 'hardhat/config' + +// Issuance-specific Solidity configuration with Cancun EVM version +// Based on toolshed solidityUserConfig but with Cancun EVM target +export const issuanceSolidityConfig = { + version: '0.8.27', + settings: { + optimizer: { + enabled: true, + runs: 100, + }, + evmVersion: 'cancun' as const, + }, +} + +// Base configuration for issuance package - inherits from toolshed and overrides Solidity config +export const issuanceBaseConfig = (() => { + const baseConfig = hardhatBaseConfig(require) + return { + ...baseConfig, + solidity: issuanceSolidityConfig, + } as HardhatUserConfig +})() 
diff --git a/packages/issuance/hardhat.config.ts b/packages/issuance/hardhat.config.ts new file mode 100644 index 000000000..f76949af8 --- /dev/null +++ b/packages/issuance/hardhat.config.ts @@ -0,0 +1,26 @@ +import '@nomicfoundation/hardhat-ethers' +import '@typechain/hardhat' +import 'hardhat-contract-sizer' +import '@openzeppelin/hardhat-upgrades' +import '@nomicfoundation/hardhat-verify' + +import type { HardhatUserConfig } from 'hardhat/config' + +import { issuanceBaseConfig } from './hardhat.base.config' + +const config: HardhatUserConfig = { + ...issuanceBaseConfig, + // Main config specific settings + typechain: { + outDir: 'types', + target: 'ethers-v6', + }, + paths: { + sources: './contracts', + tests: './test/tests', + artifacts: './artifacts', + cache: './cache', + }, +} + +export default config diff --git a/packages/issuance/hardhat.coverage.config.ts b/packages/issuance/hardhat.coverage.config.ts new file mode 100644 index 000000000..01ee96e83 --- /dev/null +++ b/packages/issuance/hardhat.coverage.config.ts @@ -0,0 +1,22 @@ +import '@nomicfoundation/hardhat-ethers' +import '@nomicfoundation/hardhat-chai-matchers' +import '@nomicfoundation/hardhat-network-helpers' +import '@openzeppelin/hardhat-upgrades' +import 'hardhat-gas-reporter' +import 'solidity-coverage' + +import { HardhatUserConfig } from 'hardhat/config' + +import { issuanceBaseConfig } from './hardhat.base.config' + +const config: HardhatUserConfig = { + ...issuanceBaseConfig, + paths: { + sources: './contracts', + tests: './test/tests', + artifacts: './coverage/artifacts', + cache: './coverage/cache', + }, +} as HardhatUserConfig + +export default config diff --git a/packages/issuance/package.json b/packages/issuance/package.json new file mode 100644 index 000000000..fbb658193 --- /dev/null +++ b/packages/issuance/package.json @@ -0,0 +1,79 @@ +{ + "name": "@graphprotocol/issuance", + "version": "1.0.0", + "publishConfig": { + "access": "public" + }, + "description": "The Graph Issuance 
Contracts", + "author": "Edge & Node", + "license": "GPL-2.0-or-later", + "main": "index.js", + "exports": { + ".": "./index.js", + "./artifacts/*": "./artifacts/*", + "./contracts/*": "./contracts/*", + "./types": "./types/index.ts", + "./types/*": "./types/*" + }, + "scripts": { + "build": "pnpm build:dep && pnpm build:self", + "build:dep": "pnpm --filter '@graphprotocol/issuance^...' run build:self", + "build:self": "pnpm compile && pnpm build:self:typechain", + "build:coverage": "pnpm build:dep && pnpm build:self:coverage", + "build:self:coverage": "npx hardhat compile --config hardhat.coverage.config.ts && pnpm build:self:typechain", + "build:self:typechain": "bash -c 'missing=$(grep -rL \"static readonly interfaceId\" types/factories --include=\"*__factory.ts\" 2>/dev/null | wc -l); if [ $missing -gt 0 ]; then node -e \"require('\"'\"'@graphprotocol/interfaces/utils'\"'\"').addInterfaceIds('\"'\"'types/factories'\"'\"')\"; fi'", + "clean": "rm -rf artifacts/ types/ forge-artifacts/ cache_forge/ coverage/ cache/ .eslintcache", + "compile": "hardhat compile --quiet", + "test": "pnpm --filter @graphprotocol/issuance-test test", + "test:coverage": "pnpm --filter @graphprotocol/issuance-test run test:coverage", + "lint": "pnpm lint:ts; pnpm lint:sol; pnpm lint:md; pnpm lint:json", + "lint:ts": "eslint '**/*.{js,ts,cjs,mjs,jsx,tsx}' --fix --cache; prettier -w --cache --log-level warn '**/*.{js,ts,cjs,mjs,jsx,tsx}'", + "lint:sol": "solhint --fix --noPrompt --noPoster 'contracts/**/*.sol'; prettier -w --cache --log-level warn 'contracts/**/*.sol'", + "lint:md": "markdownlint --fix --ignore-path ../../.gitignore '**/*.md'; prettier -w --cache --log-level warn '**/*.md'", + "lint:json": "prettier -w --cache --log-level warn '**/*.json'", + "typechain": "hardhat typechain", + "verify": "hardhat verify", + "size": "hardhat size-contracts", + "forge:build": "forge build" + }, + "files": [ + "artifacts/**/*", + "types/**/*", + "contracts/**/*", + "README.md" + ], + 
"devDependencies": { + "@graphprotocol/interfaces": "workspace:^", + "@graphprotocol/toolshed": "workspace:^", + "@nomicfoundation/hardhat-ethers": "catalog:", + "@nomicfoundation/hardhat-verify": "catalog:", + "@openzeppelin/contracts": "^5.4.0", + "@openzeppelin/contracts-upgradeable": "^5.4.0", + "@openzeppelin/hardhat-upgrades": "^3.9.0", + "@typechain/ethers-v6": "^0.5.0", + "@typechain/hardhat": "catalog:", + "@types/node": "^20.17.50", + "dotenv": "catalog:", + "eslint": "catalog:", + "ethers": "catalog:", + "glob": "catalog:", + "globals": "catalog:", + "hardhat": "catalog:", + "hardhat-contract-sizer": "catalog:", + "hardhat-secure-accounts": "catalog:", + "hardhat-storage-layout": "catalog:", + "lint-staged": "catalog:", + "markdownlint-cli": "catalog:", + "prettier": "catalog:", + "prettier-plugin-solidity": "catalog:", + "solhint": "catalog:", + "ts-node": "^10.9.2", + "typechain": "^8.3.0", + "typescript": "catalog:", + "typescript-eslint": "catalog:", + "yaml-lint": "catalog:" + }, + "dependencies": { + "@noble/hashes": "^1.8.0" + } +} diff --git a/packages/issuance/prettier.config.cjs b/packages/issuance/prettier.config.cjs new file mode 100644 index 000000000..4e8dcf4f3 --- /dev/null +++ b/packages/issuance/prettier.config.cjs @@ -0,0 +1,5 @@ +const baseConfig = require('../../prettier.config.cjs') + +module.exports = { + ...baseConfig, +} diff --git a/packages/issuance/test/package.json b/packages/issuance/test/package.json new file mode 100644 index 000000000..f362b4c9b --- /dev/null +++ b/packages/issuance/test/package.json @@ -0,0 +1,62 @@ +{ + "name": "@graphprotocol/issuance-test", + "version": "1.0.0", + "private": true, + "description": "Test utilities for @graphprotocol/issuance", + "author": "Edge & Node", + "license": "GPL-2.0-or-later", + "main": "src/index.ts", + "types": "src/index.ts", + "exports": { + ".": { + "default": "./src/index.ts", + "types": "./src/index.ts" + } + }, + "scripts": { + "build": "pnpm build:dep && pnpm 
build:self", + "build:dep": "pnpm --filter '@graphprotocol/issuance-test^...' run build:self", + "build:self": "tsc --build", + "build:coverage": "pnpm build:dep:coverage && pnpm build:self", + "build:dep:coverage": "pnpm --filter '@graphprotocol/issuance-test^...' run build:coverage", + "clean": "rm -rf .eslintcache artifacts/", + "test": "pnpm build && pnpm test:self", + "test:self": "cd .. && hardhat test", + "test:coverage": "pnpm build:coverage && pnpm test:coverage:self", + "test:coverage:self": "cd .. && npx hardhat coverage --config hardhat.coverage.config.ts", + "lint": "pnpm lint:ts; pnpm lint:json", + "lint:ts": "eslint '**/*.{js,ts,cjs,mjs,jsx,tsx}' --fix --cache; prettier -w --cache --log-level warn '**/*.{js,ts,cjs,mjs,jsx,tsx}'", + "lint:json": "prettier -w --cache --log-level warn '**/*.json'" + }, + "dependencies": { + "@graphprotocol/issuance": "workspace:^", + "@graphprotocol/interfaces": "workspace:^", + "@graphprotocol/contracts": "workspace:^" + }, + "devDependencies": { + "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", + "@nomicfoundation/hardhat-ethers": "catalog:", + "@nomicfoundation/hardhat-foundry": "^1.1.1", + "@nomicfoundation/hardhat-network-helpers": "^1.0.0", + "@nomicfoundation/hardhat-toolbox": "5.0.0", + "@openzeppelin/contracts": "^5.4.0", + "@openzeppelin/contracts-upgradeable": "^5.4.0", + "@openzeppelin/foundry-upgrades": "0.4.0", + "@types/chai": "^4.3.20", + "@types/mocha": "^10.0.10", + "@types/node": "^20.17.50", + "chai": "^4.3.7", + "dotenv": "^16.5.0", + "eslint": "catalog:", + "eslint-plugin-no-only-tests": "catalog:", + "ethers": "catalog:", + "forge-std": "https://github.com/foundry-rs/forge-std/tarball/v1.9.7", + "glob": "catalog:", + "hardhat": "catalog:", + "hardhat-gas-reporter": "catalog:", + "prettier": "catalog:", + "solidity-coverage": "^0.8.0", + "ts-node": "^10.9.2", + "typescript": "catalog:" + } +} diff --git a/packages/issuance/test/prettier.config.cjs b/packages/issuance/test/prettier.config.cjs 
new file mode 100644 index 000000000..8eb0a0bee --- /dev/null +++ b/packages/issuance/test/prettier.config.cjs @@ -0,0 +1,5 @@ +const baseConfig = require('../prettier.config.cjs') + +module.exports = { + ...baseConfig, +} diff --git a/packages/issuance/test/src/index.ts b/packages/issuance/test/src/index.ts new file mode 100644 index 000000000..614cfd50d --- /dev/null +++ b/packages/issuance/test/src/index.ts @@ -0,0 +1,5 @@ +// Test utilities for @graphprotocol/issuance +// This package contains test files, test helpers, and testing utilities + +// This package provides test utilities for issuance contracts +export const PACKAGE_NAME = '@graphprotocol/issuance-test' diff --git a/packages/issuance/test/tests/allocate/AccessControl.test.ts b/packages/issuance/test/tests/allocate/AccessControl.test.ts new file mode 100644 index 000000000..141a730aa --- /dev/null +++ b/packages/issuance/test/tests/allocate/AccessControl.test.ts @@ -0,0 +1,229 @@ +/** + * Allocate Access Control Tests + * Tests access control patterns for IssuanceAllocator and DirectAllocation contracts + */ + +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { testMultipleAccessControl } from './commonTestUtils' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' + +describe('Allocate Access Control Tests', () => { + let accounts: any + let contracts: any + + before(async () => { + accounts = await getTestAccounts() + + // Deploy allocate contracts + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const issuanceAllocator = await deployIssuanceAllocator( + graphTokenAddress, + accounts.governor, + ethers.parseEther('100'), + ) + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + contracts = { + graphToken, + issuanceAllocator, + 
directAllocation, + } + }) + + describe('IssuanceAllocator Access Control', () => { + describe('setIssuancePerBlock', () => { + it('should revert when non-governor calls setIssuancePerBlock', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.nonGovernor).setIssuancePerBlock(ethers.parseEther('200')), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setIssuancePerBlock', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200')), + ).to.not.be.reverted + }) + + it('should revert when non-governor calls setIssuancePerBlock (2-param variant)', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + ['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('300'), 0), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setIssuancePerBlock (2-param variant)', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.governor)['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('300'), 0), + ).to.not.be.reverted + }) + }) + + describe('setTargetAllocation', () => { + it('should revert when non-governor calls setTargetAllocation', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256,uint256)'](accounts.nonGovernor.address, 100000, 0), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setTargetAllocation', async () => { + // Use a valid target contract address instead of EOA + await expect( + contracts.issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](contracts.directAllocation.target, 100000, 0), + ).to.not.be.reverted + }) + 
it('should revert when non-governor calls setTargetAllocation (3-param variant)', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](accounts.nonGovernor.address, 100000, 0, 0), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setTargetAllocation (3-param variant)', async () => { + // Use a valid target contract address instead of EOA + await expect( + contracts.issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](contracts.directAllocation.target, 100000, 0, 0), + ).to.not.be.reverted + }) + }) + + describe('notifyTarget', () => { + it('should revert when non-governor calls notifyTarget', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.nonGovernor).notifyTarget(contracts.directAllocation.target), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call notifyTarget', async () => { + // First add the target so notifyTarget has something to notify + await contracts.issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](contracts.directAllocation.target, 100000, 0) + + await expect( + contracts.issuanceAllocator.connect(accounts.governor).notifyTarget(contracts.directAllocation.target), + ).to.not.be.reverted + }) + }) + + describe('forceTargetNoChangeNotificationBlock', () => { + it('should revert when non-governor calls forceTargetNoChangeNotificationBlock', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + .forceTargetNoChangeNotificationBlock(contracts.directAllocation.target, 12345), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to 
call forceTargetNoChangeNotificationBlock', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(contracts.directAllocation.target, 12345), + ).to.not.be.reverted + }) + }) + + describe('Role Management Methods', () => { + it('should enforce access control on role management methods', async () => { + await testMultipleAccessControl( + contracts.issuanceAllocator, + [ + { + method: 'grantRole', + args: [SHARED_CONSTANTS.PAUSE_ROLE, accounts.operator.address], + description: 'grantRole', + }, + { + method: 'revokeRole', + args: [SHARED_CONSTANTS.PAUSE_ROLE, accounts.operator.address], + description: 'revokeRole', + }, + ], + accounts.governor, + accounts.nonGovernor, + ) + }) + }) + }) + + describe('DirectAllocation Access Control', () => { + describe('Role Management Methods', () => { + it('should enforce access control on role management methods', async () => { + await testMultipleAccessControl( + contracts.directAllocation, + [ + { + method: 'grantRole', + args: [SHARED_CONSTANTS.OPERATOR_ROLE, accounts.operator.address], + description: 'grantRole', + }, + { + method: 'revokeRole', + args: [SHARED_CONSTANTS.OPERATOR_ROLE, accounts.operator.address], + description: 'revokeRole', + }, + ], + accounts.governor, + accounts.nonGovernor, + ) + }) + }) + + it('should require OPERATOR_ROLE for sendTokens', async () => { + // Setup: Grant operator role first + await contracts.directAllocation + .connect(accounts.governor) + .grantRole(SHARED_CONSTANTS.OPERATOR_ROLE, accounts.operator.address) + + // Non-operator should be rejected + await expect( + contracts.directAllocation.connect(accounts.nonGovernor).sendTokens(accounts.nonGovernor.address, 1000), + ).to.be.revertedWithCustomError(contracts.directAllocation, 'AccessControlUnauthorizedAccount') + + // Operator should be allowed (may revert for other reasons like insufficient balance, but not access control) + // We just test that access control 
passes, not the full functionality + const hasRole = await contracts.directAllocation.hasRole( + SHARED_CONSTANTS.OPERATOR_ROLE, + accounts.operator.address, + ) + expect(hasRole).to.be.true + }) + + it('should require GOVERNOR_ROLE for setIssuanceAllocator', async () => { + await expect( + contracts.directAllocation.connect(accounts.nonGovernor).setIssuanceAllocator(accounts.user.address), + ).to.be.revertedWithCustomError(contracts.directAllocation, 'AccessControlUnauthorizedAccount') + }) + }) + + describe('Role Management Consistency', () => { + it('should have consistent GOVERNOR_ROLE across allocate contracts', async () => { + const governorRole = SHARED_CONSTANTS.GOVERNOR_ROLE + + // All allocate contracts should recognize the governor + expect(await contracts.issuanceAllocator.hasRole(governorRole, accounts.governor.address)).to.be.true + expect(await contracts.directAllocation.hasRole(governorRole, accounts.governor.address)).to.be.true + }) + + it('should have correct role admin hierarchy', async () => { + const governorRole = SHARED_CONSTANTS.GOVERNOR_ROLE + + // GOVERNOR_ROLE should be admin of itself (allowing governors to manage other governors) + expect(await contracts.issuanceAllocator.getRoleAdmin(governorRole)).to.equal(governorRole) + expect(await contracts.directAllocation.getRoleAdmin(governorRole)).to.equal(governorRole) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/DefaultTarget.test.ts b/packages/issuance/test/tests/allocate/DefaultTarget.test.ts new file mode 100644 index 000000000..ed10be459 --- /dev/null +++ b/packages/issuance/test/tests/allocate/DefaultTarget.test.ts @@ -0,0 +1,749 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' +import { expectCustomError } from './optimizationHelpers' + +describe('IssuanceAllocator - Default 
Allocation', () => { + let accounts + let graphToken + let issuanceAllocator + let target1 + let target2 + let target3 + let addresses + + const issuancePerBlock = ethers.parseEther('100') + + beforeEach(async () => { + accounts = await getTestAccounts() + + // Deploy fresh contracts for each test + graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + target1 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + target2 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + target3 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + addresses = { + issuanceAllocator: await issuanceAllocator.getAddress(), + target1: await target1.getAddress(), + target2: await target2.getAddress(), + target3: await target3.getAddress(), + graphToken: graphTokenAddress, + } + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(addresses.issuanceAllocator) + }) + + describe('Initialization', () => { + it('should initialize with default target at index 0', async () => { + const targetCount = await issuanceAllocator.getTargetCount() + expect(targetCount).to.equal(1n) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(ethers.ZeroAddress) + }) + + it('should initialize with 100% allocation to default target', async () => { + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const allocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + + expect(allocation.totalAllocationRate).to.equal(issuancePerBlock) + expect(allocation.allocatorMintingRate).to.equal(issuancePerBlock) + expect(allocation.selfMintingRate).to.equal(0n) + }) + + it('should report total allocation as 0% when default is address(0)', async () => { + const totalAllocation = await 
issuanceAllocator.getTotalAllocation() + + // When default is address(0), it is treated as unallocated for reporting purposes + expect(totalAllocation.totalAllocationRate).to.equal(0n) + expect(totalAllocation.allocatorMintingRate).to.equal(0n) + expect(totalAllocation.selfMintingRate).to.equal(0n) + }) + }) + + describe('100% Allocation Invariant', () => { + it('should auto-adjust default target when setting normal target allocation', async () => { + const allocation1Rate = ethers.parseEther('30') // 30% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) + + // Check target1 has correct allocation + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(target1Allocation.totalAllocationRate).to.equal(allocation1Rate) + + // Check default target was auto-adjusted + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(issuancePerBlock - allocation1Rate) + + // Check reported total (excludes default since it's address(0)) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationRate).to.equal(allocation1Rate) + }) + + it('should maintain 100% invariant with multiple targets', async () => { + const allocation1Rate = ethers.parseEther('20') // 20% + const allocation2Rate = ethers.parseEther('35') // 35% + const allocation3Rate = ethers.parseEther('15') // 15% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2Rate) + await issuanceAllocator + .connect(accounts.governor) + 
['setTargetAllocation(address,uint256)'](addresses.target3, allocation3Rate) + + // Check default target is 30% (100% - 20% - 35% - 15%) + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + const expectedDefault = issuancePerBlock - allocation1Rate - allocation2Rate - allocation3Rate + expect(defaultAllocation.totalAllocationRate).to.equal(expectedDefault) + + // Check reported total (excludes default since it's address(0)) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationRate).to.equal(allocation1Rate + allocation2Rate + allocation3Rate) + }) + + it('should allow 0% default target when all allocation is assigned', async () => { + const allocation1Rate = ethers.parseEther('60') // 60% + const allocation2Rate = ethers.parseEther('40') // 40% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2Rate) + + // Check default target is 0% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(0n) + + // Check reported total is 100% (default has 0%, so exclusion doesn't matter) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationRate).to.equal(issuancePerBlock) + }) + + it('should revert if non-default targets exceed 100%', async () => { + const allocation1Rate = ethers.parseEther('60') // 60% + const allocation2Rate = ethers.parseEther('50') // 50% (total would be 110%) + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 
allocation1Rate) + + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2Rate), + issuanceAllocator, + 'InsufficientAllocationAvailable', + ) + }) + + it('should adjust default when removing a target allocation', async () => { + // Set up initial allocations + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('20')) + + // Default should be 50% + let defaultAddress = await issuanceAllocator.getTargetAt(0) + let defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('50')) + + // Remove target1 allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) + + // Default should now be 80% + defaultAddress = await issuanceAllocator.getTargetAt(0) + defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('80')) + + // Reported total excludes default (only target2's 20% is reported) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationRate).to.equal(ethers.parseEther('20')) + }) + + it('should handle self-minting allocations correctly in 100% invariant', async () => { + const allocator1 = ethers.parseEther('20') + const self1 = ethers.parseEther('10') + const allocator2 = ethers.parseEther('30') + const self2 = ethers.parseEther('5') + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, allocator1, self1) + await issuanceAllocator + 
.connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, allocator2, self2) + + // Total non-default: 20% + 10% + 30% + 5% = 65% + // Default should be: 35% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('35')) + + // Reported total excludes default (only target1+target2's 65% is reported) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationRate).to.equal(allocator1 + self1 + allocator2 + self2) + expect(totalAllocation.selfMintingRate).to.equal(self1 + self2) + }) + }) + + describe('setDefaultTarget', () => { + it('should allow governor to change default target address', async () => { + const newDefaultAddress = addresses.target1 + + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(newDefaultAddress) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(newDefaultAddress) + }) + + it('should maintain allocation when changing default address', async () => { + // Set a target allocation first + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('40')) + + // Default should be 60% + let defaultAddress = await issuanceAllocator.getTargetAt(0) + let defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('60')) + + // Change default address + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Check new address has the same allocation + defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(addresses.target1) + defaultAllocation = await 
issuanceAllocator.getTargetAllocation(addresses.target1) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('60')) + + // Old address should have zero allocation + const oldAllocation = await issuanceAllocator.getTargetAllocation(ethers.ZeroAddress) + expect(oldAllocation.totalAllocationRate).to.equal(0n) + }) + + it('should emit DefaultTargetUpdated event', async () => { + const newDefaultAddress = addresses.target1 + + await expect(issuanceAllocator.connect(accounts.governor).setDefaultTarget(newDefaultAddress)) + .to.emit(issuanceAllocator, 'DefaultTargetUpdated') + .withArgs(ethers.ZeroAddress, newDefaultAddress) + }) + + it('should be no-op when setting to same address', async () => { + const currentAddress = await issuanceAllocator.getTargetAt(0) + + const tx = await issuanceAllocator.connect(accounts.governor).setDefaultTarget(currentAddress) + const receipt = await tx.wait() + + // Should not emit event when no-op + const events = receipt!.logs.filter((log: any) => { + try { + return issuanceAllocator.interface.parseLog(log)?.name === 'DefaultTargetUpdated' + } catch { + return false + } + }) + expect(events.length).to.equal(0) + }) + + it('should revert when non-governor tries to change default address', async () => { + await expect( + issuanceAllocator.connect(accounts.user).setDefaultTarget(addresses.target1), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should revert when non-governor tries to change default address with explicit fromBlockNumber', async () => { + const currentBlock = await ethers.provider.getBlockNumber() + await expect( + issuanceAllocator.connect(accounts.user)['setDefaultTarget(address,uint256)'](addresses.target1, currentBlock), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should return false when trying to change default address while paused without explicit fromBlockNumber', async () => { + // 
Grant pause role and pause + const PAUSE_ROLE = ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Try to change default without explicit fromBlockNumber - should return false (checked via staticCall) + const result = await issuanceAllocator.connect(accounts.governor).setDefaultTarget.staticCall(addresses.target3) + expect(result).to.equal(false) + + // Verify allocation didn't change + const currentDefault = await issuanceAllocator.getTargetAt(0) + expect(currentDefault).to.equal(ethers.ZeroAddress) + + // Should succeed with explicit minDistributedBlock that has been reached + const lastDistributionBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + await issuanceAllocator + .connect(accounts.governor) + ['setDefaultTarget(address,uint256)'](addresses.target3, lastDistributionBlock) + + const newDefault = await issuanceAllocator.getTargetAt(0) + expect(newDefault).to.equal(addresses.target3) + }) + + it('should revert when trying to set default to a normally allocated target', async () => { + // Set target1 as a normal allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Try to set target1 as default should fail + await expectCustomError( + issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1), + issuanceAllocator, + 'CannotSetDefaultToAllocatedTarget', + ) + }) + + it('should allow changing back to zero address', async () => { + // Change to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Change back to zero address + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(ethers.ZeroAddress) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + 
expect(defaultAddress).to.equal(ethers.ZeroAddress) + }) + }) + + describe('setTargetAllocation restrictions', () => { + it('should revert with zero address error when default target is address(0)', async () => { + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(ethers.ZeroAddress) + + // When default is address(0), the zero address check happens first + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](defaultAddress, ethers.parseEther('50')), + issuanceAllocator, + 'TargetAddressCannotBeZero', + ) + }) + + it('should revert when trying to set allocation for changed default target', async () => { + // Change default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Should not be able to set allocation for target1 now + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('50')), + issuanceAllocator, + 'CannotSetAllocationForDefaultTarget', + ) + }) + + it('should allow setting allocation for previous default address after it changes', async () => { + // Change default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Change default to target2 (target1 is no longer the default) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) + + // Now target1 can receive a normal allocation since it's no longer the default + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationRate).to.equal(ethers.parseEther('30')) + }) + + it('should revert when trying to set allocation for address(0) when default is not 
address(0)', async () => { + // Change default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Try to set allocation for address(0) directly should fail + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](ethers.ZeroAddress, ethers.parseEther('30')), + issuanceAllocator, + 'TargetAddressCannotBeZero', + ) + }) + }) + + describe('Distribution with default target', () => { + it('should not mint to zero address when default is unset', async () => { + // Set a normal target allocation (this is block 1) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('40')) + + // Distribute (this is block 2, so we distribute for block 1->2 = 1 block since last distribution) + await issuanceAllocator.distributeIssuance() + + // Target1 should receive 40% of issuance for the block between setTargetAllocation and distributeIssuance + const target1Balance = await graphToken.balanceOf(addresses.target1) + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('40')) / issuancePerBlock + expect(target1Balance).to.equal(expectedTarget1) + + // Zero address should have nothing (cannot be minted to) + const zeroBalance = await graphToken.balanceOf(ethers.ZeroAddress) + expect(zeroBalance).to.equal(0n) + + // The 60% for default (zero address) is effectively burned/not minted + }) + + it('should mint to default address when it is set', async () => { + // Change default to target3 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) + + // Set target1 allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Distribute to settle issuance + await issuanceAllocator.distributeIssuance() + + // Target1 should receive 30% for 1 block + const 
target1Balance = await graphToken.balanceOf(addresses.target1) + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('30')) / issuancePerBlock + expect(target1Balance).to.equal(expectedTarget1) + + // Target3 (default) should receive: + // - 100% for 1 block (from setDefaultTarget to setTargetAllocation) + // - 70% for 1 block (from setTargetAllocation to distributeIssuance) + const target3Balance = await graphToken.balanceOf(addresses.target3) + const expectedTarget3 = issuancePerBlock + ethers.parseEther('70') + expect(target3Balance).to.equal(expectedTarget3) + }) + + it('should distribute correctly with multiple targets and default', async () => { + // Set default to target3 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) + + // Set allocations (target3 gets remaining 50% as default) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('20')) // 20% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('30')) // 30% + + // Distribute to settle issuance + await issuanceAllocator.distributeIssuance() + + // Check balances: + // - target1 gets 20% for 2 blocks (from first setTargetAllocation onwards) + // - target2 gets 30% for 1 block (from second setTargetAllocation onwards) + // - target3 (default) gets 100% for 1 block + 80% for 1 block + 50% for 1 block + const target1Balance = await graphToken.balanceOf(addresses.target1) + const target2Balance = await graphToken.balanceOf(addresses.target2) + const target3Balance = await graphToken.balanceOf(addresses.target3) + + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('20') * 2n) / issuancePerBlock + const expectedTarget2 = (issuancePerBlock * ethers.parseEther('30')) / issuancePerBlock + const expectedTarget3 = issuancePerBlock + ethers.parseEther('80') + ethers.parseEther('50') + + 
expect(target1Balance).to.equal(expectedTarget1) + expect(target2Balance).to.equal(expectedTarget2) + expect(target3Balance).to.equal(expectedTarget3) + + // Total minted should equal 3 blocks of issuance + const totalMinted = target1Balance + target2Balance + target3Balance + expect(totalMinted).to.equal(issuancePerBlock * 3n) + }) + + it('should handle distribution when default target is 0%', async () => { + // Allocate 100% to explicit targets (default gets 0%) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('60')) + // At this point target1 has 60%, default has 40% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('40')) + // Now target1 has 60%, target2 has 40%, default has 0% + + // Distribute (1 block since last setTargetAllocation) + await issuanceAllocator.distributeIssuance() + + // Zero address (default) should receive nothing + const zeroBalance = await graphToken.balanceOf(ethers.ZeroAddress) + expect(zeroBalance).to.equal(0n) + + // Target1 receives: 0% (from first distributeIssuance to first setTargetAllocation) + // + 60% (from first setTargetAllocation to second setTargetAllocation) + // + 60% (from second setTargetAllocation to final distributeIssuance) + // = 120% of one block = 60% * 2 blocks + const target1Balance = await graphToken.balanceOf(addresses.target1) + expect(target1Balance).to.equal((issuancePerBlock * ethers.parseEther('60') * 2n) / issuancePerBlock) + + // Target2 receives: 40% (from second setTargetAllocation to final distributeIssuance) + const target2Balance = await graphToken.balanceOf(addresses.target2) + expect(target2Balance).to.equal((issuancePerBlock * ethers.parseEther('40')) / issuancePerBlock) + + // Default allocation is now 0% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await 
issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(0n) + }) + + it('should distribute during setDefaultTarget when using default behavior', async () => { + // Change default to target3 using the simple variant (no explicit fromBlockNumber) + // This should distribute issuance up to current block before changing the default + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) + + // Set target1 allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](addresses.target1, ethers.parseEther('30'), 0n, 0) + + // Distribute once more + await issuanceAllocator.distributeIssuance() + + // Target3 (default) should receive: + // - 0% for 1 block (setDefaultTarget distributes to old default (zero address) before changing) + // - 100% for 1 block (from setDefaultTarget to setTargetAllocation) + // - 70% for 1 block (from setTargetAllocation to final distributeIssuance) + const target3Balance = await graphToken.balanceOf(addresses.target3) + const expectedTarget3 = issuancePerBlock + ethers.parseEther('70') + expect(target3Balance).to.equal(expectedTarget3) + + // Target1 should receive 30% for 1 block + const target1Balance = await graphToken.balanceOf(addresses.target1) + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('30')) / issuancePerBlock + expect(target1Balance).to.equal(expectedTarget1) + }) + + it('should handle changing default to address that previously had normal allocation', async () => { + // Scenario: target1 has normal allocation → removed (0%) → set as default + // This tests for stale data issues + + // Set target1 as normal allocation with 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + let allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + 
expect(allocation.totalAllocationRate).to.equal(ethers.parseEther('30')) + + // Remove target1's allocation (set to 0%) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256)'](addresses.target1, 0n) + + // Verify target1 is no longer in targetAddresses (except if it's at index 0, which it's not) + const targetCount = await issuanceAllocator.getTargetCount() + const targets = [] + for (let i = 0; i < targetCount; i++) { + targets.push(await issuanceAllocator.getTargetAt(i)) + } + expect(targets).to.not.include(addresses.target1) // Should not be in list anymore + + // Now set target1 as default - should work and not have stale allocation data + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Verify target1 is now default with 100% allocation (since no other targets) + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(addresses.target1) + + allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationRate).to.equal(issuancePerBlock) // Should have full allocation as default + }) + + it('should handle changing default when default has 0% allocation', async () => { + // Allocate 100% to other targets so default has 0% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('60')) + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('40')) + + // Default should now have 0% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationRate).to.equal(0n) + + // Change default to target3 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) + + // New default should have 
0% (same as old default) + const newDefaultAddress = await issuanceAllocator.getTargetAt(0) + expect(newDefaultAddress).to.equal(addresses.target3) + + const newDefaultAllocation = await issuanceAllocator.getTargetAllocation(addresses.target3) + expect(newDefaultAllocation.totalAllocationRate).to.equal(0n) + + // Other allocations should be maintained + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + const target2Allocation = await issuanceAllocator.getTargetAllocation(addresses.target2) + expect(target1Allocation.totalAllocationRate).to.equal(ethers.parseEther('60')) + expect(target2Allocation.totalAllocationRate).to.equal(ethers.parseEther('40')) + }) + + it('should handle changing from initial address(0) default without errors', async () => { + // Verify initial state: default is address(0) + const initialDefault = await issuanceAllocator.getTargetAt(0) + expect(initialDefault).to.equal(ethers.ZeroAddress) + + // Add a normal allocation so there's pending issuance to distribute + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('40')) + + // Mine a few blocks to accumulate issuance + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Change default from address(0) to target2 + // This should: + // 1. Call _handleDistributionBeforeAllocation(address(0), ...) - should not revert + // 2. Call _notifyTarget(address(0)) - should return early safely + // 3. 
Delete allocationTargets[address(0)] - should not cause issues + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) + + // Verify the change succeeded + const newDefault = await issuanceAllocator.getTargetAt(0) + expect(newDefault).to.equal(addresses.target2) + + // Verify address(0) received no tokens (can't mint to zero address) + const zeroAddressBalance = await graphToken.balanceOf(ethers.ZeroAddress) + expect(zeroAddressBalance).to.equal(0n) + + // Distribute and verify target2 (new default) receives correct allocation + await issuanceAllocator.distributeIssuance() + + // Target2 should have received 60% for 1 block (from setDefaultTarget to distributeIssuance) + const target2Balance = await graphToken.balanceOf(addresses.target2) + const expectedTarget2 = (issuancePerBlock * ethers.parseEther('60')) / issuancePerBlock + expect(target2Balance).to.equal(expectedTarget2) + + // Target1 should have accumulated tokens across multiple blocks + const target1Balance = await graphToken.balanceOf(addresses.target1) + expect(target1Balance).to.be.gt(0n) // Should have received something + + // Verify lastChangeNotifiedBlock was preserved for the new default (not overwritten to 0 from address(0)) + const target2Data = await issuanceAllocator.getTargetData(addresses.target2) + const currentBlock = await ethers.provider.getBlockNumber() + expect(target2Data.lastChangeNotifiedBlock).to.be.gt(0n) + expect(target2Data.lastChangeNotifiedBlock).to.be.lte(currentBlock) + }) + + it('should not transfer future notification block from old default to new default', async () => { + // Set initial default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Force a future notification block on target1 (the current default) + const currentBlock = await ethers.provider.getBlockNumber() + const futureBlock = currentBlock + 100 + await issuanceAllocator + .connect(accounts.governor) + 
.forceTargetNoChangeNotificationBlock(addresses.target1, futureBlock) + + // Verify target1 has the future block set + const target1DataBefore = await issuanceAllocator.getTargetData(addresses.target1) + expect(target1DataBefore.lastChangeNotifiedBlock).to.equal(futureBlock) + + // Change default from target1 to target2 + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) + + // Verify target2 (new default) has its own notification block (current block), not the future block from target1 + const target2Data = await issuanceAllocator.getTargetData(addresses.target2) + const blockAfterChange = await ethers.provider.getBlockNumber() + + // target2 should have been notified at the current block, not inherit the future block + expect(target2Data.lastChangeNotifiedBlock).to.equal(blockAfterChange) + expect(target2Data.lastChangeNotifiedBlock).to.not.equal(futureBlock) + expect(target2Data.lastChangeNotifiedBlock).to.be.lt(futureBlock) + + // Old default (target1) should no longer have data (it was removed) + const target1DataAfter = await issuanceAllocator.getTargetData(addresses.target1) + expect(target1DataAfter.lastChangeNotifiedBlock).to.equal(0) + }) + }) + + describe('View functions', () => { + it('should return correct target count including default', async () => { + let count = await issuanceAllocator.getTargetCount() + expect(count).to.equal(1n) // Just default + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + count = await issuanceAllocator.getTargetCount() + expect(count).to.equal(2n) // Default + target1 + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('20')) + + count = await issuanceAllocator.getTargetCount() + expect(count).to.equal(3n) // Default + target1 + target2 + }) + + it('should include default in getTargets array', async () => 
{ + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + const targets = await issuanceAllocator.getTargets() + expect(targets.length).to.equal(2) + expect(targets[0]).to.equal(ethers.ZeroAddress) // Default at index 0 + expect(targets[1]).to.equal(addresses.target1) + }) + + it('should return correct data for default target', async () => { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('40')) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const data = await issuanceAllocator.getTargetData(defaultAddress) + + expect(data.allocatorMintingRate).to.equal(ethers.parseEther('60')) + expect(data.selfMintingRate).to.equal(0n) + }) + + it('should report 100% total allocation when default is a real address', async () => { + // Set target1 allocation first + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Change default to target2 (a real address, not address(0)) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) + + // When default is a real address, it should report 100% total allocation + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationRate).to.equal(issuancePerBlock) + expect(totalAllocation.allocatorMintingRate).to.equal(issuancePerBlock) // target1=30% + target2=70% = 100% + expect(totalAllocation.selfMintingRate).to.equal(0n) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/DefensiveChecks.test.ts b/packages/issuance/test/tests/allocate/DefensiveChecks.test.ts new file mode 100644 index 000000000..56ebed829 --- /dev/null +++ b/packages/issuance/test/tests/allocate/DefensiveChecks.test.ts @@ -0,0 +1,71 @@ +import { expect } from 'chai' +import hre from 
'hardhat' +const { ethers } = hre +const { upgrades } = require('hardhat') + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' + +describe('IssuanceAllocator - Defensive Checks', function () { + let accounts + let issuanceAllocator + let graphToken + + beforeEach(async function () { + accounts = await getTestAccounts() + graphToken = await deployTestGraphToken() + + // Deploy test harness as regular upgradeable contract with explicit validation skip + const IssuanceAllocatorFactory = await ethers.getContractFactory('IssuanceAllocatorTestHarness') + const issuanceAllocatorContract = await upgrades.deployProxy( + IssuanceAllocatorFactory, + [accounts.governor.address], + { + constructorArgs: [await graphToken.getAddress()], + initializer: 'initialize', + unsafeAllow: ['constructor', 'state-variable-immutable'], + }, + ) + issuanceAllocator = issuanceAllocatorContract + + // Add IssuanceAllocator as minter + await graphToken.connect(accounts.governor).addMinter(await issuanceAllocator.getAddress()) + }) + + describe('_distributePendingProportionally defensive checks', function () { + it('should return early when allocatedRate is 0', async function () { + // Call exposed function with allocatedRate = 0 + // This should return early without reverting + await expect( + issuanceAllocator.exposed_distributePendingProportionally( + 100, // available + 0, // allocatedRate = 0 (defensive check) + 1000, // toBlockNumber + ), + ).to.not.be.reverted + }) + + it('should return early when available is 0', async function () { + // Call exposed function with available = 0 + // This should return early without reverting + await expect( + issuanceAllocator.exposed_distributePendingProportionally( + 0, // available = 0 (defensive check) + 100, // allocatedRate + 1000, // toBlockNumber + ), + ).to.not.be.reverted + }) + + it('should return early when both are 0', async function () { + // Call exposed function with both = 0 + // This should return early without 
reverting + await expect( + issuanceAllocator.exposed_distributePendingProportionally( + 0, // available = 0 + 0, // allocatedRate = 0 + 1000, // toBlockNumber + ), + ).to.not.be.reverted + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/DirectAllocation.test.ts b/packages/issuance/test/tests/allocate/DirectAllocation.test.ts new file mode 100644 index 000000000..15162208d --- /dev/null +++ b/packages/issuance/test/tests/allocate/DirectAllocation.test.ts @@ -0,0 +1,291 @@ +import { expect } from 'chai' +import hre from 'hardhat' + +const { ethers } = hre + +const { upgrades } = require('hardhat') + +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { GraphTokenHelper } from '../common/graphTokenHelper' +import { deployDirectAllocation } from './fixtures' + +describe('DirectAllocation - Optimized & Consolidated', () => { + // Common variables + let accounts + let sharedContracts + + // Pre-calculated role constants to avoid repeated async contract calls + const GOVERNOR_ROLE = SHARED_CONSTANTS.GOVERNOR_ROLE + const OPERATOR_ROLE = SHARED_CONSTANTS.OPERATOR_ROLE + const PAUSE_ROLE = SHARED_CONSTANTS.PAUSE_ROLE + + before(async () => { + accounts = await getTestAccounts() + + // Deploy shared contracts once for most tests - PERFORMANCE OPTIMIZATION + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const directAllocationAddress = await directAllocation.getAddress() + + // Create helper + const graphTokenHelper = new GraphTokenHelper(graphToken as any, accounts.governor) + + sharedContracts = { + graphToken, + directAllocation, + graphTokenHelper, + addresses: { + graphToken: graphTokenAddress, + directAllocation: directAllocationAddress, + }, + } + }) + + // Fast state reset function for shared contracts - PERFORMANCE OPTIMIZATION + async function 
resetContractState() { + if (!sharedContracts) return + + const { directAllocation } = sharedContracts + + // Reset pause state + try { + if (await directAllocation.paused()) { + await directAllocation.connect(accounts.governor).unpause() + } + } catch { + // Ignore if not paused + } + + // Remove all roles except governor (keep governor role intact) + try { + // Remove operator role from all accounts + for (const account of [accounts.operator, accounts.user, accounts.nonGovernor]) { + if (await directAllocation.hasRole(OPERATOR_ROLE, account.address)) { + await directAllocation.connect(accounts.governor).revokeRole(OPERATOR_ROLE, account.address) + } + if (await directAllocation.hasRole(PAUSE_ROLE, account.address)) { + await directAllocation.connect(accounts.governor).revokeRole(PAUSE_ROLE, account.address) + } + } + + // Remove pause role from governor if present + if (await directAllocation.hasRole(PAUSE_ROLE, accounts.governor.address)) { + await directAllocation.connect(accounts.governor).revokeRole(PAUSE_ROLE, accounts.governor.address) + } + } catch { + // Ignore role management errors during reset + } + } + + beforeEach(async () => { + await resetContractState() + }) + + // Test fixtures for tests that need fresh contracts + async function setupDirectAllocation() { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + return { directAllocation, graphToken } + } + + describe('Constructor Validation', () => { + it('should revert when constructed with zero GraphToken address', async () => { + const DirectAllocationFactory = await ethers.getContractFactory('DirectAllocation') + await expect(DirectAllocationFactory.deploy(ethers.ZeroAddress)).to.be.revertedWithCustomError( + DirectAllocationFactory, + 'GraphTokenCannotBeZeroAddress', + ) + }) + }) + + describe('Initialization', () => { + it('should set the 
governor role correctly', async () => { + const { directAllocation } = sharedContracts + expect(await directAllocation.hasRole(GOVERNOR_ROLE, accounts.governor.address)).to.be.true + }) + + it('should not set operator role to anyone initially', async () => { + const { directAllocation } = sharedContracts + expect(await directAllocation.hasRole(OPERATOR_ROLE, accounts.operator.address)).to.be.false + }) + + it('should revert when initialize is called more than once', async () => { + const { directAllocation } = sharedContracts + await expect(directAllocation.initialize(accounts.governor.address)).to.be.revertedWithCustomError( + directAllocation, + 'InvalidInitialization', + ) + }) + + it('should revert when initialized with zero governor address', async () => { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Try to deploy proxy with zero governor address - this should hit the BaseUpgradeable check + const DirectAllocationFactory = await ethers.getContractFactory('DirectAllocation') + await expect( + upgrades.deployProxy(DirectAllocationFactory, [ethers.ZeroAddress], { + constructorArgs: [graphTokenAddress], + initializer: 'initialize', + }), + ).to.be.revertedWithCustomError(DirectAllocationFactory, 'GovernorCannotBeZeroAddress') + }) + }) + + describe('Role Management', () => { + it('should manage operator role correctly and enforce access control', async () => { + const { directAllocation } = sharedContracts + + // Test granting operator role + await expect(directAllocation.connect(accounts.governor).grantRole(OPERATOR_ROLE, accounts.operator.address)) + .to.emit(directAllocation, 'RoleGranted') + .withArgs(OPERATOR_ROLE, accounts.operator.address, accounts.governor.address) + + expect(await directAllocation.hasRole(OPERATOR_ROLE, accounts.operator.address)).to.be.true + + // Test revoking operator role + await expect(directAllocation.connect(accounts.governor).revokeRole(OPERATOR_ROLE, 
accounts.operator.address)) + .to.emit(directAllocation, 'RoleRevoked') + .withArgs(OPERATOR_ROLE, accounts.operator.address, accounts.governor.address) + + expect(await directAllocation.hasRole(OPERATOR_ROLE, accounts.operator.address)).to.be.false + }) + }) + + describe('Token Management', () => { + it('should handle token operations with proper access control and validation', async () => { + // Use shared contracts for better performance + const { directAllocation, graphToken, graphTokenHelper } = sharedContracts + await resetContractState() + + // Setup: mint tokens and grant operator role + await graphTokenHelper.mint(await directAllocation.getAddress(), ethers.parseEther('1000')) + await directAllocation.connect(accounts.governor).grantRole(OPERATOR_ROLE, accounts.operator.address) + + // Test successful token sending with event emission + const amount = ethers.parseEther('100') + await expect(directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, amount)) + .to.emit(directAllocation, 'TokensSent') + .withArgs(accounts.user.address, amount) + expect(await graphToken.balanceOf(accounts.user.address)).to.equal(amount) + + // Test zero amount sending + await expect(directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, 0)) + .to.emit(directAllocation, 'TokensSent') + .withArgs(accounts.user.address, 0) + + // Test access control - operator should succeed, non-operator should fail + await expect( + directAllocation.connect(accounts.nonGovernor).sendTokens(accounts.user.address, ethers.parseEther('100')), + ).to.be.revertedWithCustomError(directAllocation, 'AccessControlUnauthorizedAccount') + + // Test zero address validation - transfer to zero address will fail + await expect( + directAllocation.connect(accounts.operator).sendTokens(ethers.ZeroAddress, ethers.parseEther('100')), + ).to.be.revertedWith('ERC20: transfer to the zero address') + }) + + it('should handle insufficient balance and pause states correctly', 
async () => { + // Use fresh setup for this test + const { directAllocation, graphToken } = await setupDirectAllocation() + const graphTokenHelper = new GraphTokenHelper(graphToken as any, accounts.governor) + + // Test insufficient balance (no tokens minted) + await directAllocation.connect(accounts.governor).grantRole(OPERATOR_ROLE, accounts.operator.address) + await expect( + directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, ethers.parseEther('100')), + ).to.be.revertedWith('ERC20: transfer amount exceeds balance') + + // Setup for pause test + await graphTokenHelper.mint(await directAllocation.getAddress(), ethers.parseEther('1000')) + await directAllocation.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await directAllocation.connect(accounts.governor).pause() + + // Test paused state + await expect( + directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, ethers.parseEther('100')), + ).to.be.revertedWithCustomError(directAllocation, 'EnforcedPause') + }) + }) + + describe('Pausability and Access Control', () => { + beforeEach(async () => { + await resetContractState() + }) + + it('should handle pause/unpause operations and access control', async () => { + const { directAllocation } = sharedContracts + + // Grant pause role to governor and operator + await directAllocation.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await directAllocation.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.operator.address) + + // Test basic pause/unpause with governor + await directAllocation.connect(accounts.governor).pause() + expect(await directAllocation.paused()).to.be.true + await directAllocation.connect(accounts.governor).unpause() + expect(await directAllocation.paused()).to.be.false + + // Test multiple pause/unpause cycles with operator + await directAllocation.connect(accounts.operator).pause() + expect(await 
directAllocation.paused()).to.be.true + await directAllocation.connect(accounts.operator).unpause() + expect(await directAllocation.paused()).to.be.false + await directAllocation.connect(accounts.operator).pause() + expect(await directAllocation.paused()).to.be.true + await directAllocation.connect(accounts.operator).unpause() + expect(await directAllocation.paused()).to.be.false + + // Test access control for unauthorized accounts + await expect(directAllocation.connect(accounts.nonGovernor).pause()).to.be.revertedWithCustomError( + directAllocation, + 'AccessControlUnauthorizedAccount', + ) + + // Setup for unpause access control test + await directAllocation.connect(accounts.governor).pause() + await expect(directAllocation.connect(accounts.nonGovernor).unpause()).to.be.revertedWithCustomError( + directAllocation, + 'AccessControlUnauthorizedAccount', + ) + }) + + it('should support all BaseUpgradeable constants', async () => { + const { directAllocation } = sharedContracts + + // Test that constants are accessible + expect(await directAllocation.MILLION()).to.equal(1_000_000) + expect(await directAllocation.GOVERNOR_ROLE()).to.equal(GOVERNOR_ROLE) + expect(await directAllocation.PAUSE_ROLE()).to.equal(PAUSE_ROLE) + expect(await directAllocation.OPERATOR_ROLE()).to.equal(OPERATOR_ROLE) + }) + + it('should maintain role hierarchy properly', async () => { + const { directAllocation } = sharedContracts + + // Governor should be admin of all roles + expect(await directAllocation.getRoleAdmin(GOVERNOR_ROLE)).to.equal(GOVERNOR_ROLE) + expect(await directAllocation.getRoleAdmin(PAUSE_ROLE)).to.equal(GOVERNOR_ROLE) + expect(await directAllocation.getRoleAdmin(OPERATOR_ROLE)).to.equal(GOVERNOR_ROLE) + }) + }) + + describe('Interface Implementation', () => { + it('should implement beforeIssuanceAllocationChange as a no-op and emit event', async () => { + const { directAllocation } = sharedContracts + // This should not revert and should emit an event + await 
expect(directAllocation.beforeIssuanceAllocationChange()).to.emit( + directAllocation, + 'BeforeIssuanceAllocationChange', + ) + }) + + it('should implement setIssuanceAllocator as a no-op', async () => { + const { directAllocation } = sharedContracts + // This should not revert + await directAllocation.connect(accounts.governor).setIssuanceAllocator(accounts.nonGovernor.address) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts b/packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts new file mode 100644 index 000000000..bf9f36f6b --- /dev/null +++ b/packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts @@ -0,0 +1,69 @@ +// Import Typechain-generated factories with interface metadata (interfaceId and interfaceName) +import { + IIssuanceAllocationAdministration__factory, + IIssuanceAllocationData__factory, + IIssuanceAllocationDistribution__factory, + IIssuanceAllocationStatus__factory, + IIssuanceTarget__factory, + IPausableControl__factory, + ISendTokens__factory, +} from '@graphprotocol/interfaces/types' +import { IAccessControl__factory } from '@graphprotocol/issuance/types' +import { ethers } from 'hardhat' + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' +import { shouldSupportInterfaces } from './testPatterns' + +/** + * Allocate ERC-165 Interface Compliance Tests + * Tests interface support for IssuanceAllocator and DirectAllocation contracts + */ +describe('Allocate ERC-165 Interface Compliance', () => { + let accounts: any + let contracts: any + + before(async () => { + accounts = await getTestAccounts() + + // Deploy allocate contracts for interface testing + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + const issuanceAllocator = await deployIssuanceAllocator( + graphTokenAddress, + accounts.governor, + 
ethers.parseEther('100'), + ) + + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + contracts = { + issuanceAllocator, + directAllocation, + } + }) + + describe( + 'IssuanceAllocator Interface Compliance', + shouldSupportInterfaces( + () => contracts.issuanceAllocator, + [ + IIssuanceAllocationDistribution__factory, + IIssuanceAllocationAdministration__factory, + IIssuanceAllocationStatus__factory, + IIssuanceAllocationData__factory, + IPausableControl__factory, + IAccessControl__factory, + ], + ), + ) + + describe( + 'DirectAllocation Interface Compliance', + shouldSupportInterfaces( + () => contracts.directAllocation, + [IIssuanceTarget__factory, ISendTokens__factory, IPausableControl__factory, IAccessControl__factory], + ), + ) +}) diff --git a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts new file mode 100644 index 000000000..fc5f27349 --- /dev/null +++ b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts @@ -0,0 +1,47 @@ +import { + IIssuanceAllocationAdministration__factory, + IIssuanceAllocationData__factory, + IIssuanceAllocationDistribution__factory, + IIssuanceAllocationStatus__factory, + IIssuanceTarget__factory, + ISendTokens__factory, +} from '@graphprotocol/interfaces/types' +import { expect } from 'chai' + +/** + * Allocate Interface ID Stability Tests + * + * These tests verify that allocate-specific interface IDs remain stable across builds. + * Changes to these IDs indicate breaking changes to the interface definitions. + * + * If a test fails: + * 1. Verify the interface change was intentional + * 2. Understand the impact on deployed contracts + * 3. Update the expected ID if the change is correct + * 4. 
Document the breaking change in release notes + */ +describe('Allocate Interface ID Stability', () => { + it('IIssuanceAllocationDistribution should have stable interface ID', () => { + expect(IIssuanceAllocationDistribution__factory.interfaceId).to.equal('0x79da37fc') + }) + + it('IIssuanceAllocationAdministration should have stable interface ID', () => { + expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x50d8541d') + }) + + it('IIssuanceAllocationStatus should have stable interface ID', () => { + expect(IIssuanceAllocationStatus__factory.interfaceId).to.equal('0xa896602d') + }) + + it('IIssuanceAllocationData should have stable interface ID', () => { + expect(IIssuanceAllocationData__factory.interfaceId).to.equal('0x48c3c62e') + }) + + it('IIssuanceTarget should have stable interface ID', () => { + expect(IIssuanceTarget__factory.interfaceId).to.equal('0xaee4dc43') + }) + + it('ISendTokens should have stable interface ID', () => { + expect(ISendTokens__factory.interfaceId).to.equal('0x05ab421d') + }) +}) diff --git a/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts new file mode 100644 index 000000000..feb0cb0d8 --- /dev/null +++ b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts @@ -0,0 +1,2678 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' +// calculateExpectedAccumulation removed with PPM model +// Import optimization helpers for common test utilities +import { expectCustomError } from './optimizationHelpers' + +// Helper function to deploy a simple mock target for testing +async function deployMockSimpleTarget() { + const MockSimpleTargetFactory = await ethers.getContractFactory('MockSimpleTarget') + return await 
MockSimpleTargetFactory.deploy() +} + +describe('IssuanceAllocator', () => { + // Common variables + let accounts + let issuancePerBlock + + // Shared contracts for optimized tests + // - Deploy contracts once in before() hook instead of per-test + // - Reset state in beforeEach() hook instead of redeploying + // - Use sharedContracts.addresses for cached addresses + // - Use sharedContracts.issuanceAllocator, etc. for contract instances + let sharedContracts + + // Role constants - hardcoded to avoid slow contract calls + const GOVERNOR_ROLE = SHARED_CONSTANTS.GOVERNOR_ROLE + const PAUSE_ROLE = SHARED_CONSTANTS.PAUSE_ROLE + + // Interface IDs moved to consolidated tests + + before(async () => { + accounts = await getTestAccounts() + issuancePerBlock = ethers.parseEther('100') // Default issuance per block + + // Deploy shared contracts once for most tests + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + const target1 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target2 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target3 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + // Cache addresses to avoid repeated getAddress() calls + const addresses = { + issuanceAllocator: await issuanceAllocator.getAddress(), + target1: await target1.getAddress(), + target2: await target2.getAddress(), + target3: await target3.getAddress(), + graphToken: graphTokenAddress, + } + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(addresses.issuanceAllocator) + + sharedContracts = { + graphToken, + issuanceAllocator, + target1, + target2, + target3, + addresses, + } + }) + + // Fast state reset function for shared contracts + async function resetIssuanceAllocatorState() { + if (!sharedContracts) 
return + + const { issuanceAllocator } = sharedContracts + + // Remove all existing allocations (except default at index 0) + try { + const targetCount = await issuanceAllocator.getTargetCount() + // Skip index 0 (default target) and remove from index 1 onwards + for (let i = 1; i < targetCount; i++) { + const targetAddr = await issuanceAllocator.getTargetAt(1) // Always remove index 1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](targetAddr, 0, 0) + } + } catch (_e) { + // Ignore errors during cleanup + } + + // Reset pause state + try { + if (await issuanceAllocator.paused()) { + await issuanceAllocator.connect(accounts.governor).unpause() + } + } catch (_e) { + // Ignore if not paused + } + + // Reset issuance per block to default + try { + const currentIssuance = await issuanceAllocator.getIssuancePerBlock() + if (currentIssuance !== issuancePerBlock) { + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(issuancePerBlock) + } + } catch (_e) { + // Ignore if can't reset + } + } + + beforeEach(async () => { + if (!accounts) { + accounts = await getTestAccounts() + issuancePerBlock = ethers.parseEther('100') + } + await resetIssuanceAllocatorState() + }) + + // Cached addresses to avoid repeated getAddress() calls + let cachedAddresses = {} + + // Test fixtures with caching + async function setupIssuanceAllocator() { + // Deploy test GraphToken + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Deploy IssuanceAllocator with proxy using OpenZeppelin's upgrades library + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + // Deploy target contracts using OpenZeppelin's upgrades library + const target1 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target2 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const 
target3 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + // Cache addresses to avoid repeated getAddress() calls + const issuanceAllocatorAddress = await issuanceAllocator.getAddress() + const target1Address = await target1.getAddress() + const target2Address = await target2.getAddress() + const target3Address = await target3.getAddress() + + cachedAddresses = { + issuanceAllocator: issuanceAllocatorAddress, + target1: target1Address, + target2: target2Address, + target3: target3Address, + graphToken: graphTokenAddress, + } + + return { + issuanceAllocator, + graphToken, + target1, + target2, + target3, + addresses: cachedAddresses, + } + } + + // Simplified setup for tests that don't need target contracts + async function setupSimpleIssuanceAllocator() { + // Deploy test GraphToken + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Deploy IssuanceAllocator with proxy using OpenZeppelin's upgrades library + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + // Cache the issuance allocator address + const issuanceAllocatorAddress = await issuanceAllocator.getAddress() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(issuanceAllocatorAddress) + + return { + issuanceAllocator, + graphToken, + addresses: { + issuanceAllocator: issuanceAllocatorAddress, + graphToken: graphTokenAddress, + }, + } + } + + describe('Initialization', () => { + it('should initialize contract correctly and prevent re-initialization', async () => { + const { issuanceAllocator } = sharedContracts + + // Verify all initialization state in one test + expect(await issuanceAllocator.hasRole(GOVERNOR_ROLE, accounts.governor.address)).to.be.true + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(issuancePerBlock) + + // Verify re-initialization is prevented + await 
expect(issuanceAllocator.initialize(accounts.governor.address)).to.be.revertedWithCustomError( + issuanceAllocator, + 'InvalidInitialization', + ) + }) + }) + + // Interface Compliance tests moved to consolidated/InterfaceCompliance.test.ts + + describe('ERC-165 Interface Checking', () => { + it('should successfully add a target that supports IIssuanceTarget interface', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Should succeed because DirectAllocation supports IIssuanceTarget + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0), + ).to.not.be.reverted + + // Verify the target was added + const targetData = await issuanceAllocator.getTargetData(addresses.target1) + expect(targetData.allocatorMintingRate).to.equal(100000) + expect(targetData.selfMintingRate).to.equal(0) + const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationRate).to.equal(100000) + expect(allocation.allocatorMintingRate).to.equal(100000) + expect(allocation.selfMintingRate).to.equal(0) + }) + + it('should revert when adding EOA targets (no contract code)', async () => { + const { issuanceAllocator } = sharedContracts + const eoaAddress = accounts.nonGovernor.address + + // Should revert because EOAs don't have contract code to call supportsInterface on + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](eoaAddress, 100000, 0), + ).to.be.reverted + }) + + it('should revert when adding a contract that does not support IIssuanceTarget', async () => { + const { issuanceAllocator } = sharedContracts + + // Deploy a contract that supports ERC-165 but not IIssuanceTarget + const ERC165OnlyFactory = await ethers.getContractFactory('MockERC165') + const erc165OnlyContract = await ERC165OnlyFactory.deploy() + const contractAddress = await 
erc165OnlyContract.getAddress() + + // Should revert because the contract doesn't support IIssuanceTarget + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](contractAddress, 100000, 0), + ).to.be.revertedWithCustomError(issuanceAllocator, 'TargetDoesNotSupportIIssuanceTarget') + }) + + it('should fail to add MockRevertingTarget due to notification failure even with force=true', async () => { + const { issuanceAllocator } = sharedContracts + + // MockRevertingTarget now supports both ERC-165 and IIssuanceTarget, so it passes interface check + const MockRevertingTargetFactory = await ethers.getContractFactory('MockRevertingTarget') + const mockRevertingTarget = await MockRevertingTargetFactory.deploy() + const contractAddress = await mockRevertingTarget.getAddress() + + // This should revert because MockRevertingTarget reverts during notification + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](contractAddress, 100000, 0, 0), + ).to.be.revertedWithCustomError(mockRevertingTarget, 'TargetRevertsIntentionally') + + // Verify the target was NOT added because the transaction reverted + const targetData = await issuanceAllocator.getTargetData(contractAddress) + expect(targetData.allocatorMintingRate).to.equal(0) + expect(targetData.selfMintingRate).to.equal(0) + const allocation = await issuanceAllocator.getTargetAllocation(contractAddress) + expect(allocation.totalAllocationRate).to.equal(0) + }) + + it('should allow re-adding existing target with same self-minter flag', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add the target first time + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) + + // Should succeed when setting allocation again with same flag (no interface check needed) + await expect( + 
issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 200000, 0), + ).to.not.be.reverted + }) + }) + + // Access Control tests moved to consolidated/AccessControl.test.ts + + describe('Target Management', () => { + it('should automatically remove target when setting allocation to 0', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target with allocation in one step + const allocation = 300000 // 30% in PPM + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, allocation, 0) + + // Verify allocation is set and target exists + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(target1Allocation.totalAllocationRate).to.equal(allocation) + const totalAlloc = await issuanceAllocator.getTotalAllocation() + // With default as address(0), only non-default targets are reported + expect(totalAlloc.totalAllocationRate).to.equal(allocation) + + // Remove target by setting allocation to 0 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) + + // Verify target is removed (only default remains) + const targets = await issuanceAllocator.getTargets() + expect(targets.length).to.equal(1) // Only default target + + // Verify reported total is 0% (default has it all, but isn't reported) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationRate).to.equal(0) + } + }) + + it('should remove a target when multiple targets exist', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add targets with allocations in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + await issuanceAllocator + .connect(accounts.governor) 
+ ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 400000, 0) // 40% + + // Verify allocations are set + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + const target2Allocation = await issuanceAllocator.getTargetAllocation(addresses.target2) + expect(target1Allocation.totalAllocationRate).to.equal(300000) + expect(target2Allocation.totalAllocationRate).to.equal(400000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + // With default as address(0), only non-default targets are reported (70%) + expect(totalAlloc.totalAllocationRate).to.equal(700000) + } + + // Get initial target addresses (including default) + const initialTargets = await issuanceAllocator.getTargets() + expect(initialTargets.length).to.equal(3) // default + target1 + target2 + + // Remove target2 by setting allocation to 0 (tests the swap-and-pop logic in the contract) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 0, 0) + + // Verify target2 is removed but target1 and default remain + const remainingTargets = await issuanceAllocator.getTargets() + expect(remainingTargets.length).to.equal(2) // default + target1 + expect(remainingTargets).to.include(addresses.target1) + + // Verify reported total excludes default (only target1's 30% is reported) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationRate).to.equal(300000) + } + }) + + it('should add allocation targets correctly', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add targets with allocations in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) // 10% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 200000, 0) // 20% + + // 
Verify targets were added + const target1Info = await issuanceAllocator.getTargetData(addresses.target1) + const target2Info = await issuanceAllocator.getTargetData(addresses.target2) + + // Check that targets exist by verifying they have non-zero allocations + expect(target1Info.allocatorMintingRate + target1Info.selfMintingRate).to.equal(100000) + expect(target2Info.allocatorMintingRate + target2Info.selfMintingRate).to.equal(200000) + expect(target1Info.selfMintingRate).to.equal(0) + expect(target2Info.selfMintingRate).to.equal(0) + + // Verify reported total excludes default (only target1+target2's 30% is reported) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationRate).to.equal(300000) + } + }) + + it('should validate setTargetAllocation parameters and constraints', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Test 1: Should revert when setting non-zero allocation for target that does not support IIssuanceTarget + const nonExistentTarget = accounts.nonGovernor.address + // When trying to set allocation for an EOA, the IERC165 call will revert + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](nonExistentTarget, 500_000, 0), + ).to.be.reverted + + // Test 2: Should revert when total allocation would exceed 100% + // Set allocation for target1 to 60% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, ethers.parseEther('60'), 0) + + // Try to set allocation for target2 to 50%, which would exceed 100% (60% + 50% > 100%) + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, ethers.parseEther('50'), 0), + issuanceAllocator, + 'InsufficientAllocationAvailable', + ) + }) + }) + + describe('Self-Minting Targets', () => { + it('should not mint tokens
for self-minting targets during distributeIssuance', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Add targets with different self-minter flags and set allocations + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30%, allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 0, 400000) // 40%, self-minting + + // Get balances after setting allocations (some tokens may have been minted due to setTargetAllocation calling distributeIssuance) + const balanceAfterAllocation1 = await (graphToken as any).balanceOf(addresses.target1) + const balanceAfterAllocation2 = await (graphToken as any).balanceOf(addresses.target2) + + // Mine some blocks + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Distribute issuance + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Check balances after distribution + const finalBalance1 = await (graphToken as any).balanceOf(addresses.target1) + const finalBalance2 = await (graphToken as any).balanceOf(addresses.target2) + + // Allocator-minting target should have received more tokens after the additional distribution + expect(finalBalance1).to.be.gt(balanceAfterAllocation1) + + // Self-minting target should not have received any tokens (should still be the same as after allocation) + expect(finalBalance2).to.equal(balanceAfterAllocation2) + }) + + it('should allow non-governor to call distributeIssuance', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + + // Mine some blocks + for (let i = 0; i < 5; i++) { + await 
ethers.provider.send('evm_mine', []) + } + + // Distribute issuance as non-governor (should work since distributeIssuance is not protected by GOVERNOR_ROLE) + await issuanceAllocator.connect(accounts.nonGovernor).distributeIssuance() + + // Verify tokens were minted to the target + expect(await (graphToken as any).balanceOf(addresses.target1)).to.be.gt(0) + }) + + it('should not distribute issuance when paused but not revert', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + + // Mine some blocks + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Grant pause role to governor + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Get initial balance and lastIssuanceDistributionBlock before pausing + const { graphToken } = sharedContracts + const initialBalance = await (graphToken as any).balanceOf(addresses.target1) + const initialLastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some more blocks + await ethers.provider.send('evm_mine', []) + + // Try to distribute issuance while paused - should not revert but return lastIssuanceDistributionBlock + const result = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() + expect(result).to.equal(initialLastIssuanceBlock) + + // Verify no tokens were minted and lastIssuanceDistributionBlock was not updated + const finalBalance = await (graphToken as any).balanceOf(addresses.target1) + const finalLastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + + expect(finalBalance).to.equal(initialBalance) + 
expect(finalLastIssuanceBlock).to.equal(initialLastIssuanceBlock) + }) + + it('should update selfMinter flag when allocation stays the same but flag changes', async () => { + await resetIssuanceAllocatorState() + const { issuanceAllocator, graphToken, target1 } = sharedContracts + + // Minter role already granted in shared setup + + // Add target as allocator-minting with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30%, allocator-minting + + // Verify initial state + const initialAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(initialAllocation.selfMintingRate).to.equal(0) + + // Change to self-minting with same allocation - this should NOT return early + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target1.getAddress(), 0, 300000, 0) // Same allocation, but now self-minting + + // Should return true (indicating change was made) + expect(result).to.be.true + + // Actually make the change + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 300000) + + // Verify the selfMinter flag was updated + const updatedAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(updatedAllocation.selfMintingRate).to.be.gt(0) + }) + + it('should update selfMinter flag when changing from self-minting to allocator-minting', async () => { + await resetIssuanceAllocatorState() + const { issuanceAllocator, target1 } = sharedContracts + + // Minter role already granted in shared setup + + // Add target as self-minting with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 300000) // 30%, self-minting + + 
// Verify initial state + const initialAllocation2 = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(initialAllocation2.selfMintingRate).to.be.gt(0) + + // Change to allocator-minting with same allocation - this should NOT return early + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target1.getAddress(), 300000, 0, 0) // Same allocation, but now allocator-minting + + // Should return true (indicating change was made) + expect(result).to.be.true + + // Actually make the change + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) + + // Verify the selfMinter flag was updated + const finalAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(finalAllocation.selfMintingRate).to.equal(0) + }) + + it('should track totalActiveSelfMintingAllocation correctly with incremental updates', async () => { + await resetIssuanceAllocatorState() + const { issuanceAllocator, target1, target2 } = sharedContracts + + // Minter role already granted in shared setup + + // Initially should be 0 (no targets) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(0) + } + + // Add self-minting target with 30% allocation (300000 PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 300000) // 30%, self-minting + + // Should now be 300000 PPM + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(300000) + } + + // Add allocator-minting target with 20% allocation (200000 PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 200000, 0) // 
20%, allocator-minting + + // totalActiveSelfMintingAllocation should remain the same (still 300000 PPM) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(300000) + } + + // Change target2 to self-minting with 10% allocation (100000 PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 100000) // 10%, self-minting + + // Should now be 400000 PPM (300000 + 100000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(400000) + } + + // Change target1 from self-minting to allocator-minting (same allocation) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30%, allocator-minting + + // Should now be 100000 PPM (400000 - 300000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(100000) + } + + // Remove target2 (set allocation to 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 0) // Remove target2 + + // Should now be 0 PPM (100000 - 100000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(0) + } + + // Add target1 back as self-minting with 50% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 500000) // 50%, self-minting + + // Should now be 500000 PPM + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingRate).to.equal(500000) + } + }) + }) + + describe('Issuance Rate Management', () => { + it('should update issuance rate correctly', async () => { + const { issuanceAllocator } = 
sharedContracts + + const newIssuancePerBlock = ethers.parseEther('200') + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock) + + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newIssuancePerBlock) + }) + + it('should notify targets with contract code when changing issuance rate', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + + // Mine some blocks to ensure distributeIssuance will update to current block + await ethers.provider.send('evm_mine', []) + + // Change issuance rate - this should trigger _preIssuanceChangeDistributionAndNotification + // which will iterate through targets and call beforeIssuanceAllocationChange on targets with code + const newIssuancePerBlock = ethers.parseEther('200') + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock) + + // Verify the issuance rate was updated + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newIssuancePerBlock) + }) + + it('should handle targets without contract code when changing issuance rate', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add a target using MockSimpleTarget and set allocation in one step + const mockTarget = await deployMockSimpleTarget() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await mockTarget.getAddress(), 300000, 0) // 30% + + // Mine some blocks to ensure distributeIssuance will update to current block + await ethers.provider.send('evm_mine', []) + + // Change issuance rate - this should 
trigger _preIssuanceChangeDistributionAndNotification + // which will iterate through targets and notify them + const newIssuancePerBlock = ethers.parseEther('200') + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock) + + // Verify the issuance rate was updated + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newIssuancePerBlock) + }) + + it('should handle zero issuance when distributing', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Set issuance per block to 0 + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(0) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) // 0% (zero allocation) + + // Get initial balance + const initialBalance = await (graphToken as any).balanceOf(addresses.target1) + + // Mine some blocks + await ethers.provider.send('evm_mine', []) + + // Distribute issuance - should not mint any tokens since issuance per block is 0 + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify no tokens were minted + const finalBalance = await (graphToken as any).balanceOf(addresses.target1) + expect(finalBalance).to.equal(initialBalance) + }) + + it('should revert when decreasing issuance rate with insufficient unallocated budget', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Add issuanceAllocator as minter + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Set initial issuance rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) + + // Allocate almost everything to target1, leaving very little for default + // target1 gets 950 ether/block, default gets 50 ether/block + await issuanceAllocator + .connect(accounts.governor) + [ + 
'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('950'), 0, 0) + + // Verify the current allocation + const allocationBefore = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocationBefore.allocatorMintingRate).to.equal(ethers.parseEther('950')) + + // Verify current issuance and unallocated amount + const issuanceBefore = await issuanceAllocator.getIssuancePerBlock() + expect(issuanceBefore).to.equal(ethers.parseEther('1000')) + + // Try to decrease issuance rate by 100 ether (to 900 ether/block) + // This would require default to absorb -100 ether/block change + // But default only has 50 ether/block unallocated + // So this should fail: oldIssuancePerBlock (1000) > newIssuancePerBlock (900) + unallocated (50) + await expect( + issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('900')), + ).to.be.revertedWithCustomError(issuanceAllocator, 'InsufficientUnallocatedForRateDecrease') + }) + + it('should allow governor to manually notify a specific target', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + + // Manually notify the target using the new notifyTarget function + const result = await issuanceAllocator.connect(accounts.governor).notifyTarget.staticCall(addresses.target1) + + // Should return true since notification was sent + expect(result).to.be.true + }) + + it('should revert when notifying a non-existent target (EOA)', async () => { + const { issuanceAllocator } = sharedContracts + + // Try to notify a target that doesn't exist (EOA) + // This will revert because it tries to call a function on a non-contract + await expect(issuanceAllocator.connect(accounts.governor).notifyTarget(accounts.nonGovernor.address)).to.be + 
.reverted + }) + + it('should return true when notifying a target with contract code', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add a target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) + + // Try to notify the target - should succeed since it has contract code + const result = await issuanceAllocator.connect(accounts.governor).notifyTarget.staticCall(addresses.target1) + + // Should return true since target has contract code and supports the interface + expect(result).to.be.true + }) + + it('should return true when _notifyTarget runs for a target with contract code', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add a target and set allocation in one step to trigger _notifyTarget call + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(addresses.target1, 100000, 0, 0) + + // Should return true (allocation was set) and notification succeeded + expect(result).to.be.true + + // Actually set the allocation to verify the internal _notifyTarget call + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) + + // Verify allocation was set + const mockTargetAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(mockTargetAllocation.totalAllocationRate).to.equal(100000) + }) + + it('should only notify target once per block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + 
['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30% + + // First notification should return true + const result1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(result1).to.be.true + + // Actually send the first notification + await issuanceAllocator.connect(accounts.governor).notifyTarget(await target1.getAddress()) + + // Second notification in the same block should return true (already notified) + const result2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(result2).to.be.true + }) + + it('should revert when notification fails due to target reverting', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Deploy a mock target that reverts on beforeIssuanceAllocationChange + const MockRevertingTarget = await ethers.getContractFactory('MockRevertingTarget') + const revertingTarget = await MockRevertingTarget.deploy() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // First, we need to force set the lastChangeNotifiedBlock to a past block + // so that the notification will actually be attempted + const currentBlock = await ethers.provider.getBlockNumber() + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await revertingTarget.getAddress(), currentBlock - 1) + + await expect( + issuanceAllocator.connect(accounts.governor).notifyTarget(await revertingTarget.getAddress()), + ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') + }) + + it('should revert and not set allocation when notification fails with force=false', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Deploy a mock target that reverts on beforeIssuanceAllocationChange + const 
MockRevertingTarget = await ethers.getContractFactory('MockRevertingTarget') + const revertingTarget = await MockRevertingTarget.deploy() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Try to add the reverting target with force=false + // This should trigger notification which will fail and cause the transaction to revert + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await revertingTarget.getAddress(), 300000, 0), + ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') + + // The allocation should NOT be set because the transaction reverted + const revertingTargetAllocation = await issuanceAllocator.getTargetAllocation(await revertingTarget.getAddress()) + expect(revertingTargetAllocation.totalAllocationRate).to.equal(0) + }) + + it('should revert and not set allocation when target notification fails even with force=true', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Deploy a mock target that reverts on beforeIssuanceAllocationChange + const MockRevertingTarget = await ethers.getContractFactory('MockRevertingTarget') + const revertingTarget = await MockRevertingTarget.deploy() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Try to add the reverting target with force=true + // This should trigger notification which will fail and cause the transaction to revert + // (force only affects distribution, not notification) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await revertingTarget.getAddress(), 300000, 0, 0), + ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') + + // The allocation should NOT be set because the transaction reverted + const allocation = 
await issuanceAllocator.getTargetAllocation(await revertingTarget.getAddress()) + expect(allocation.totalAllocationRate).to.equal(0) + }) + + it('should return false when setTargetAllocation called with force=false and issuance distribution is behind', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Set initial issuance rate and distribute once to set lastIssuanceDistributionBlock + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Get the current lastIssuanceDistributionBlock + const lastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + + // Grant pause role and pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine several blocks while paused (this will make _distributeIssuance() return lastIssuanceDistributionBlock < block.number) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Verify that we're now in a state where _distributeIssuance() would return a value < block.number + const currentBlock = await ethers.provider.getBlockNumber() + expect(lastIssuanceBlock).to.be.lt(currentBlock) + + // While still paused, call setTargetAllocation with minDistributedBlock=currentBlock + // This should return false because _distributeIssuance() < minDistributedBlock + // (lastDistributionBlock is behind currentBlock due to pause) + const result = await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ].staticCall(await target1.getAddress(), 
ethers.parseEther('30'), 0, currentBlock) + + // Should return false due to issuance being behind the required minimum + expect(result).to.be.false + + // Allocation is not actually set (staticCall) + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.totalAllocationRate).to.equal(0) + }) + + it('should allow setTargetAllocation with force=true when issuance distribution is behind', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Set initial issuance rate and distribute once to set lastIssuanceDistributionBlock + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Get the current lastIssuanceDistributionBlock + const lastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + + // Grant pause role and pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine several blocks while paused (this will make _distributeIssuance() return lastIssuanceDistributionBlock < block.number) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Verify that we're now in a state where _distributeIssuance() would return a value < block.number + const currentBlock = await ethers.provider.getBlockNumber() + expect(lastIssuanceBlock).to.be.lt(currentBlock) + + // While still paused, call setTargetAllocation with force=true + // This should succeed despite _distributeIssuance() < block.number because force=true + // This tests the uncovered branch where (_distributeIssuance() < 
block.number && !force) evaluates to false due to force=true + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 300000, 0, 0) + + // Should succeed and set the allocation + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.totalAllocationRate).to.equal(300000) + }) + }) + + describe('Force Change Notification Block', () => { + it('should allow governor to force set lastChangeNotifiedBlock', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) + + // Force set lastChangeNotifiedBlock to current block + const currentBlock = await ethers.provider.getBlockNumber() + const result = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(addresses.target1, currentBlock) + + expect(result).to.equal(currentBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(addresses.target1, currentBlock) + + // Verify the lastChangeNotifiedBlock was set + const targetData = await issuanceAllocator.getTargetData(addresses.target1) + expect(targetData.lastChangeNotifiedBlock).to.equal(currentBlock) + }) + + it('should allow force setting lastChangeNotifiedBlock for non-existent target', async () => { + const { issuanceAllocator } = sharedContracts + + const nonExistentTarget = accounts.nonGovernor.address + const currentBlock = await ethers.provider.getBlockNumber() + + // Force set for non-existent target should work (no validation) + const result = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(nonExistentTarget, currentBlock) + 
expect(result).to.equal(currentBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(nonExistentTarget, currentBlock) + + // Verify the lastChangeNotifiedBlock was set (even though target doesn't exist) + const targetData = await issuanceAllocator.getTargetData(nonExistentTarget) + expect(targetData.lastChangeNotifiedBlock).to.equal(currentBlock) + }) + + it('should enable notification to be sent again by setting to past block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step to trigger notification + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) + + // Verify target was notified (lastChangeNotifiedBlock should be current block) + const currentBlock = await ethers.provider.getBlockNumber() + let targetData = await issuanceAllocator.getTargetData(await target1.getAddress()) + expect(targetData.lastChangeNotifiedBlock).to.equal(currentBlock) + + // Try to notify again in the same block - should return true (already notified) + const notifyResult1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult1).to.be.true + + // Force set lastChangeNotifiedBlock to a past block (current block - 1) + const pastBlock = currentBlock - 1 + const forceResult = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(await target1.getAddress(), pastBlock) + + // Should return the block number that was set + expect(forceResult).to.equal(pastBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + 
.forceTargetNoChangeNotificationBlock(await target1.getAddress(), pastBlock) + + // Now notification should be sent again + const notifyResult2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult2).to.be.true + + // Actually send the notification + await issuanceAllocator.connect(accounts.governor).notifyTarget(await target1.getAddress()) + + // Verify lastChangeNotifiedBlock was updated to the current block (which may have advanced) + targetData = await issuanceAllocator.getTargetData(await target1.getAddress()) + const finalBlock = await ethers.provider.getBlockNumber() + expect(targetData.lastChangeNotifiedBlock).to.equal(finalBlock) + }) + + it('should prevent notification until next block by setting to current block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) + + // Force set lastChangeNotifiedBlock to current block + const currentBlock = await ethers.provider.getBlockNumber() + const forceResult = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(await target1.getAddress(), currentBlock) + + // Should return the block number that was set + expect(forceResult).to.equal(currentBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await target1.getAddress(), currentBlock) + + // Try to notify in the same block - should return true (already notified this block) + const notifyResult1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await 
target1.getAddress()) + expect(notifyResult1).to.be.true + + // Mine a block to advance + await ethers.provider.send('evm_mine', []) + + // Now notification should be sent in the next block + const notifyResult2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult2).to.be.true + }) + + it('should prevent notification until future block by setting to future block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) + + // Force set lastChangeNotifiedBlock to a future block (current + 2) + const currentBlock = await ethers.provider.getBlockNumber() + const futureBlock = currentBlock + 2 + const forceResult = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(await target1.getAddress(), futureBlock) + + // Should return the block number that was set + expect(forceResult).to.equal(futureBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await target1.getAddress(), futureBlock) + + // Try to notify in the current block - should return true (already "notified" for future block) + const notifyResult1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult1).to.be.true + + // Mine one block + await ethers.provider.send('evm_mine', []) + + // Still should return true (still before the future block) + const notifyResult2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await 
target1.getAddress()) + expect(notifyResult2).to.be.true + + // Mine another block to reach the future block + await ethers.provider.send('evm_mine', []) + + // Now should still return true (at the future block) + const notifyResult3 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult3).to.be.true + + // Mine one more block to go past the future block + await ethers.provider.send('evm_mine', []) + + // Now notification should be sent + const notifyResult4 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult4).to.be.true + }) + }) + + describe('Idempotent Operations', () => { + it('should not revert when operating on non-existent targets', async () => { + const { issuanceAllocator } = sharedContracts + + const nonExistentTarget = accounts.nonGovernor.address + + // Test 1: Setting allocation to 0 for non-existent target should not revert + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](nonExistentTarget, 0, 0) + + // Verify no non-default targets were added (only default remains) + const targets = await issuanceAllocator.getTargets() + expect(targets.length).to.equal(1) // Only default target + + // Verify reported total is 0% (all in default, which isn't reported) + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationRate).to.equal(0) + + // Test 2: Removing non-existent target (by setting allocation to 0 again) should not revert + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](nonExistentTarget, 0, 0) + + // Verify still only default target + const targetsAfter = await issuanceAllocator.getTargets() + expect(targetsAfter.length).to.equal(1) // Only default target + }) + }) + + describe('View Functions', () => { + it('should update 
lastIssuanceDistributionBlock after distribution', async () => { + const { issuanceAllocator } = sharedContracts + + // Get initial lastIssuanceDistributionBlock + const initialBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + + // Mine a block + await ethers.provider.send('evm_mine', []) + + // Distribute issuance to update lastIssuanceDistributionBlock + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Now lastIssuanceDistributionBlock should be updated + const newBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + expect(newBlock).to.be.gt(initialBlock) + }) + + it('should manage target count and array correctly', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Test initial state (with default target) + expect(await issuanceAllocator.getTargetCount()).to.equal(1) // Default allocation exists + expect((await issuanceAllocator.getTargets()).length).to.equal(1) + + // Test adding targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) + expect(await issuanceAllocator.getTargetCount()).to.equal(2) // Default + target1 + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 200000, 0) + expect(await issuanceAllocator.getTargetCount()).to.equal(3) // Default + target1 + target2 + + // Test getTargets array content + const targetAddresses = await issuanceAllocator.getTargets() + expect(targetAddresses.length).to.equal(3) + expect(targetAddresses).to.include(addresses.target1) + expect(targetAddresses).to.include(addresses.target2) + + // Test removing targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) + expect(await issuanceAllocator.getTargetCount()).to.equal(2) // Default + target2 + + await 
issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 0, 0) + expect(await issuanceAllocator.getTargetCount()).to.equal(1) // Only default remains + expect((await issuanceAllocator.getTargets()).length).to.equal(1) + }) + + it('should store targets in the getTargets array in correct order', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 200000, 0) + + // Get addresses array + const targetAddresses = await issuanceAllocator.getTargets() + + // Check that the addresses are in the correct order + // targetAddresses[0] is the default target (address(0)) + expect(targetAddresses[0]).to.equal(ethers.ZeroAddress) // Default + expect(targetAddresses[1]).to.equal(addresses.target1) + expect(targetAddresses[2]).to.equal(addresses.target2) + expect(targetAddresses.length).to.equal(3) // Default + target1 + target2 + }) + + it('should return the correct target address by index', async () => { + const { issuanceAllocator, graphToken, target1, target2, target3 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 200000, 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target3.getAddress(), 0, 300000) + + // Get all 
target addresses + const addresses = await issuanceAllocator.getTargets() + expect(addresses.length).to.equal(4) // Default + 3 targets + + // Check that the addresses are in the correct order + // addresses[0] is the default target (address(0)) + expect(addresses[0]).to.equal(ethers.ZeroAddress) // Default + expect(addresses[1]).to.equal(await target1.getAddress()) + expect(addresses[2]).to.equal(await target2.getAddress()) + expect(addresses[3]).to.equal(await target3.getAddress()) + + // Test getTargetAt method for individual access + expect(await issuanceAllocator.getTargetAt(0)).to.equal(ethers.ZeroAddress) // Default + expect(await issuanceAllocator.getTargetAt(1)).to.equal(await target1.getAddress()) + expect(await issuanceAllocator.getTargetAt(2)).to.equal(await target2.getAddress()) + expect(await issuanceAllocator.getTargetAt(3)).to.equal(await target3.getAddress()) + }) + + it('should return the correct target allocation', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target with allocation in one step + const allocation = 300000 // 30% in PPM + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, allocation, 0) + + // Now allocation should be set + const targetAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(targetAllocation.totalAllocationRate).to.equal(allocation) + }) + + it('should return the correct allocation types', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add targets with different allocation types + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) + await issuanceAllocator + 
.connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 200000) + + // Check allocation types + const target1Allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + const target2Allocation = await issuanceAllocator.getTargetAllocation(await target2.getAddress()) + + expect(target1Allocation.selfMintingRate).to.equal(0) // Not self-minting + expect(target1Allocation.allocatorMintingRate).to.equal(100000) // Allocator-minting + + expect(target2Allocation.selfMintingRate).to.equal(200000) // Self-minting + expect(target2Allocation.allocatorMintingRate).to.equal(0) // Not allocator-minting + }) + }) + + describe('Return Values', () => { + describe('setTargetAllocation', () => { + it('should return true for successful operations', async () => { + const { issuanceAllocator } = await setupSimpleIssuanceAllocator() + const target = await deployMockSimpleTarget() + + // Adding new target + const addResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 100000, 0, 0) + expect(addResult).to.equal(true) + + // Actually add the target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target.getAddress(), 100000, 0) + + // Changing existing allocation + const changeResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 200000, 0, 0) + expect(changeResult).to.equal(true) + + // Setting same allocation (no-op) + const sameResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 100000, 0, 0) + expect(sameResult).to.equal(true) + + // Removing target + const removeResult = await issuanceAllocator + .connect(accounts.governor) + 
['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 0, 0, 0) + expect(removeResult).to.equal(true) + + // Setting allocation to 0 for non-existent target + const nonExistentResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(accounts.nonGovernor.address, 0, 0, 0) + expect(nonExistentResult).to.equal(true) + }) + }) + + describe('setTargetAllocation overloads', () => { + it('should work with all setTargetAllocation overloads and enforce access control', async () => { + const { issuanceAllocator } = await setupSimpleIssuanceAllocator() + const target1 = await deployMockSimpleTarget() + const target2 = await deployMockSimpleTarget() + + // Test 1: 2-parameter overload (allocator-only) + const allocatorPPM = 300000 // 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await target1.getAddress(), allocatorPPM) + + // Verify the allocation was set correctly + const allocation1 = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation1.allocatorMintingRate).to.equal(allocatorPPM) + expect(allocation1.selfMintingRate).to.equal(0) + + // Test 2: 3-parameter overload (allocator + self) + const allocatorPPM2 = 200000 // 20% + const selfPPM = 150000 // 15% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), allocatorPPM2, selfPPM) + + // Verify the allocation was set correctly + const allocation2 = await issuanceAllocator.getTargetAllocation(await target2.getAddress()) + expect(allocation2.allocatorMintingRate).to.equal(allocatorPPM2) + expect(allocation2.selfMintingRate).to.equal(selfPPM) + + // Test 3: Access control - 2-parameter overload should require governor + await expect( + issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256)'](await 
target1.getAddress(), 200000), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + + // Test 4: Access control - 3-parameter overload should require governor + await expect( + issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 160000, 90000), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + }) + + describe('setIssuancePerBlock', () => { + it('should return appropriate values based on conditions', async () => { + const { issuanceAllocator } = sharedContracts + + // Should return true for normal operations + const newRate = ethers.parseEther('200') + const normalResult = await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock.staticCall(newRate) + expect(normalResult).to.equal(true) + + // Should return true even when setting same rate + const sameResult = await issuanceAllocator + .connect(accounts.governor) + .setIssuancePerBlock.staticCall(issuancePerBlock) + expect(sameResult).to.equal(true) + + // Grant pause role and pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // setIssuancePerBlock returns false when paused without explicit fromBlockNumber + const pausedResult = await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock.staticCall(newRate) + expect(pausedResult).to.equal(false) + + // setIssuancePerBlock returns true when paused with explicit fromBlockNumber that has been reached + const lastDistributionBlock = await (await issuanceAllocator.getDistributionState()).lastDistributionBlock + const pausedWithBlockResult = await issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'].staticCall(newRate, lastDistributionBlock) + expect(pausedWithBlockResult).to.equal(true) + + // Actually execute the 
call with fromBlockNumber to cover all branches + await issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'](newRate, lastDistributionBlock) + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newRate) + + // Verify the simple variant still returns false when paused + const differentRate = ethers.parseEther('2000') + const result = await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock.staticCall(differentRate) + expect(result).to.equal(false) + // Rate should not change because paused and no explicit fromBlockNumber + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newRate) + }) + }) + + describe('distributeIssuance', () => { + it('should return appropriate block numbers', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Should return lastIssuanceDistributionBlock when no blocks have passed + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const lastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock + const noBlocksResult = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() + expect(noBlocksResult).to.equal(lastIssuanceBlock) + + // Add a target and mine blocks to test distribution + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + await ethers.provider.send('evm_mine', []) + + // Should return current block number when issuance is distributed + const currentBlock = await ethers.provider.getBlockNumber() + const distributionResult = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() + expect(distributionResult).to.equal(currentBlock) + }) + }) + }) + + describe('getTargetIssuancePerBlock', () => { + it('should return correct issuance for different target configurations', async () => { + const { issuanceAllocator, addresses } = 
sharedContracts + // OLD: These were used for PPM calculations + // const issuancePerBlock = await issuanceAllocator.getIssuancePerBlock() + // const PPM = 1_000_000 + + // Test unregistered target (should return zeros) + let result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + expect(result.selfIssuanceRate).to.equal(0) + expect(result.allocatorIssuanceRate).to.equal(0) + expect(result.allocatorIssuanceBlockAppliedTo).to.be.greaterThanOrEqual(0) + expect(result.selfIssuanceBlockAppliedTo).to.be.greaterThanOrEqual(0) + + // Test self-minting target with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, ethers.parseEther('30')) + + const expectedSelfIssuance = ethers.parseEther('30') + result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + expect(result.selfIssuanceRate).to.equal(expectedSelfIssuance) + expect(result.allocatorIssuanceRate).to.equal(0) + // expect(result.selfIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceAccumulationBlock()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal( + (await issuanceAllocator.getDistributionState()).lastDistributionBlock, + ) + + // Test allocator-minting target with 40% allocation (reset target1 first) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, ethers.parseEther('40'), 0) + + const expectedAllocatorIssuance = ethers.parseEther('40') + result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + expect(result.allocatorIssuanceRate).to.equal(expectedAllocatorIssuance) + expect(result.selfIssuanceRate).to.equal(0) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal( + (await issuanceAllocator.getDistributionState()).lastDistributionBlock, + ) + // expect(result.selfIssuanceBlockAppliedTo).to.equal(await 
issuanceAllocator.lastIssuanceAccumulationBlock()) + }) + + it('should not revert when contract is paused and blockAppliedTo indicates pause state', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target as self-minter with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, ethers.parseEther('30')) // 30%, self-minter + + // Distribute issuance to set blockAppliedTo to current block + await issuanceAllocator.distributeIssuance() + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Should not revert when paused - this is the key difference from old functions + const result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + + // OLD: These were used for PPM calculations + // const issuancePerBlock = await issuanceAllocator.getIssuancePerBlock() + // const PPM = 1_000_000 + const expectedIssuance = ethers.parseEther('30') + + expect(result.selfIssuanceRate).to.equal(expectedIssuance) + expect(result.allocatorIssuanceRate).to.equal(0) + // For self-minting targets, selfIssuanceBlockAppliedTo reflects when events were last emitted (lastAccumulationBlock) + // expect(result.selfIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceAccumulationBlock()) + // allocatorIssuanceBlockAppliedTo should be the last distribution block (before pause) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal( + (await issuanceAllocator.getDistributionState()).lastDistributionBlock, + ) + }) + + it('should show blockAppliedTo updates after distribution', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await 
issuanceAllocator.getAddress()) + + // Add target as allocator-minter with 50% allocation + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('50'), 0, 0) // 50%, allocator-minter + + // allocatorIssuanceBlockAppliedTo should be current block since setTargetAllocation triggers distribution + let result = await issuanceAllocator.getTargetIssuancePerBlock(await target1.getAddress()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + expect(result.selfIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + + // Distribute issuance + await issuanceAllocator.distributeIssuance() + const distributionBlock = await ethers.provider.getBlockNumber() + + // Now allocatorIssuanceBlockAppliedTo should be updated to current block + result = await issuanceAllocator.getTargetIssuancePerBlock(await target1.getAddress()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(distributionBlock) + expect(result.selfIssuanceBlockAppliedTo).to.equal(distributionBlock) + + // OLD: These were used for PPM calculations + // const issuancePerBlock = await issuanceAllocator.getIssuancePerBlock() + // const PPM = 1_000_000 + const expectedIssuance = ethers.parseEther('50') + expect(result.allocatorIssuanceRate).to.equal(expectedIssuance) + expect(result.selfIssuanceRate).to.equal(0) + }) + }) + + describe('Notification Behavior When Paused', () => { + it('should notify targets of allocation changes even when paused', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Setup + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add initial allocation + await issuanceAllocator + .connect(accounts.governor) + 
['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Change allocation while paused - should notify target even though paused + const lastDistributionBlock = await (await issuanceAllocator.getDistributionState()).lastDistributionBlock + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](addresses.target1, 400000, 0, lastDistributionBlock) // Change to 40% + + // Verify that beforeIssuanceAllocationChange was called on the target + // This is verified by checking that the transaction succeeded and the allocation was updated + const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.allocatorMintingRate).to.equal(400000) + }) + + it('should notify targets of issuance rate changes even when paused', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Setup + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300000) // 30% + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Change issuance rate while paused - should notify targets even though paused + // Use explicit fromBlockNumber to allow change while paused + const lastDistributionBlock = await (await issuanceAllocator.getDistributionState()).lastDistributionBlock + await issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('200'), lastDistributionBlock) + + // Verify that the rate change was applied + expect(await 
issuanceAllocator.getIssuancePerBlock()).to.equal(ethers.parseEther('200')) + }) + + it('should not notify targets when no actual change occurs', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30% + + // Try to set the same allocation - should not notify (no change) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // Same 30% + + // Verify allocation is unchanged + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.allocatorMintingRate).to.equal(300000) + + // Try to set the same issuance rate - should not notify (no change) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(ethers.parseEther('100')) + }) + }) + + describe('Pending Issuance Distribution', () => { + it('should handle distributePendingIssuance with accumulated self-minting', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add allocator-minting and self-minting targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await 
target1.getAddress(), 400000, 0) // 40% allocator + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 100000) // 10% self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Pause and mine blocks to accumulate self-minting + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing self-minting allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target2.getAddress(), 0, 200000, 0) // Change to 20% self + + // Check accumulation exists + const distState = await issuanceAllocator.getDistributionState() + expect(distState.selfMintingOffset).to.be.gt(0) + + // Call distributePendingIssuance + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + // Verify tokens were distributed + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance1).to.be.gt(initialBalance1) + + // Verify accumulation was cleared + const finalDistState = await issuanceAllocator.getDistributionState() + expect(finalDistState.selfMintingOffset).to.equal(0) + }) + + it('should handle distributePendingIssuance with toBlockNumber parameter', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator + 
.connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 500000, 100000) + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const beforePauseState = await issuanceAllocator.getDistributionState() + + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 500000, 200000, 0) + + const currentBlock = await ethers.provider.getBlockNumber() + const distState = await issuanceAllocator.getDistributionState() + // Distribute only to a block that's midway through the accumulated period + const partialBlock = beforePauseState.lastDistributionBlock + BigInt(2) + + // Distribute to a partial block (not current block) + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) + + // Verify partial distribution + const afterPartialState = await issuanceAllocator.getDistributionState() + expect(afterPartialState.lastDistributionBlock).to.equal(partialBlock) + // Verify accumulation was partially consumed but some remains + expect(afterPartialState.selfMintingOffset).to.be.lt(distState.selfMintingOffset) + + // Distribute remainder to current block + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](currentBlock) + const finalState = await issuanceAllocator.getDistributionState() + expect(finalState.selfMintingOffset).to.equal(0) // All cleared + }) + + it('should handle distributePendingIssuance when blocks == 0', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await 
issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 500000, 0) + + // Distribute to current block + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + const distState = await issuanceAllocator.getDistributionState() + const currentBlock = distState.lastDistributionBlock + + // Call distributePendingIssuance with toBlockNumber == lastDistributionBlock (blocks == 0) + const result = await issuanceAllocator + .connect(accounts.governor) + ['distributePendingIssuance(uint256)'].staticCall(currentBlock) + + expect(result).to.equal(currentBlock) + }) + + it('should not emit SelfMintingOffsetReconciled when offset unchanged', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup with only allocator-minting (no self-minting) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), ethers.parseEther('50'), 0) + + // Distribute to current block (no accumulated offset) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify no offset accumulated + const stateBefore = await issuanceAllocator.getDistributionState() + expect(stateBefore.selfMintingOffset).to.equal(0) + + // Mine blocks and distribute again + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // distributePendingIssuance with no accumulated offset should not emit reconciliation event + const tx = await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + await 
expect(tx).to.not.emit(issuanceAllocator, 'SelfMintingOffsetReconciled') + + // Verify offset is still 0 + const stateAfter = await issuanceAllocator.getDistributionState() + expect(stateAfter.selfMintingOffset).to.equal(0) + }) + + it('should handle proportional distribution when available < allocatedTotal', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup with high allocator-minting and high self-minting rates + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) + + // Setup: 40% + 40% allocator-minting, 15% self-minting (5% default) + // Using absolute values (tokens per block, not PPM): + // allocatedRate (non-default) = 1000 - 150 (self) - 50 (default) = 800 ether + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target1.getAddress(), + ethers.parseEther('400'), // 400 ether per block allocator-minting + 0, + 0, + ) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target2.getAddress(), + ethers.parseEther('400'), // 400 ether per block allocator-minting + ethers.parseEther('150'), // 150 ether per block self-minting + 0, + ) + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and mine blocks to build up self-minting accumulation + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 10; i++) { + await ethers.provider.send('evm_mine', []) + } + // Don't change allocations - just distribute with accumulated self-minting + // After 10 blocks: + // - selfMintingOffset = 150 ether * 10 = 1500 ether + // - totalForPeriod = 1000 ether * 10 = 10000 ether + // - available = 10000 
- 1500 = 8500 ether + // - allocatedTotal = 800 ether * 10 = 8000 ether + // So: 8500 > 8000, this won't trigger proportional... + // + // Let me force it by calling distributePendingIssuance for only PART of the period + // This will make available smaller relative to allocatedTotal + + const distState = await issuanceAllocator.getDistributionState() + // Distribute for only 2 blocks instead of all 10 + const partialBlock = distState.lastDistributionBlock + BigInt(2) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // For 2 blocks with 10 blocks of accumulated self-minting: + // - selfMintingOffset = 1500 ether (from 10 blocks) + // - totalForPeriod = 1000 * 2 = 2000 ether (only distributing 2 blocks) + // - available = 2000 - 1500 = 500 ether + // - allocatedTotal = 800 * 2 = 1600 ether + // So: 500 < 1600 ✓ triggers proportional distribution! + + // Distribute pending for partial period - should use proportional distribution + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) + + // Both targets should receive tokens (proportionally reduced due to budget constraint) + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + expect(finalBalance1).to.be.gt(initialBalance1) + expect(finalBalance2).to.be.gt(initialBalance2) + + // Verify proportional distribution (both should get same amount since same allocator rate) + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + expect(distributed1).to.be.closeTo(distributed2, ethers.parseEther('1')) + }) + + it('should distribute remainder to default target in full rate distribution', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = 
await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Set target2 as default target (it's a contract that supports IIssuanceTarget) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(await target2.getAddress()) + + // Add target with low allocator rate, high self-minting - ensures default gets significant portion + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 100000) // 10% each + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialDefaultBalance = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Pause and accumulate (with small self-minting, available should be > allocatedTotal) + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 100000, 150000, 0) + + // Distribute - should give remainder to default target + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + // Default target should receive tokens + const finalDefaultBalance = await (graphToken as any).balanceOf(await target2.getAddress()) + expect(finalDefaultBalance).to.be.gt(initialDefaultBalance) + }) + + it('should trigger pending distribution path when selfMintingOffset > 0 in distributeIssuance', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await 
issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 500000, 100000) + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Pause and accumulate + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 500000, 200000, 0) + + // Verify accumulation exists + let distState = await issuanceAllocator.getDistributionState() + expect(distState.selfMintingOffset).to.be.gt(0) + + // Unpause + await issuanceAllocator.connect(accounts.governor).unpause() + + // Call distributeIssuance - should internally call _distributePendingIssuance due to accumulation + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify tokens distributed and accumulation cleared + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + distState = await issuanceAllocator.getDistributionState() + expect(distState.selfMintingOffset).to.equal(0) + }) + + it('should revert when non-governor calls distributePendingIssuance()', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() + + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + + // Try to call distributePendingIssuance() 
as non-governor + await expect(issuanceAllocator.connect(accounts.user)['distributePendingIssuance()']()).to.be.reverted + }) + + it('should revert when non-governor calls distributePendingIssuance(uint256)', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() + + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + + const distState = await issuanceAllocator.getDistributionState() + const blockNumber = distState.lastDistributionBlock + BigInt(1) + + // Try to call distributePendingIssuance(uint256) as non-governor + await expect(issuanceAllocator.connect(accounts.user)['distributePendingIssuance(uint256)'](blockNumber)).to.be + .reverted + }) + + it('should revert when toBlockNumber > block.number', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() + + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Pause to enable distributePendingIssuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + + // Try to distribute to a future block + const futureBlock = (await ethers.provider.getBlockNumber()) + 100 + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](futureBlock)).to + .be.reverted + }) + + it('should revert when toBlockNumber < lastDistributionBlock', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() + + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Pause and mine some blocks + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + const distState = await issuanceAllocator.getDistributionState() + const pastBlock = 
distState.lastDistributionBlock - BigInt(1) + + // Try to distribute to a block before lastDistributionBlock + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](pastBlock)).to.be + .reverted + }) + + it('should handle exact allocation with zero remainder to default', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Set issuance to 1000 ether per block + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) + + // Configure target1 with allocator=800, self=200 (total = 1000, leaving 0 for default) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target1.getAddress(), + ethers.parseEther('800'), // 800 ether per block allocator-minting + ethers.parseEther('200'), // 200 ether per block self-minting + 0, + ) + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + const distStateBefore = await issuanceAllocator.getDistributionState() + + // Distribute - should result in exactly 0 remainder for default + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + const distStateAfter = await issuanceAllocator.getDistributionState() + const blocksDist = distStateAfter.lastDistributionBlock - distStateBefore.lastDistributionBlock + + // Calculate expected distribution based on actual blocks + // totalForPeriod = 1000 * blocksDist ether + // selfMintingOffset = 200 * blocksDist 
ether + // available = (1000 - 200) * blocksDist = 800 * blocksDist ether + // allocatedTotal = 800 * blocksDist ether + // remainder = 0 ✓ + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + const expectedDistribution = ethers.parseEther('800') * BigInt(blocksDist) + expect(finalBalance - initialBalance).to.equal(expectedDistribution) + }) + + it('should handle proportional distribution with target having zero allocator rate', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) + + // target1: allocator=400, self=0 + // target2: allocator=0, self=100 (self-minting only, no allocator-minting) + // default: gets the remainder (500 allocator + 0 self) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('400'), 0, 0) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target2.getAddress(), + 0, // Zero allocator-minting rate + ethers.parseEther('100'), + 0, + ) + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate enough self-minting + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 15; i++) { + await ethers.provider.send('evm_mine', []) + } + + const distStateBefore = await issuanceAllocator.getDistributionState() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Distribute only 2 blocks (out of the 15+ 
accumulated) + // With high self-minting accumulation, this creates proportional distribution scenario + // Expected accumulation during pause: 100 ether/block * ~15 blocks = ~1500 ether + // Distribution for 2 blocks: totalForPeriod = 2000 ether, consumed ~= 1500 ether, available ~= 500 ether + // allocatedTotal = 400 ether * 2 = 800 ether + // Since available < allocatedTotal, proportional distribution kicks in + const partialBlock = distStateBefore.lastDistributionBlock + BigInt(2) + + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // The key test: target1 should receive some tokens (it has allocatorMintingRate > 0) + // target2 should receive ZERO tokens (it has allocatorMintingRate == 0) + // This proves the `if (0 < targetData.allocatorMintingRate)` branch was tested + const distributed1 = finalBalance1 - initialBalance1 + expect(distributed1).to.be.gt(0) // target1 gets some tokens + expect(finalBalance2).to.equal(initialBalance2) // target2 gets zero (skipped in the if check) + }) + }) + + describe('Pause/Unpause Edge Cases', () => { + // Helper function to deploy a fresh IssuanceAllocator for these tests + async function setupIssuanceAllocator() { + const graphToken = await deployTestGraphToken() + const issuanceAllocator = await deployIssuanceAllocator( + await graphToken.getAddress(), + accounts.governor, + ethers.parseEther('100'), + ) + const target1 = await deployDirectAllocation(await graphToken.getAddress(), accounts.governor) + const target2 = await deployDirectAllocation(await graphToken.getAddress(), accounts.governor) + + return { graphToken, issuanceAllocator, target1, target2 } + } + + it('should handle unpause → mine blocks → pause without distributeIssuance', async () => { + const { issuanceAllocator, 
graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add targets: 30 tokens/block allocator-minting, 20 tokens/block self-minting (leaving 50 for default) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('30'), 0, 0) // 30 tokens/block allocator + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target2.getAddress(), 0, ethers.parseEther('20'), 0) // 20 tokens/block self + + // Initialize distribution + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBlock = await ethers.provider.getBlockNumber() + + // Track initial balance for target1 (allocator-minting target) + const balance1Initial = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Phase 1: Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + const _pauseBlock1 = await ethers.provider.getBlockNumber() + + // Mine a few blocks while paused + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 2: Unpause WITHOUT calling distributeIssuance + await issuanceAllocator.connect(accounts.governor).unpause() + const _unpauseBlock = await ethers.provider.getBlockNumber() + + // Phase 3: Mine blocks while unpaused, but DON'T call distributeIssuance + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 4: Pause again WITHOUT calling distributeIssuance + await 
issuanceAllocator.connect(accounts.governor).pause() + const _pauseBlock2 = await ethers.provider.getBlockNumber() + + // Mine more blocks while paused + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 5: Call distributeIssuance while paused + // This is the key test: blocks between unpauseBlock and pauseBlock2 were unpaused, + // but since distributeIssuance is called while paused, self-minting accumulation + // treats them as paused (lazy evaluation) + const tx1 = await issuanceAllocator.connect(accounts.governor).distributeIssuance() + await tx1.wait() + const distributionBlock1 = await ethers.provider.getBlockNumber() + + // Verify: Check distribution state after first distribution + const distState1 = await issuanceAllocator.getDistributionState() + expect(distState1.lastSelfMintingBlock).to.equal(distributionBlock1) + expect(distState1.lastDistributionBlock).to.equal(initialBlock) // Should NOT advance (paused) + expect(distState1.selfMintingOffset).to.be.gt(0) // Should have accumulated + + // Calculate expected self-minting accumulation + // From initialBlock to distributionBlock1 (all blocks treated as paused) + const blocksSinceInitial = BigInt(distributionBlock1) - BigInt(initialBlock) + const selfMintingRate = ethers.parseEther('20') // 20% of 100 = 20 tokens/block + const expectedAccumulation = selfMintingRate * blocksSinceInitial + expect(distState1.selfMintingOffset).to.be.closeTo(expectedAccumulation, ethers.parseEther('1')) + + // Verify no additional allocator-minting was distributed during pause + const balance1AfterPause = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(balance1AfterPause).to.equal(balance1Initial) // Should not have changed during pause + + // Phase 6: Unpause and call distributeIssuance + await issuanceAllocator.connect(accounts.governor).unpause() + await ethers.provider.send('evm_mine', []) + + const tx2 = await 
issuanceAllocator.connect(accounts.governor).distributeIssuance() + await tx2.wait() + const distributionBlock2 = await ethers.provider.getBlockNumber() + + // Verify: Distribution state after second distribution + const distState2 = await issuanceAllocator.getDistributionState() + expect(distState2.lastSelfMintingBlock).to.equal(distributionBlock2) + expect(distState2.lastDistributionBlock).to.equal(distributionBlock2) // Should advance (unpaused) + expect(distState2.selfMintingOffset).to.equal(0) // Should be reset after distribution + + // Verify allocator-minting was distributed correctly + const balance1After = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(balance1After).to.be.gt(balance1Initial) // Should have received additional tokens + + // Calculate total issuance for the period + const totalBlocks = BigInt(distributionBlock2) - BigInt(initialBlock) + const totalIssuance = ethers.parseEther('100') * totalBlocks + + // Self-minting should have received their allowance (but not minted via allocator) + // Allocator-minting should have received (totalIssuance - selfMintingOffset) * (30 / 80) + // 30 tokens/block for target1, 50 tokens/block for default = 80 tokens/block total allocator-minting + const expectedAllocatorDistribution = + ((totalIssuance - expectedAccumulation) * ethers.parseEther('30')) / ethers.parseEther('80') + + // Allow for rounding errors (compare total distributed amount) + // Note: Tolerance is higher due to multiple distribution events and the initial distribution + const totalDistributed = balance1After - balance1Initial + expect(totalDistributed).to.be.closeTo(expectedAllocatorDistribution, ethers.parseEther('25')) + }) + + it('should use getDistributionState to query distribution state efficiently', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await 
issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), 0, ethers.parseEther('50'), 0) // 50 tokens/block self + + // Initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initBlock = await ethers.provider.getBlockNumber() + + // Verify initial state + let distState = await issuanceAllocator.getDistributionState() + expect(distState.lastDistributionBlock).to.equal(initBlock) + expect(distState.lastSelfMintingBlock).to.equal(initBlock) + expect(distState.selfMintingOffset).to.equal(0) + + // Pause and mine blocks + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Call distributeIssuance while paused + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const pausedDistBlock = await ethers.provider.getBlockNumber() + + // Verify state after paused distribution + distState = await issuanceAllocator.getDistributionState() + expect(distState.lastSelfMintingBlock).to.equal(pausedDistBlock) + expect(distState.lastDistributionBlock).to.equal(initBlock) // Should NOT advance (paused) + expect(distState.selfMintingOffset).to.be.gt(0) // Should have accumulated + + // Verify getDistributionState returns consistent values + const distState2 = await issuanceAllocator.getDistributionState() + expect(distState.lastDistributionBlock).to.equal(distState2.lastDistributionBlock) + expect(distState.selfMintingOffset).to.equal(distState2.selfMintingOffset) + expect(distState.lastSelfMintingBlock).to.equal(distState2.lastSelfMintingBlock) + }) + + it('should correctly emit IssuanceSelfMintAllowance events across pause/unpause cycles', 
async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), 0, ethers.parseEther('50'), 0) // 50 tokens/block self + + // Initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initBlock = await ethers.provider.getBlockNumber() + + // Pause, unpause (without distribute), pause again + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await issuanceAllocator.connect(accounts.governor).unpause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + + // Call distributeIssuance while paused + const tx = await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const receipt = await tx.wait() + const currentBlock = await ethers.provider.getBlockNumber() + + // Find IssuanceSelfMintAllowance events + const events = receipt.logs.filter( + (log) => log.topics[0] === issuanceAllocator.interface.getEvent('IssuanceSelfMintAllowance').topicHash, + ) + + // Should emit exactly one event for the entire range + expect(events.length).to.equal(1) + + // Decode the event + const decodedEvent = issuanceAllocator.interface.decodeEventLog( + 'IssuanceSelfMintAllowance', + events[0].data, + events[0].topics, + ) + + // Verify event covers the correct block range (from initBlock+1 to currentBlock) + expect(decodedEvent.fromBlock).to.equal(BigInt(initBlock) + 1n) + 
expect(decodedEvent.toBlock).to.equal(currentBlock) + expect(decodedEvent.target).to.equal(await target1.getAddress()) + + // Verify amount matches expected (50% of 100 tokens/block * number of blocks) + const blocksInRange = BigInt(currentBlock) - BigInt(initBlock) + const expectedAmount = ethers.parseEther('50') * blocksInRange + expect(decodedEvent.amount).to.be.closeTo(expectedAmount, ethers.parseEther('1')) + }) + + it('should continue accumulating through unpaused periods when accumulated balance exists', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Set target1 allocation with both allocator and self minting + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('30'), ethers.parseEther('20'), 0) + + // Distribute to set starting point + await issuanceAllocator.distributeIssuance() + const blockAfterInitialDist = await ethers.provider.getBlockNumber() + + // Phase 1: Pause and mine blocks + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 2: Distribute while paused + await issuanceAllocator.distributeIssuance() + const blockDist1 = await ethers.provider.getBlockNumber() + + const state1 = await issuanceAllocator.getDistributionState() + const pausedBlocks1 = blockDist1 - blockAfterInitialDist + const expectedAccumulation1 = ethers.parseEther('20') * BigInt(pausedBlocks1) + expect(state1.selfMintingOffset).to.equal(expectedAccumulation1) + + // Phase 
3: Unpause (no distribute) + await issuanceAllocator.connect(accounts.governor).unpause() + + // Mine more blocks while unpaused (no distribute!) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 4: Distribute while unpaused + await issuanceAllocator.distributeIssuance() + const blockDist2 = await ethers.provider.getBlockNumber() + + const state2 = await issuanceAllocator.getDistributionState() + expect(state2.lastSelfMintingBlock).to.equal(blockDist2) + expect(state2.selfMintingOffset).to.equal(0) // Cleared by distribution + + // Phase 5: Pause again (no distribute) + await issuanceAllocator.connect(accounts.governor).pause() + const blockPause2 = await ethers.provider.getBlockNumber() + + // Mine more blocks while paused + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 6: Distribute while paused + await issuanceAllocator.distributeIssuance() + const blockDist3 = await ethers.provider.getBlockNumber() + + const state3 = await issuanceAllocator.getDistributionState() + + // THE FIX: With the new logic, accumulation continues from lastSelfMintingBlock + // when paused, even if some of those blocks happened during an unpaused period + // where no distribution occurred. This is conservative and safe. 
+ const blocksAccumulated = blockDist3 - blockDist2 + const actuallyPausedBlocks = blockDist3 - blockPause2 + const unpausedBlocksIncluded = blocksAccumulated - actuallyPausedBlocks + + // Verify the fix: accumulation should be for all blocks from lastSelfMintingBlock + const actualAccumulation = state3.selfMintingOffset + const expectedAccumulation = ethers.parseEther('20') * BigInt(blocksAccumulated) + + expect(actualAccumulation).to.equal( + expectedAccumulation, + 'Should accumulate from lastSelfMintingBlock when paused, including unpaused blocks where no distribution occurred', + ) + + // Rationale: Once accumulation starts (during pause), continue through any unpaused periods + // until distribution clears the accumulation. This is conservative and allows better recovery. + expect(unpausedBlocksIncluded).to.equal(1) // Should include 1 unpaused block (blockDist2 to blockPause2) + }) + + it('should correctly handle partial distribution when toBlockNumber < block.number', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add targets: 30 tokens/block allocator-minting, 20 tokens/block self-minting + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('30'), 0, 0) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target2.getAddress(), 0, ethers.parseEther('20'), 0) + + // Initialize distribution + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBlock = await ethers.provider.getBlockNumber() 
+ + // Pause and mine blocks to accumulate self-minting + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // We've mined 8 blocks while paused (8 evm_mine calls after the pause tx) + // Current block should be initialBlock + 9 (pause + 8 mines) + + // Call distributePendingIssuance with toBlockNumber at the halfway point + const midBlock = initialBlock + 5 // Distribute only up to initialBlock + 5 + + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](midBlock) + + // Check the state after partial distribution + const stateAfterPartial = await issuanceAllocator.getDistributionState() + const actualCurrentBlock = await ethers.provider.getBlockNumber() + + // Budget-based clearing behavior for partial distribution: + // - lastSelfMintingBlock advances to actualCurrentBlock (via _advanceSelfMintingBlock) + // - lastDistributionBlock advances to midBlock (partial distribution) + // - selfMintingOffset is reduced by min(accumulated, totalForPeriod) + // + // In this case: accumulated self-minting from initialBlock to actualCurrentBlock is small + // compared to the period budget (100 tokens/block * 5 blocks distributed = 500 tokens), + // so all accumulated is cleared (budget exceeds accumulated). + + expect(stateAfterPartial.lastDistributionBlock).to.equal(midBlock) + expect(stateAfterPartial.lastSelfMintingBlock).to.equal(actualCurrentBlock) + + // Budget-based logic: subtract min(accumulated, totalForPeriod) from accumulated + // Since accumulated < totalForPeriod (small accumulation vs large budget for 5 blocks), + // all accumulated is cleared. 
+ expect(stateAfterPartial.selfMintingOffset).to.equal(0, 'Accumulated cleared when less than period budget') + + // Verify subsequent distribution works correctly + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const finalBlock = await ethers.provider.getBlockNumber() + + const stateAfterFinal = await issuanceAllocator.getDistributionState() + expect(stateAfterFinal.selfMintingOffset).to.equal(0) + expect(stateAfterFinal.lastDistributionBlock).to.equal(finalBlock) + + // Verify token distribution is mathematically correct + // The allocator-minting should have received the correct amount accounting for ALL self-minting accumulation + const balance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + + const totalBlocks = BigInt(finalBlock) - BigInt(initialBlock) + const totalIssuance = ethers.parseEther('100') * totalBlocks + const totalSelfMinting = ethers.parseEther('20') * totalBlocks + const availableForAllocator = totalIssuance - totalSelfMinting + // target1 gets 30/80 of allocator-minting (30 for target1, 50 for default) + const expectedForTarget1 = (availableForAllocator * ethers.parseEther('30')) / ethers.parseEther('80') + + // Allow higher tolerance due to multiple distribution calls (partial + full) + // Each transaction adds blocks which affects the total issuance calculation + expect(balance1).to.be.closeTo(expectedForTarget1, ethers.parseEther('100')) + }) + + it('should correctly handle accumulated self-minting that exceeds period budget', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // High 
self-minting rate: 80 tokens/block, allocator: 20 tokens/block + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('20'), 0, 0) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target2.getAddress(), 0, ethers.parseEther('80'), 0) + + // Initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBlock = await ethers.provider.getBlockNumber() + + // Pause and accumulate a lot + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + const afterMining = await ethers.provider.getBlockNumber() + + // Accumulated should be: 80 * (afterMining - initialBlock) + const blocksAccumulated = afterMining - initialBlock + const _expectedAccumulated = ethers.parseEther('80') * BigInt(blocksAccumulated) + + // Now distribute only 1 block worth (partialBlock - initialBlock = 1) + const partialBlock = initialBlock + 1 + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) + + const stateAfter = await issuanceAllocator.getDistributionState() + const afterDistBlock = await ethers.provider.getBlockNumber() + + // More accumulation happened during the distributePendingIssuance call itself + const totalBlocksAccumulated = afterDistBlock - initialBlock + const totalExpectedAccumulated = ethers.parseEther('80') * BigInt(totalBlocksAccumulated) + + // Budget-based logic: distributed 1 block with totalForPeriod = issuancePerBlock * 1 = 100 + // Subtract budget from accumulated (not rate-based), since we don't know historical rates + const blocksDistributed = partialBlock - initialBlock + 
const totalForPeriod = ethers.parseEther('100') * BigInt(blocksDistributed) + const expectedRemaining = totalExpectedAccumulated - totalForPeriod + + // This should NOT be zero - accumulated exceeds period budget, so remainder is retained + expect(stateAfter.selfMintingOffset).to.be.gt(0) + // Budget-based: accumulated ~480, subtract 100, expect ~380 remaining (within 10 token tolerance) + expect(stateAfter.selfMintingOffset).to.be.closeTo(expectedRemaining, ethers.parseEther('10')) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts new file mode 100644 index 000000000..d21150c81 --- /dev/null +++ b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts @@ -0,0 +1,134 @@ +/** + * Issuance System Integration Tests - Optimized Version + * Reduced from 149 lines to ~80 lines using shared utilities + */ + +import { expect } from 'chai' + +import { setupOptimizedAllocateSystem } from './optimizedFixtures' +import { expectRatioToEqual, mineBlocks, TestConstants } from './testPatterns' + +describe('Issuance System', () => { + let system: any + + before(async () => { + // Single setup instead of beforeEach - major performance improvement + system = await setupOptimizedAllocateSystem({ + setupTargets: false, // We'll set up specific scenarios per test + }) + }) + + beforeEach(async () => { + // Fast state reset instead of full redeployment + await system.helpers.resetState() + }) + + describe('End-to-End Issuance Flow', () => { + it('should allocate tokens to targets based on their allocation percentages', async () => { + const { contracts, addresses, accounts } = system + + // Verify initial balances (should be 0) + expect(await contracts.graphToken.balanceOf(addresses.target1)).to.equal(0) + expect(await contracts.graphToken.balanceOf(addresses.target2)).to.equal(0) + + // Set up allocations using predefined constants: target1 = 30%, target2 = 40% + await 
contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, 0) + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0, 0) + + // Grant operator roles using predefined constants + await contracts.target1 + .connect(accounts.governor) + .grantRole(TestConstants.OPERATOR_ROLE, accounts.operator.address) + await contracts.target2 + .connect(accounts.governor) + .grantRole(TestConstants.OPERATOR_ROLE, accounts.operator.address) + + // Get balances after allocation setup + const balanceAfterAllocation1 = await contracts.graphToken.balanceOf(addresses.target1) + const balanceAfterAllocation2 = await contracts.graphToken.balanceOf(addresses.target2) + + // Mine blocks using helper function + await mineBlocks(10) + await contracts.issuanceAllocator.distributeIssuance() + + // Get final balances and verify distributions + const finalBalance1 = await contracts.graphToken.balanceOf(addresses.target1) + const finalBalance2 = await contracts.graphToken.balanceOf(addresses.target2) + + // Verify targets received tokens proportionally + expect(finalBalance1).to.be.gt(balanceAfterAllocation1) + expect(finalBalance2).to.be.gt(balanceAfterAllocation2) + + // Test token distribution from targets to users + await contracts.target1.connect(accounts.operator).sendTokens(accounts.user.address, finalBalance1) + await contracts.target2.connect(accounts.operator).sendTokens(accounts.indexer1.address, finalBalance2) + + // Verify user balances and target emptiness + expect(await contracts.graphToken.balanceOf(accounts.user.address)).to.equal(finalBalance1) + expect(await contracts.graphToken.balanceOf(accounts.indexer1.address)).to.equal(finalBalance2) + expect(await contracts.graphToken.balanceOf(addresses.target1)).to.equal(0) + 
expect(await contracts.graphToken.balanceOf(addresses.target2)).to.equal(0) + }) + + it('should handle allocation changes correctly', async () => { + const { contracts, addresses, accounts } = system + + // Set up initial allocations using helper + await system.helpers.setupStandardAllocations() + + // Verify initial total allocation (excludes default since it's address(0)) + const totalAlloc = await contracts.issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationRate).to.equal(700000) // 70% (30% + 40%, excludes default) + + // Change allocations: target1 = 50%, target2 = 20% (30% goes to default) + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](addresses.target1, TestConstants.ALLOCATION_50_PERCENT, 0, 0) + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, 0) + + // Verify updated allocations (excludes default since it's address(0)) + const updatedTotalAlloc = await contracts.issuanceAllocator.getTotalAllocation() + expect(updatedTotalAlloc.totalAllocationRate).to.equal(700000) // 70% (50% + 20%, excludes default) + + // Verify individual target allocations + const target1Info = await contracts.issuanceAllocator.getTargetData(addresses.target1) + const target2Info = await contracts.issuanceAllocator.getTargetData(addresses.target2) + + expect(target1Info.allocatorMintingRate + target1Info.selfMintingRate).to.equal( + TestConstants.ALLOCATION_50_PERCENT, + ) + expect(target2Info.allocatorMintingRate + target2Info.selfMintingRate).to.equal( + TestConstants.ALLOCATION_20_PERCENT, + ) + + // Verify proportional issuance distribution (50:20 = 5:2 ratio) + const target1Result = await contracts.issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + const target2Result = await 
contracts.issuanceAllocator.getTargetIssuancePerBlock(addresses.target2) + + expect(target1Result.selfIssuanceRate).to.equal(0) + expect(target2Result.selfIssuanceRate).to.equal(0) + + // Verify the ratio using helper function: 50/20 = 2.5, so 2500 in our precision + expectRatioToEqual( + target1Result.allocatorIssuanceRate, + target2Result.allocatorIssuanceRate, + 2500n, // 50/20 * 1000 precision + TestConstants.DEFAULT_TOLERANCE, + ) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/ReentrancyProtection.test.ts b/packages/issuance/test/tests/allocate/ReentrancyProtection.test.ts new file mode 100644 index 000000000..245271acb --- /dev/null +++ b/packages/issuance/test/tests/allocate/ReentrancyProtection.test.ts @@ -0,0 +1,265 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { deployIssuanceAllocator } from './fixtures' + +/** + * ReentrantAction enum matching MockReentrantTarget.sol + * IMPORTANT: This must be kept in sync with the Solidity enum + */ +enum ReentrantAction { + None, + DistributeIssuance, + SetTargetAllocation1Param, + SetTargetAllocation2Param, + SetTargetAllocation3Param, + SetIssuancePerBlock, + SetIssuancePerBlock2Param, + NotifyTarget, + SetDefaultTarget1Param, + SetDefaultTarget2Param, + DistributePendingIssuance0Param, + DistributePendingIssuance1Param, +} + +describe('IssuanceAllocator - Reentrancy Protection', () => { + let accounts + let graphToken + let issuanceAllocator + let reentrantTarget + let issuancePerBlock + const GOVERNOR_ROLE = SHARED_CONSTANTS.GOVERNOR_ROLE + const PAUSE_ROLE = SHARED_CONSTANTS.PAUSE_ROLE + + beforeEach(async () => { + accounts = await getTestAccounts() + issuancePerBlock = ethers.parseEther('100') + + // Deploy contracts + graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + issuanceAllocator = await 
deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + // Grant minter role to issuanceAllocator + await graphToken.addMinter(await issuanceAllocator.getAddress()) + + // Deploy mock reentrant target + const MockReentrantTargetFactory = await ethers.getContractFactory('MockReentrantTarget') + reentrantTarget = await MockReentrantTargetFactory.deploy() + + // Set the issuance allocator address in the reentrant target + await reentrantTarget.setIssuanceAllocator(await issuanceAllocator.getAddress()) + + // Grant GOVERNOR_ROLE and PAUSE_ROLE to the reentrant target so it can attempt protected operations + await issuanceAllocator.connect(accounts.governor).grantRole(GOVERNOR_ROLE, await reentrantTarget.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + }) + + describe('Reentrancy during distributeIssuance', () => { + it('should allow target to call distributeIssuance during notification (legitimate use case)', async () => { + // This verifies that targets can legitimately call distributeIssuance() during notification + // This is safe because: + // 1. distributeIssuance() has block-tracking protection (no-op if already at current block) + // 2. It makes no outward calls (just mints tokens) + // 3. It doesn't modify allocations + // 4. 
Targets may want to claim pending issuance before allocation changes + + // Add the reentrant target (reentrancy disabled during setup) + await reentrantTarget.setReentrantAction(ReentrantAction.None) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('50')) + + // Configure to call distributeIssuance during next notification + await reentrantTarget.setReentrantAction(ReentrantAction.DistributeIssuance) + + // Change allocation - the notification will call distributeIssuance + // This should succeed (distributeIssuance is not protected, as it's a legitimate use case) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('40')), + ).to.not.be.reverted + }) + }) + + describe('Reentrancy during setTargetAllocation', () => { + const testCases = [ + { + name: '1 param variant', + action: ReentrantAction.SetTargetAllocation1Param, + trigger: async (target: string) => + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](target, ethers.parseEther('40')), + }, + { + name: '2 param variant', + action: ReentrantAction.SetTargetAllocation2Param, + trigger: async (target: string) => + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](target, ethers.parseEther('40'), 0), + }, + { + name: '3 param variant', + action: ReentrantAction.SetTargetAllocation3Param, + trigger: async (target: string) => + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](target, ethers.parseEther('40'), 0), + }, + ] + + testCases.forEach(({ name, action, trigger }) => { + it(`should revert when target attempts to reenter setTargetAllocation (${name})`, async () => { + // First add the target with normal behavior + await 
reentrantTarget.setReentrantAction(ReentrantAction.None) + const targetAddress = await reentrantTarget.getAddress() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](targetAddress, ethers.parseEther('30')) + + // Now configure it to attempt reentrancy on next notification + await reentrantTarget.setReentrantAction(action) + + // Attempt to change allocation - should revert due to reentrancy + await expect(trigger(targetAddress)).to.be.revertedWithCustomError( + issuanceAllocator, + 'ReentrancyGuardReentrantCall', + ) + }) + }) + }) + + describe('Reentrancy during setIssuancePerBlock', () => { + const testCases = [ + { + name: '1 param variant', + action: ReentrantAction.SetIssuancePerBlock, + trigger: async () => issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200')), + }, + { + name: '2 param variant', + action: ReentrantAction.SetIssuancePerBlock2Param, + trigger: async () => + issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('200'), 0), + }, + ] + + testCases.forEach(({ name, action, trigger }) => { + it(`should revert when target attempts to reenter setIssuancePerBlock (${name})`, async () => { + // Set up a malicious default target + await issuanceAllocator + .connect(accounts.governor) + ['setDefaultTarget(address)'](await reentrantTarget.getAddress()) + + // Configure to attempt reentrancy + await reentrantTarget.setReentrantAction(action) + + // Attempt to change issuance rate - should revert due to reentrancy + await expect(trigger()).to.be.revertedWithCustomError(issuanceAllocator, 'ReentrancyGuardReentrantCall') + }) + }) + }) + + describe('Reentrancy during notifyTarget', () => { + it('should revert when target attempts to reenter notifyTarget', async () => { + // Add the target + await reentrantTarget.setReentrantAction(ReentrantAction.None) + await issuanceAllocator + .connect(accounts.governor) + 
['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('25')) + + // Configure to attempt reentrancy + await reentrantTarget.setReentrantAction(ReentrantAction.NotifyTarget) + + // Attempt to notify - should revert due to reentrancy + await expect( + issuanceAllocator.connect(accounts.governor).notifyTarget(await reentrantTarget.getAddress()), + ).to.be.revertedWithCustomError(issuanceAllocator, 'ReentrancyGuardReentrantCall') + }) + }) + + describe('Reentrancy during setDefaultTarget', () => { + const testCases = [ + { + name: '1 param variant', + action: ReentrantAction.SetDefaultTarget1Param, + trigger: async (target: string) => + issuanceAllocator.connect(accounts.governor)['setDefaultTarget(address)'](target), + }, + { + name: '2 param variant', + action: ReentrantAction.SetDefaultTarget2Param, + trigger: async (target: string) => issuanceAllocator.connect(accounts.governor).setDefaultTarget(target), + }, + ] + + testCases.forEach(({ name, action, trigger }) => { + it(`should revert when target attempts to reenter setDefaultTarget (${name})`, async () => { + // Configure to attempt reentrancy + await reentrantTarget.setReentrantAction(action) + + // Attempt to set as default target - should revert due to reentrancy + await expect(trigger(await reentrantTarget.getAddress())).to.be.revertedWithCustomError( + issuanceAllocator, + 'ReentrancyGuardReentrantCall', + ) + }) + }) + }) + + describe('Reentrancy during distributePendingIssuance', () => { + const testCases = [ + { name: '0 param variant', action: ReentrantAction.DistributePendingIssuance0Param }, + { name: '1 param variant', action: ReentrantAction.DistributePendingIssuance1Param }, + ] + + testCases.forEach(({ name, action }) => { + it(`should revert when target attempts to reenter distributePendingIssuance (${name})`, async () => { + // Add the reentrant target with initial allocation + await reentrantTarget.setReentrantAction(ReentrantAction.None) + const 
targetAddress = await reentrantTarget.getAddress() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](targetAddress, ethers.parseEther('30')) + + // Configure to attempt calling distributePendingIssuance during next notification + await reentrantTarget.setReentrantAction(action) + + // Attempt to change allocation - should revert due to reentrancy + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](targetAddress, ethers.parseEther('40')), + ).to.be.revertedWithCustomError(issuanceAllocator, 'ReentrancyGuardReentrantCall') + }) + }) + }) + + describe('No reentrancy when disabled', () => { + it('should work normally when reentrancy is not attempted', async () => { + // Ensure reentrant action is None + await reentrantTarget.setReentrantAction(ReentrantAction.None) + + // Add the target with some allocation + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('50')), + ).to.not.be.reverted + + // Mine some blocks + await hre.network.provider.send('hardhat_mine', ['0x0A']) // Mine 10 blocks + + // Distribute should work normally + await expect(issuanceAllocator.distributeIssuance()).to.not.be.reverted + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/SelfMintingEventMode.test.ts b/packages/issuance/test/tests/allocate/SelfMintingEventMode.test.ts new file mode 100644 index 000000000..bcf6be726 --- /dev/null +++ b/packages/issuance/test/tests/allocate/SelfMintingEventMode.test.ts @@ -0,0 +1,372 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' + +describe('SelfMintingEventMode', () => { + let accounts + let graphToken + let issuanceAllocator + let 
selfMintingTarget + let addresses + + const issuancePerBlock = ethers.parseEther('100') + + // SelfMintingEventMode enum values + const EventMode = { + None: 0, + Aggregate: 1, + PerTarget: 2, + } + + beforeEach(async () => { + accounts = await getTestAccounts() + + // Deploy contracts + graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + selfMintingTarget = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + // Cache addresses + addresses = { + issuanceAllocator: await issuanceAllocator.getAddress(), + selfMintingTarget: await selfMintingTarget.getAddress(), + graphToken: graphTokenAddress, + } + + // Grant minter role + await (graphToken as any).addMinter(addresses.issuanceAllocator) + }) + + describe('Initialization', () => { + it('should initialize to PerTarget mode', async () => { + const mode = await issuanceAllocator.getSelfMintingEventMode() + expect(mode).to.equal(EventMode.PerTarget) + }) + }) + + describe('setSelfMintingEventMode', () => { + it('should allow governor to set event mode', async () => { + await expect(issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None)) + .to.emit(issuanceAllocator, 'SelfMintingEventModeUpdated') + .withArgs(EventMode.PerTarget, EventMode.None) + + expect(await issuanceAllocator.getSelfMintingEventMode()).to.equal(EventMode.None) + }) + + it('should return true when setting to same mode', async () => { + const currentMode = await issuanceAllocator.getSelfMintingEventMode() + const result = await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(currentMode) + expect(result).to.not.be.reverted + }) + + it('should not emit event when setting to same mode', async () => { + const currentMode = await issuanceAllocator.getSelfMintingEventMode() + await 
expect(issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(currentMode)).to.not.emit( + issuanceAllocator, + 'SelfMintingEventModeUpdated', + ) + }) + + it('should allow switching between all modes', async () => { + // PerTarget -> None + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None) + expect(await issuanceAllocator.getSelfMintingEventMode()).to.equal(EventMode.None) + + // None -> Aggregate + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.Aggregate) + expect(await issuanceAllocator.getSelfMintingEventMode()).to.equal(EventMode.Aggregate) + + // Aggregate -> PerTarget + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.PerTarget) + expect(await issuanceAllocator.getSelfMintingEventMode()).to.equal(EventMode.PerTarget) + }) + + it('should revert when non-governor tries to set mode', async () => { + await expect( + issuanceAllocator.connect(accounts.nonGovernor).setSelfMintingEventMode(EventMode.None), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + }) + + describe('Event Emission - None Mode', () => { + beforeEach(async () => { + // Set to None mode + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None) + + // Set up self-minting target + const selfMintingRate = ethers.parseEther('30') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + }) + + it('should not emit IssuanceSelfMintAllowance events in None mode', async () => { + // Advance blocks by calling distributeIssuance + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + // Should not emit per-target events + await expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + }) + + it('should not emit 
IssuanceSelfMintAllowanceAggregate events in None mode', async () => { + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + await expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowanceAggregate') + }) + }) + + describe('Event Emission - Aggregate Mode', () => { + beforeEach(async () => { + // Set to Aggregate mode + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.Aggregate) + + // Set up self-minting target + const selfMintingRate = ethers.parseEther('30') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + }) + + it('should emit IssuanceSelfMintAllowanceAggregate event', async () => { + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + await expect(tx).to.emit(issuanceAllocator, 'IssuanceSelfMintAllowanceAggregate') + }) + + it('should emit aggregate event with correct total amount', async () => { + const selfMintingRate = ethers.parseEther('30') + + // Distribute to get to current state + await issuanceAllocator.distributeIssuance() + const startBlock = await ethers.provider.getBlockNumber() + + // Mine a block then distribute + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + const endBlock = await ethers.provider.getBlockNumber() + + // Expected amount is for the block we just mined + const blocks = endBlock - startBlock + const expectedAmount = selfMintingRate * BigInt(blocks) + + await expect(tx) + .to.emit(issuanceAllocator, 'IssuanceSelfMintAllowanceAggregate') + .withArgs(expectedAmount, startBlock + 1, endBlock) + }) + + it('should not emit per-target events in Aggregate mode', async () => { + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + await 
expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + }) + }) + + describe('Event Emission - PerTarget Mode', () => { + beforeEach(async () => { + // Already in PerTarget mode by default + const selfMintingRate = ethers.parseEther('30') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + }) + + it('should emit IssuanceSelfMintAllowance event for each target', async () => { + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + await expect(tx).to.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + }) + + it('should emit per-target event with correct amount', async () => { + const selfMintingRate = ethers.parseEther('30') + + // Distribute to get to current state + await issuanceAllocator.distributeIssuance() + const startBlock = await ethers.provider.getBlockNumber() + + // Mine a block then distribute + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + const endBlock = await ethers.provider.getBlockNumber() + + // Expected amount is for the block we just mined + const blocks = endBlock - startBlock + const expectedAmount = selfMintingRate * BigInt(blocks) + + await expect(tx) + .to.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + .withArgs(addresses.selfMintingTarget, expectedAmount, startBlock + 1, endBlock) + }) + + it('should not emit aggregate events in PerTarget mode', async () => { + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + await expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowanceAggregate') + }) + }) + + describe('Mode Switching During Operation', () => { + it('should apply new mode immediately on next distribution', async () => { + // Set up self-minting target + const selfMintingRate = ethers.parseEther('30') + await issuanceAllocator + 
.connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + + // PerTarget mode initially + await ethers.provider.send('evm_mine', []) + await expect(issuanceAllocator.distributeIssuance()).to.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + + // Switch to None mode + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None) + + // Next distribution should not emit events + await ethers.provider.send('evm_mine', []) + await expect(issuanceAllocator.distributeIssuance()).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + }) + + it('should handle rapid mode switching correctly', async () => { + const selfMintingRate = ethers.parseEther('30') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + + // Switch through all modes + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None) + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.Aggregate) + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.PerTarget) + + // Should end up in PerTarget mode + expect(await issuanceAllocator.getSelfMintingEventMode()).to.equal(EventMode.PerTarget) + + await ethers.provider.send('evm_mine', []) + await expect(issuanceAllocator.distributeIssuance()).to.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + }) + }) + + describe('Gas Optimization', () => { + it('should use less gas in None mode than PerTarget mode', async () => { + const selfMintingRate = ethers.parseEther('30') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + + // Measure gas in PerTarget mode + await ethers.provider.send('evm_mine', []) + const perTargetTx = await 
issuanceAllocator.distributeIssuance() + const perTargetReceipt = await perTargetTx.wait() + const perTargetGas = perTargetReceipt.gasUsed + + // Switch to None mode + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None) + + // Measure gas in None mode + await ethers.provider.send('evm_mine', []) + const noneTx = await issuanceAllocator.distributeIssuance() + const noneReceipt = await noneTx.wait() + const noneGas = noneReceipt.gasUsed + + // None mode should use less gas + expect(noneGas).to.be.lessThan(perTargetGas) + }) + + it('should use less gas in Aggregate mode than PerTarget mode with multiple targets', async () => { + // Add multiple self-minting targets + const target2 = await deployDirectAllocation(await graphToken.getAddress(), accounts.governor) + const target3 = await deployDirectAllocation(await graphToken.getAddress(), accounts.governor) + + const selfMintingRate = ethers.parseEther('10') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, selfMintingRate) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target3.getAddress(), 0, selfMintingRate) + + // Measure gas in PerTarget mode + await ethers.provider.send('evm_mine', []) + const perTargetTx = await issuanceAllocator.distributeIssuance() + const perTargetReceipt = await perTargetTx.wait() + const perTargetGas = perTargetReceipt.gasUsed + + // Switch to Aggregate mode + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.Aggregate) + + // Measure gas in Aggregate mode + await ethers.provider.send('evm_mine', []) + const aggregateTx = await issuanceAllocator.distributeIssuance() + const aggregateReceipt = await 
aggregateTx.wait() + const aggregateGas = aggregateReceipt.gasUsed + + // Aggregate mode should use less gas + expect(aggregateGas).to.be.lessThan(perTargetGas) + }) + }) + + describe('Edge Cases', () => { + it('should handle mode changes when no self-minting targets exist', async () => { + // No self-minting targets added + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.None) + + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + // Should not emit any self-minting events + await expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + await expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowanceAggregate') + }) + + it('should handle mode when totalSelfMintingRate is zero', async () => { + // Add target with only allocator-minting (no self-minting) + const allocatorMintingRate = ethers.parseEther('50') + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, allocatorMintingRate, 0) + + await issuanceAllocator.connect(accounts.governor).setSelfMintingEventMode(EventMode.Aggregate) + + await ethers.provider.send('evm_mine', []) + const tx = await issuanceAllocator.distributeIssuance() + + // Should not emit self-minting events when totalSelfMintingRate is 0 + await expect(tx).to.not.emit(issuanceAllocator, 'IssuanceSelfMintAllowanceAggregate') + }) + + it('should work correctly after removing and re-adding self-minting target', async () => { + const selfMintingRate = ethers.parseEther('30') + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + + // Remove target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, 0) + + // Re-add target + await issuanceAllocator + 
.connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.selfMintingTarget, 0, selfMintingRate) + + // Should emit events normally + await ethers.provider.send('evm_mine', []) + await expect(issuanceAllocator.distributeIssuance()).to.emit(issuanceAllocator, 'IssuanceSelfMintAllowance') + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/TargetNotification.test.ts b/packages/issuance/test/tests/allocate/TargetNotification.test.ts new file mode 100644 index 000000000..ab7f757a4 --- /dev/null +++ b/packages/issuance/test/tests/allocate/TargetNotification.test.ts @@ -0,0 +1,249 @@ +import { expect } from 'chai' +import hre from 'hardhat' + +const { ethers } = hre + +import { getTestAccounts } from '../common/fixtures' +import { deployTestGraphToken } from '../common/fixtures' +import { deployIssuanceAllocator } from './fixtures' + +describe('IssuanceAllocator - Target Notification', () => { + let accounts + let addresses: { + target1: string + target2: string + defaultTarget: string + } + + let issuanceAllocator + let graphToken + let target1 + let target2 + let defaultTarget + + const issuancePerBlock = ethers.parseEther('100') + + beforeEach(async () => { + // Get test accounts + accounts = await getTestAccounts() + + // Deploy GraphToken + graphToken = await deployTestGraphToken() + + // Deploy IssuanceAllocator + issuanceAllocator = await deployIssuanceAllocator( + await graphToken.getAddress(), + accounts.governor, + issuancePerBlock, + ) + + // Grant minter role to IssuanceAllocator + await graphToken.addMinter(await issuanceAllocator.getAddress()) + + // Deploy mock notification trackers + const MockNotificationTracker = await ethers.getContractFactory('MockNotificationTracker') + target1 = await MockNotificationTracker.deploy() + target2 = await MockNotificationTracker.deploy() + defaultTarget = await MockNotificationTracker.deploy() + + addresses = { + target1: await target1.getAddress(), + target2: await 
target2.getAddress(), + defaultTarget: await defaultTarget.getAddress(), + } + }) + + describe('setTargetAllocation notifications', () => { + it('should notify both target and default target when setting allocation', async () => { + // Set a non-zero default target first + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Verify initial state + expect(await target1.notificationCount()).to.equal(0) + expect(await defaultTarget.notificationCount()).to.equal(1) // Notified during setDefaultTarget + + // Reset notification count for clean test + await defaultTarget.resetNotificationCount() + + // Set allocation for target1 - should notify BOTH target1 and defaultTarget + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Verify both targets were notified + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should notify both targets when changing existing allocation', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Set initial allocation for target1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Change allocation for target1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('50')) + + // Both should be notified again + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should notify both targets when removing allocation', async () => { + // Set a non-zero default target + await 
issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Set initial allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Remove allocation (set to 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) + + // Both should be notified + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should notify default target even when it is address(0)', async () => { + // Default is address(0) by default, which should handle notification gracefully + expect(await issuanceAllocator.getTargetAt(0)).to.equal(ethers.ZeroAddress) + + // Set allocation for target1 - should not revert even though default is address(0) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')), + ).to.not.be.reverted + + // Target1 should be notified + expect(await target1.notificationCount()).to.equal(1) + }) + + it('should notify correct targets when setting multiple allocations', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + await defaultTarget.resetNotificationCount() + + // Set allocation for target1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + expect(await target1.notificationCount()).to.equal(1) + expect(await target2.notificationCount()).to.equal(0) + expect(await defaultTarget.notificationCount()).to.equal(1) + + // Reset counters + await target1.resetNotificationCount() + await 
defaultTarget.resetNotificationCount() + + // Set allocation for target2 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('20')) + + // Only target2 and default should be notified (not target1) + expect(await target1.notificationCount()).to.equal(0) + expect(await target2.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should emit NotificationReceived events for both targets', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + await defaultTarget.resetNotificationCount() + + // Set allocation and check for events from both mock targets + const tx = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Both targets should emit their NotificationReceived events + await expect(tx).to.emit(target1, 'NotificationReceived') + await expect(tx).to.emit(defaultTarget, 'NotificationReceived') + }) + }) + + describe('setIssuancePerBlock notifications', () => { + it('should notify only default target when changing issuance rate', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Add a regular target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Change issuance rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200')) + + // Only default should be notified (regular targets keep same absolute rates) + expect(await target1.notificationCount()).to.equal(0) + expect(await 
defaultTarget.notificationCount()).to.equal(1) + }) + }) + + describe('setDefaultTarget notifications', () => { + it('should notify both old and new default targets', async () => { + // Set first default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Reset counter + await target1.resetNotificationCount() + + // Change to new default target - should notify both + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) + + // Both old and new default should be notified + expect(await target1.notificationCount()).to.equal(1) + expect(await target2.notificationCount()).to.equal(1) + }) + }) + + describe('notification deduplication', () => { + it('should not notify target twice in the same block', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + await defaultTarget.resetNotificationCount() + + // Try to set the same allocation twice in same block (second should be no-op) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Should only be notified once + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + + // Second call with same values should not notify again (no change) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Counts should remain the same (no new notifications) + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/commonTestUtils.ts b/packages/issuance/test/tests/allocate/commonTestUtils.ts new file mode 100644 index 000000000..c150e92d6 --- /dev/null +++ 
b/packages/issuance/test/tests/allocate/commonTestUtils.ts @@ -0,0 +1,46 @@ +/** + * Common test utilities for access control and other shared test patterns + */ + +import type { SignerWithAddress } from '@nomicfoundation/hardhat-ethers/signers' +import { expect } from 'chai' +import type { Contract } from 'ethers' + +/** + * Test multiple access control methods on a contract + * @param contract - The contract to test + * @param methods - Array of methods to test with their arguments + * @param authorizedAccount - Account that should have access + * @param unauthorizedAccount - Account that should not have access + */ + +export async function testMultipleAccessControl( + contract: Contract, + methods: Array<{ + method: string + args: unknown[] + description: string + }>, + authorizedAccount: SignerWithAddress, + unauthorizedAccount: SignerWithAddress, +): Promise { + for (const methodConfig of methods) { + const { method, args, description: _description } = methodConfig + + // Test that unauthorized account is rejected + await expect(contract.connect(unauthorizedAccount)[method](...args)).to.be.revertedWithCustomError( + contract, + 'AccessControlUnauthorizedAccount', + ) + + // Test that authorized account can call the method (if it exists and is callable) + try { + // Some methods might revert for business logic reasons even with proper access + // We just want to ensure they don't revert with AccessControlUnauthorizedAccount + await contract.connect(authorizedAccount)[method](...args) + } catch (error: any) { + // If it reverts, make sure it's not due to access control + expect(error.message).to.not.include('AccessControlUnauthorizedAccount') + } + } +} diff --git a/packages/issuance/test/tests/allocate/fixtures.ts b/packages/issuance/test/tests/allocate/fixtures.ts new file mode 100644 index 000000000..0122365b1 --- /dev/null +++ b/packages/issuance/test/tests/allocate/fixtures.ts @@ -0,0 +1,91 @@ +/** + * Allocate-specific test fixtures + * Deployment and 
setup functions for allocate contracts + */ + +import hre from 'hardhat' + +const { ethers } = hre +const { upgrades } = require('hardhat') + +import { Constants, deployTestGraphToken } from '../common/fixtures' +import { GraphTokenHelper } from '../common/graphTokenHelper' + +/** + * Deploy the IssuanceAllocator contract with proxy using OpenZeppelin's upgrades library + * @param {string} graphToken + * @param {HardhatEthersSigner} governor + * @param {bigint} issuancePerBlock + * @returns {Promise} + */ +export async function deployIssuanceAllocator(graphToken, governor, issuancePerBlock) { + // Deploy implementation and proxy using OpenZeppelin's upgrades library + const IssuanceAllocatorFactory = await ethers.getContractFactory('IssuanceAllocator') + + // Deploy proxy with implementation + const issuanceAllocatorContract = await upgrades.deployProxy(IssuanceAllocatorFactory, [governor.address], { + constructorArgs: [graphToken], + initializer: 'initialize', + }) + + // Get the contract instance + const issuanceAllocator = issuanceAllocatorContract + + // Set issuance per block + await issuanceAllocator.connect(governor).setIssuancePerBlock(issuancePerBlock) + + return issuanceAllocator +} + +/** + * Deploy the DirectAllocation contract with proxy using OpenZeppelin's upgrades library + * @param {string} graphToken + * @param {HardhatEthersSigner} governor + * @returns {Promise} + */ +export async function deployDirectAllocation(graphToken, governor) { + // Deploy implementation and proxy using OpenZeppelin's upgrades library + const DirectAllocationFactory = await ethers.getContractFactory('DirectAllocation') + + // Deploy proxy with implementation + const directAllocationContract = await upgrades.deployProxy(DirectAllocationFactory, [governor.address], { + constructorArgs: [graphToken], + initializer: 'initialize', + }) + + // Return the contract instance + return directAllocationContract +} + +/** + * Deploy allocate-only system (IssuanceAllocator + 
DirectAllocation targets) + * This version excludes eligibility contracts for clean separation in tests + * @param {TestAccounts} accounts + * @param {bigint} [issuancePerBlock=Constants.DEFAULT_ISSUANCE_PER_BLOCK] + * @returns {Promise} + */ +export async function deployAllocateSystem(accounts, issuancePerBlock = Constants.DEFAULT_ISSUANCE_PER_BLOCK) { + const { governor } = accounts + + // Deploy test GraphToken + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Deploy IssuanceAllocator + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, governor, issuancePerBlock) + + // Add the IssuanceAllocator as a minter on the GraphToken + const graphTokenHelper = new GraphTokenHelper(graphToken as any, governor) + await graphTokenHelper.addMinter(await issuanceAllocator.getAddress()) + + // Deploy DirectAllocation targets + const target1 = await deployDirectAllocation(graphTokenAddress, governor) + const target2 = await deployDirectAllocation(graphTokenAddress, governor) + + return { + graphToken, + issuanceAllocator, + target1, + target2, + } +} diff --git a/packages/issuance/test/tests/allocate/issuanceCalculations.ts b/packages/issuance/test/tests/allocate/issuanceCalculations.ts new file mode 100644 index 000000000..e013ff5e9 --- /dev/null +++ b/packages/issuance/test/tests/allocate/issuanceCalculations.ts @@ -0,0 +1,129 @@ +import { ethers } from 'hardhat' + +/** + * Shared calculation utilities for issuance tests. + * These functions provide reference implementations for expected values in tests. + */ + +// Constants for better readability +export const CALCULATION_CONSTANTS = { + PRECISION_MULTIPLIER: 1000n, // For ratio calculations + WEI_PER_ETHER: ethers.parseEther('1'), +} as const + +/** + * Calculate expected self-minting accumulation during pause. + * In the new model, we accumulate self-minting (not allocator-minting) during pause. 
+ * + * @param totalSelfMintingRate - Total self-minting rate (tokens per block) + * @param blocks - Number of blocks to accumulate over + * @returns Expected accumulated self-minting amount + */ +export function calculateExpectedSelfMintingAccumulation(totalSelfMintingRate: bigint, blocks: bigint): bigint { + if (blocks === 0n || totalSelfMintingRate === 0n) return 0n + return totalSelfMintingRate * blocks +} + +/** + * Calculate expected issuance for a specific target during normal operation. + * + * @param targetRate - Target's allocation rate (tokens per block) + * @param blocks - Number of blocks + * @returns Expected issuance for the target + */ +export function calculateExpectedTargetIssuance(targetRate: bigint, blocks: bigint): bigint { + if (blocks === 0n || targetRate === 0n) return 0n + return targetRate * blocks +} + +/** + * Calculate proportional distribution during unpause when insufficient funds. + * Used when available funds < total non-default needs. + * + * @param availableAmount - Total available amount to distribute + * @param targetRate - Target's allocator-minting rate (tokens per block) + * @param totalNonDefaultRate - Total non-default allocator-minting rate + * @returns Expected amount for the target + */ +export function calculateProportionalDistribution( + availableAmount: bigint, + targetRate: bigint, + totalNonDefaultRate: bigint, +): bigint { + if (availableAmount === 0n || targetRate === 0n || totalNonDefaultRate === 0n) return 0n + return (availableAmount * targetRate) / totalNonDefaultRate +} + +/** + * Calculate expected total issuance for multiple targets. 
+ * + * @param blocks - Number of blocks + * @param targetRates - Array of target rates (tokens per block) + * @returns Array of expected issuance amounts for each target + */ +export function calculateMultiTargetIssuance(blocks: bigint, targetRates: bigint[]): bigint[] { + return targetRates.map((rate) => calculateExpectedTargetIssuance(rate, blocks)) +} + +/** + * Verify that distributed amounts add up to expected total. + * + * @param distributedAmounts - Array of distributed amounts + * @param expectedTotal - Expected total amount + * @param tolerance - Tolerance for rounding errors (default: 1 wei) + * @returns True if amounts add up within tolerance + */ +export function verifyTotalDistribution( + distributedAmounts: bigint[], + expectedTotal: bigint, + tolerance: bigint = 1n, +): boolean { + const totalDistributed = distributedAmounts.reduce((sum, amount) => sum + amount, 0n) + const diff = totalDistributed > expectedTotal ? totalDistributed - expectedTotal : expectedTotal - totalDistributed + return diff <= tolerance +} + +/** + * Calculate expected distribution ratios between targets + * + * @param rates - Array of rates (tokens per block) + * @returns Array of ratios relative to first target + */ +export function calculateExpectedRatios(rates: bigint[]): bigint[] { + if (rates.length === 0) return [] + + const baseRate = rates[0] + if (baseRate === 0n) return rates.map(() => 0n) + + return rates.map((rate) => (rate * CALCULATION_CONSTANTS.PRECISION_MULTIPLIER) / baseRate) +} + +/** + * Convert allocation percentage to absolute rate + * + * @param percentage - Percentage as a number (e.g., 30 for 30%) + * @param issuancePerBlock - Total issuance per block + * @returns Absolute rate (tokens per block) + */ +export function percentageToRate(percentage: number, issuancePerBlock: bigint): bigint { + return (issuancePerBlock * BigInt(Math.round(percentage * 100))) / 10000n +} + +/** + * Convert rate to percentage + * + * @param rate - Rate (tokens per block) + 
* @param issuancePerBlock - Total issuance per block + * @returns Percentage as a number + */ +export function rateToPercentage(rate: bigint, issuancePerBlock: bigint): number { + if (issuancePerBlock === 0n) return 0 + return Number((rate * 10000n) / issuancePerBlock) / 100 +} + +/** + * Helper to convert ETH string to wei bigint. + */ +export function parseEther(value: string): bigint { + return ethers.parseEther(value) +} diff --git a/packages/issuance/test/tests/allocate/optimizationHelpers.ts b/packages/issuance/test/tests/allocate/optimizationHelpers.ts new file mode 100644 index 000000000..d9d986516 --- /dev/null +++ b/packages/issuance/test/tests/allocate/optimizationHelpers.ts @@ -0,0 +1,59 @@ +/** + * Performance optimization helpers for test files + * Focus on reducing code duplication and improving readability + */ + +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +// Common test constants to avoid magic numbers +const TEST_CONSTANTS = { + // Common allocation percentages (in PPM) + ALLOCATION_10_PERCENT: 100_000, + ALLOCATION_20_PERCENT: 200_000, + ALLOCATION_30_PERCENT: 300_000, + ALLOCATION_40_PERCENT: 400_000, + ALLOCATION_50_PERCENT: 500_000, + ALLOCATION_60_PERCENT: 600_000, + ALLOCATION_100_PERCENT: 1_000_000, + + // Common amounts + AMOUNT_100_TOKENS: '100', + AMOUNT_1000_TOKENS: '1000', + AMOUNT_10000_TOKENS: '10000', + + // Time constants + ONE_DAY: 24 * 60 * 60, + ONE_WEEK: 7 * 24 * 60 * 60, + TWO_WEEKS: 14 * 24 * 60 * 60, + + // Common interface IDs (to avoid recalculation) + ERC165_INTERFACE_ID: '0x01ffc9a7', + INVALID_INTERFACE_ID: '0x12345678', +} + +/** + * Helper to create consistent ethers amounts + */ +export function parseEther(amount: string): bigint { + return ethers.parseEther(amount) +} + +/** + * Helper to expect a transaction to revert with a specific custom error + */ +export async function expectCustomError(txPromise: Promise, contract: any, errorName: string): Promise { + await 
expect(txPromise).to.be.revertedWithCustomError(contract, errorName) +} + +/** + * Helper to mine blocks for time-sensitive tests + */ +export async function mineBlocks(count: number): Promise { + for (let i = 0; i < count; i++) { + await ethers.provider.send('evm_mine', []) + } +} + +export { TEST_CONSTANTS } diff --git a/packages/issuance/test/tests/allocate/optimizedFixtures.ts b/packages/issuance/test/tests/allocate/optimizedFixtures.ts new file mode 100644 index 000000000..66d3f3dc7 --- /dev/null +++ b/packages/issuance/test/tests/allocate/optimizedFixtures.ts @@ -0,0 +1,306 @@ +/** + * Enhanced Test Fixtures with Performance Optimizations + * Consolidates common test setup patterns and reduces duplication + */ + +import hre from 'hardhat' + +import { Constants, deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployAllocateSystem } from './fixtures' +import { TestConstants } from './testPatterns' +const { ethers } = hre + +/** + * Enhanced fixture for allocate-only system (excludes eligibility contracts) + * Use this for allocate tests to ensure clean separation from eligibility + */ +export async function setupOptimizedAllocateSystem(customOptions: any = {}) { + const accounts = await getTestAccounts() + + const options = { + issuancePerBlock: Constants.DEFAULT_ISSUANCE_PER_BLOCK, + setupMinterRole: true, + setupTargets: true, + targetCount: 2, + ...customOptions, + } + + // Deploy allocate-only system (no eligibility contracts) + const { graphToken, issuanceAllocator, target1, target2 } = await deployAllocateSystem( + accounts, + options.issuancePerBlock, + ) + + // Cache addresses to avoid repeated getAddress() calls + const addresses = { + graphToken: await graphToken.getAddress(), + issuanceAllocator: await issuanceAllocator.getAddress(), + target1: await target1.getAddress(), + target2: await target2.getAddress(), + } + + // Setup minter role if requested + if (options.setupMinterRole) { + await (graphToken as 
any).addMinter(addresses.issuanceAllocator) + } + + // Setup default targets if requested + if (options.setupTargets) { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0) + + if (options.targetCount >= 2) { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0) + } + } + + return { + accounts, + contracts: { + graphToken, + issuanceAllocator, + target1, + target2, + }, + addresses, + helpers: { + // Helper to reset state without redeploying + resetState: async () => { + // Remove all targets except the default at index 0 + const targets = await issuanceAllocator.getTargets() + const defaultAddress = await issuanceAllocator.getTargetAt(0) + for (const targetAddr of targets) { + // Skip the default target target + if (targetAddr === defaultAddress) continue + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](targetAddr, 0, 0) + } + + // Reset default target to address(0) with 100% + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(ethers.ZeroAddress) + + // Reset issuance rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(options.issuancePerBlock) + }, + + // Helper to setup standard allocations + setupStandardAllocations: async () => { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0) + }, + + // Helper to verify proportional distributions + verifyProportionalDistribution: async (expectedRatios: number[]) => { + const balance1: bigint = await (graphToken as 
any).balanceOf(addresses.target1) + const balance2: bigint = await (graphToken as any).balanceOf(addresses.target2) + + if (balance2 > 0n) { + const ratio: bigint = (balance1 * TestConstants.RATIO_PRECISION) / balance2 + const expectedRatio: bigint = BigInt( + Math.round((expectedRatios[0] / expectedRatios[1]) * Number(TestConstants.RATIO_PRECISION)), + ) + + // Allow for small rounding errors + const tolerance: bigint = 50n // TestConstants.DEFAULT_TOLERANCE + const diff: bigint = ratio > expectedRatio ? ratio - expectedRatio : expectedRatio - ratio + + if (diff > tolerance) { + throw new Error( + `Distribution ratio ${ratio} does not match expected ${expectedRatio} within tolerance ${tolerance}`, + ) + } + } + }, + }, + } +} + +/** + * Lightweight fixture for testing single allocate contracts + */ +export async function setupSingleContract(contractType: 'issuanceAllocator' | 'directAllocation') { + const accounts = await getTestAccounts() + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + let contract: any + + switch (contractType) { + case 'issuanceAllocator': { + const { deployIssuanceAllocator } = await import('./fixtures') + contract = await deployIssuanceAllocator( + graphTokenAddress, + accounts.governor, + Constants.DEFAULT_ISSUANCE_PER_BLOCK, + ) + break + } + case 'directAllocation': { + const { deployDirectAllocation } = await import('./fixtures') + contract = await deployDirectAllocation(graphTokenAddress, accounts.governor) + break + } + default: + throw new Error(`Unknown contract type: ${contractType}`) + } + + return { + accounts, + contract, + graphToken, + addresses: { + contract: await contract.getAddress(), + graphToken: graphTokenAddress, + }, + } +} + +/** + * Shared test data for consistent testing + */ +export const TestData = { + // Standard allocation scenarios + scenarios: { + balanced: [ + { target: 'target1', allocatorPPM: TestConstants.ALLOCATION_30_PERCENT, selfPPM: 0 }, + 
{ target: 'target2', allocatorPPM: TestConstants.ALLOCATION_40_PERCENT, selfPPM: 0 }, + ], + mixed: [ + { target: 'target1', allocatorPPM: TestConstants.ALLOCATION_20_PERCENT, selfPPM: 0 }, + { target: 'target2', allocatorPPM: 0, selfPPM: TestConstants.ALLOCATION_30_PERCENT }, + ], + selfMintingOnly: [ + { target: 'target1', allocatorPPM: 0, selfPPM: TestConstants.ALLOCATION_50_PERCENT }, + { target: 'target2', allocatorPPM: 0, selfPPM: TestConstants.ALLOCATION_30_PERCENT }, + ], + }, + + // Standard test parameters + issuanceRates: { + low: ethers.parseEther('10'), + medium: ethers.parseEther('100'), + high: ethers.parseEther('1000'), + }, + + // Common test tolerances + tolerances: { + strict: 1n, + normal: 50n, // TestConstants.DEFAULT_TOLERANCE + loose: 100n, // TestConstants.DEFAULT_TOLERANCE * 2n + }, +} + +/** + * Helper to apply a scenario to contracts + */ +export async function applyAllocationScenario(issuanceAllocator: any, addresses: any, scenario: any[], governor: any) { + for (const allocation of scenario) { + const targetAddress = addresses[allocation.target] + await issuanceAllocator + .connect(governor) + ['setTargetAllocation(address,uint256,uint256)'](targetAddress, allocation.allocatorPPM, allocation.selfPPM) + } +} + +/** + * OptimizedFixtures class for managing test contracts and state + */ +export class OptimizedFixtures { + private accounts: any + private sharedContracts: any = null + + constructor(accounts: any) { + this.accounts = accounts + } + + async setupDirectAllocationSuite() { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const { deployDirectAllocation } = await import('./fixtures') + const directAllocation = await deployDirectAllocation(graphTokenAddress, this.accounts.governor) + const directAllocationAddress = await directAllocation.getAddress() + + const { GraphTokenHelper } = require('../common/graphTokenHelper') + const graphTokenHelper = new 
GraphTokenHelper(graphToken, this.accounts.governor) + + this.sharedContracts = { + graphToken, + directAllocation, + graphTokenHelper, + addresses: { + graphToken: graphTokenAddress, + directAllocation: directAllocationAddress, + }, + } + } + + getContracts() { + if (!this.sharedContracts) { + throw new Error('Contracts not initialized. Call setupDirectAllocationSuite() first.') + } + return this.sharedContracts + } + + async resetContractsState() { + if (!this.sharedContracts) return + + const { directAllocation } = this.sharedContracts + const { ROLES } = require('./testPatterns') + + // Reset pause state + try { + if (await directAllocation.paused()) { + await directAllocation.connect(this.accounts.governor).unpause() + } + } catch { + // Ignore if not paused + } + + // Remove all roles except governor + try { + for (const account of [this.accounts.operator, this.accounts.user, this.accounts.nonGovernor]) { + if (await directAllocation.hasRole(ROLES.OPERATOR, account.address)) { + await directAllocation.connect(this.accounts.governor).revokeRole(ROLES.OPERATOR, account.address) + } + if (await directAllocation.hasRole(ROLES.PAUSE, account.address)) { + await directAllocation.connect(this.accounts.governor).revokeRole(ROLES.PAUSE, account.address) + } + } + + // Remove pause role from governor if present + if (await directAllocation.hasRole(ROLES.PAUSE, this.accounts.governor.address)) { + await directAllocation.connect(this.accounts.governor).revokeRole(ROLES.PAUSE, this.accounts.governor.address) + } + } catch { + // Ignore role management errors during reset + } + } + + async createFreshDirectAllocation() { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const { deployDirectAllocation } = await import('./fixtures') + const directAllocation = await deployDirectAllocation(graphTokenAddress, this.accounts.governor) + + const { GraphTokenHelper } = require('../common/graphTokenHelper') + const 
graphTokenHelper = new GraphTokenHelper(graphToken, this.accounts.governor) + + return { + directAllocation, + graphToken, + graphTokenHelper, + addresses: { + graphToken: graphTokenAddress, + directAllocation: await directAllocation.getAddress(), + }, + } + } +} diff --git a/packages/issuance/test/tests/allocate/testPatterns.ts b/packages/issuance/test/tests/allocate/testPatterns.ts new file mode 100644 index 000000000..4592eb9e9 --- /dev/null +++ b/packages/issuance/test/tests/allocate/testPatterns.ts @@ -0,0 +1,583 @@ +/** + * Shared test patterns and utilities to reduce duplication across test files + */ + +import { expect } from 'chai' +import { ethers } from 'hardhat' + +// Type definitions for test utilities +export interface TestAccounts { + governor: any + nonGovernor: any + operator: any + user: any + indexer1: any + indexer2: any + selfMintingTarget: any +} + +export interface ContractWithMethods { + connect(signer: any): ContractWithMethods + [methodName: string]: any +} + +// Test constants - centralized to avoid magic numbers +export const TestConstants = { + // Precision and tolerance constants + RATIO_PRECISION: 1000n, + DEFAULT_TOLERANCE: 50n, + STRICT_TOLERANCE: 10n, + + // Common allocation percentages in PPM + ALLOCATION_10_PERCENT: 100_000, + ALLOCATION_20_PERCENT: 200_000, + ALLOCATION_30_PERCENT: 300_000, + ALLOCATION_40_PERCENT: 400_000, + ALLOCATION_50_PERCENT: 500_000, + ALLOCATION_60_PERCENT: 600_000, + ALLOCATION_100_PERCENT: 1_000_000, + + // Role constants - pre-calculated to avoid repeated contract calls + GOVERNOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('GOVERNOR_ROLE')), + OPERATOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('OPERATOR_ROLE')), + PAUSE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')), + ORACLE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('ORACLE_ROLE')), +} as const + +// Consolidated role constants +export const ROLES = { + GOVERNOR: TestConstants.GOVERNOR_ROLE, + OPERATOR: TestConstants.OPERATOR_ROLE, + PAUSE: 
TestConstants.PAUSE_ROLE, + ORACLE: TestConstants.ORACLE_ROLE, +} as const + +/** + * Shared test pattern for governor-only access control + */ +export function shouldEnforceGovernorRole( + contractGetter: () => T, + methodName: string, + methodArgs: any[] = [], + accounts?: any, +) { + return function () { + it(`should revert when non-governor calls ${methodName}`, async function () { + const contract = contractGetter() + const testAccounts = accounts || this.parent.ctx.accounts + + await expect( + (contract as any).connect(testAccounts.nonGovernor)[methodName](...methodArgs), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + + it(`should allow governor to call ${methodName}`, async function () { + const contract = contractGetter() + const testAccounts = accounts || this.parent.ctx.accounts + + await expect((contract as any).connect(testAccounts.governor)[methodName](...methodArgs)).to.not.be.reverted + }) + } +} + +/** + * Shared test pattern for role-based access control + */ +export function shouldEnforceRoleAccess( + contractGetter: () => T, + methodName: string, + requiredRole: string, + methodArgs: any[] = [], + accounts?: any, +) { + return function () { + it(`should revert when account without ${requiredRole} calls ${methodName}`, async function () { + const contract = contractGetter() + const testAccounts = accounts || this.parent.ctx.accounts + + await expect( + (contract as any).connect(testAccounts.nonGovernor)[methodName](...methodArgs), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + } +} + +/** + * Calculate ratio between two values with precision + */ +export function calculateRatio( + value1: bigint, + value2: bigint, + precision: bigint = TestConstants.RATIO_PRECISION, +): bigint { + return (value1 * precision) / value2 +} + +/** + * Helper to verify ratio matches expected value within tolerance + */ +export function expectRatioToEqual( + actual1: bigint, + 
actual2: bigint, + expectedRatio: bigint, + tolerance: bigint = TestConstants.DEFAULT_TOLERANCE, + precision: bigint = TestConstants.RATIO_PRECISION, +) { + const actualRatio = calculateRatio(actual1, actual2, precision) + expect(actualRatio).to.be.closeTo(expectedRatio, tolerance) +} + +/** + * Shared test pattern for initialization + */ +export function shouldInitializeCorrectly<T>(contractGetter: () => T, expectedValues: Record<string, any>) { + return function () { + Object.entries(expectedValues).forEach(([property, expectedValue]) => { + it(`should set ${property} correctly during initialization`, async function () { + const contract = contractGetter() + // Type assertion is necessary here since we're accessing dynamic properties + const actualValue = await (contract as any)[property]() + expect(actualValue).to.equal(expectedValue) + }) + }) + + it('should revert when initialize is called more than once', async function () { + const contract = contractGetter() + const accounts = this.parent.ctx.accounts + + await expect((contract as any).initialize(accounts.governor.address)).to.be.revertedWithCustomError( + contract as any, + 'InvalidInitialization', + ) + }) + } +} + +/** + * Shared test pattern for pausing functionality + */ +export function shouldHandlePausingCorrectly<T>( + contractGetter: () => T, + pauseRoleAccount: any, + methodName: string = 'distributeIssuance', +) { + return function () { + it('should allow pausing and unpausing by authorized account', async function () { + const contract = contractGetter() + + await (contract as any).connect(pauseRoleAccount).pause() + expect(await (contract as any).paused()).to.be.true + + await (contract as any).connect(pauseRoleAccount).unpause() + expect(await (contract as any).paused()).to.be.false + }) + + it(`should handle ${methodName} when paused`, async function () { + const contract = contractGetter() + + await (contract as any).connect(pauseRoleAccount).pause() + + // Should not revert when paused, but behavior may 
differ + await expect((contract as any)[methodName]()).to.not.be.reverted + }) + } +} + +/** + * Helper for mining blocks consistently across tests + */ +export async function mineBlocks(count: number): Promise<void> { + for (let i = 0; i < count; i++) { + await ethers.provider.send('evm_mine', []) + } +} + +/** + * Helper to get current block number + */ +export async function getCurrentBlockNumber(): Promise<number> { + return await ethers.provider.getBlockNumber() +} + +/** + * Helper to disable/enable auto-mining for precise block control + */ +export async function withAutoMiningDisabled<T>(callback: () => Promise<T>): Promise<T> { + await ethers.provider.send('evm_setAutomine', [false]) + try { + return await callback() + } finally { + await ethers.provider.send('evm_setAutomine', [true]) + } +} + +/** + * Helper to verify role assignment + */ +export async function expectRole(contract: any, role: string, account: string, shouldHaveRole: boolean) { + const hasRole = await contract.hasRole(role, account) + expect(hasRole).to.equal(shouldHaveRole) +} + +/** + * Helper to verify transaction reverts with specific error + */ +export async function expectRevert(transactionPromise: Promise<any>, errorName: string, contract?: any) { + if (contract) { + await expect(transactionPromise).to.be.revertedWithCustomError(contract, errorName) + } else { + await expect(transactionPromise).to.be.revertedWith(errorName) + } +} + +/** + * Comprehensive access control test suite for a contract + * Replaces multiple individual access control tests + */ +export function shouldEnforceAccessControl<T>( + contractGetter: () => T, + methods: Array<{ + name: string + args: any[] + requiredRole?: string + allowedRoles?: string[] + }>, + accounts: any, +) { + return function () { + methods.forEach((method) => { + const allowedRoles = method.allowedRoles || [TestConstants.GOVERNOR_ROLE] + + describe(`${method.name} access control`, () => { + it(`should revert when unauthorized account calls ${method.name}`, async 
function () { + const contract = contractGetter() + await expect( + (contract as any).connect(accounts.nonGovernor)[method.name](...method.args), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + + allowedRoles.forEach((role) => { + const roleName = + role === TestConstants.GOVERNOR_ROLE + ? 'governor' + : role === TestConstants.OPERATOR_ROLE + ? 'operator' + : 'authorized' + const account = + role === TestConstants.GOVERNOR_ROLE + ? accounts.governor + : role === TestConstants.OPERATOR_ROLE + ? accounts.operator + : accounts.governor + + it(`should allow ${roleName} to call ${method.name}`, async function () { + const contract = contractGetter() + await expect((contract as any).connect(account)[method.name](...method.args)).to.not.be.reverted + }) + }) + }) + }) + } +} + +/** + * Comprehensive initialization test suite + * Replaces multiple individual initialization tests + */ +export function shouldInitializeProperly<T>( + contractGetter: () => T, + initializationTests: Array<{ + description: string + check: (contract: T) => Promise<void> + }>, + reinitializationTest?: { + method: string + args: any[] + expectedError: string + }, +) { + return function () { + describe('Initialization', () => { + initializationTests.forEach((test) => { + it(test.description, async function () { + const contract = contractGetter() + await test.check(contract) + }) + }) + + if (reinitializationTest) { + it('should revert when initialize is called more than once', async function () { + const contract = contractGetter() + await expect( + (contract as any)[reinitializationTest.method](...reinitializationTest.args), + ).to.be.revertedWithCustomError(contract as any, reinitializationTest.expectedError) + }) + } + }) + } +} + +/** + * Comprehensive pausability test suite + * Replaces multiple individual pause/unpause tests + */ +export function shouldHandlePausability<T>( + contractGetter: () => T, + pausableOperations: Array<{ + name: string + args: any[] 
+ caller: string + }>, + accounts: any, +) { + return function () { + describe('Pausability', () => { + it('should allow PAUSE_ROLE to pause and unpause', async function () { + const contract = contractGetter() + + // Grant pause role to operator + await (contract as any) + .connect(accounts.governor) + .grantRole(TestConstants.PAUSE_ROLE, accounts.operator.address) + + // Should be able to pause + await expect((contract as any).connect(accounts.operator).pause()).to.not.be.reverted + expect(await (contract as any).paused()).to.be.true + + // Should be able to unpause + await expect((contract as any).connect(accounts.operator).unpause()).to.not.be.reverted + expect(await (contract as any).paused()).to.be.false + }) + + it('should revert when non-PAUSE_ROLE tries to pause', async function () { + const contract = contractGetter() + await expect((contract as any).connect(accounts.nonGovernor).pause()).to.be.revertedWithCustomError( + contract as any, + 'AccessControlUnauthorizedAccount', + ) + }) + + pausableOperations.forEach((operation) => { + it(`should revert ${operation.name} when paused`, async function () { + const contract = contractGetter() + const caller = + operation.caller === 'governor' + ? accounts.governor + : operation.caller === 'operator' + ? 
accounts.operator + : accounts.nonGovernor + + // Grant pause role and pause + await (contract as any) + .connect(accounts.governor) + .grantRole(TestConstants.PAUSE_ROLE, accounts.governor.address) + await (contract as any).connect(accounts.governor).pause() + + await expect( + (contract as any).connect(caller)[operation.name](...operation.args), + ).to.be.revertedWithCustomError(contract as any, 'EnforcedPause') + }) + }) + }) + } +} + +/** + * Comprehensive role management test suite + * Replaces multiple individual role grant/revoke tests + */ +export function shouldManageRoles<T>( + contractGetter: () => T, + roles: Array<{ + role: string + roleName: string + grantableBy?: string[] + }>, + accounts: any, +) { + return function () { + describe('Role Management', () => { + roles.forEach((roleConfig) => { + const grantableBy = roleConfig.grantableBy || ['governor'] + + describe(`${roleConfig.roleName} management`, () => { + grantableBy.forEach((granterRole) => { + const granter = granterRole === 'governor' ? 
accounts.governor : accounts.operator + + it(`should allow ${granterRole} to grant ${roleConfig.roleName}`, async function () { + const contract = contractGetter() + await expect((contract as any).connect(granter).grantRole(roleConfig.role, accounts.user.address)).to.not + .be.reverted + + expect(await (contract as any).hasRole(roleConfig.role, accounts.user.address)).to.be.true + }) + + it(`should allow ${granterRole} to revoke ${roleConfig.roleName}`, async function () { + const contract = contractGetter() + + // First grant the role + await (contract as any).connect(granter).grantRole(roleConfig.role, accounts.user.address) + + // Then revoke it + await expect((contract as any).connect(granter).revokeRole(roleConfig.role, accounts.user.address)).to.not + .be.reverted + + expect(await (contract as any).hasRole(roleConfig.role, accounts.user.address)).to.be.false + }) + }) + + it(`should revert when non-authorized tries to grant ${roleConfig.roleName}`, async function () { + const contract = contractGetter() + await expect( + (contract as any).connect(accounts.nonGovernor).grantRole(roleConfig.role, accounts.user.address), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + }) + }) + }) + } +} + +/** + * Comprehensive interface compliance test suite + * Replaces multiple individual interface support tests + * + * @param contractGetter - Function that returns the contract instance to test + * @param interfaces - Array of Typechain factory classes with interfaceId and interfaceName + * + * @example + * import { IPausableControl__factory, IAccessControl__factory } from '@graphprotocol/interfaces/types' + * + * shouldSupportInterfaces( + * () => contract, + * [ + * IPausableControl__factory, + * IAccessControl__factory, + * ] + * ) + */ +export function shouldSupportInterfaces<T>( + contractGetter: () => T, + interfaces: Array<{ + interfaceId: string + interfaceName: string + }>, +) { + return function () { + describe('Interface 
Compliance', () => { + it('should support ERC-165 interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x01ffc9a7')).to.be.true + }) + + interfaces.forEach((iface) => { + it(`should support ${iface.interfaceName} interface`, async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface(iface.interfaceId)).to.be.true + }) + }) + + it('should not support random interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x12345678')).to.be.false + }) + }) + } +} + +/** + * Comprehensive validation test suite + * Replaces multiple individual validation tests + */ +export function shouldValidateInputs<T>( + contractGetter: () => T, + validationTests: Array<{ + method: string + args: any[] + expectedError: string + description: string + caller?: string + }>, + accounts: any, +) { + return function () { + describe('Input Validation', () => { + validationTests.forEach((test) => { + it(test.description, async function () { + const contract = contractGetter() + const caller = + test.caller === 'operator' ? accounts.operator : test.caller === 'user' ? 
accounts.user : accounts.governor + + await expect((contract as any).connect(caller)[test.method](...test.args)).to.be.revertedWithCustomError( + contract as any, + test.expectedError, + ) + }) + }) + }) + } +} + +/** + * Shared assertion helpers for common test patterns + */ +export const TestAssertions = { + /** + * Assert that a target received tokens proportionally + */ + expectProportionalDistribution: ( + distributions: bigint[], + expectedRatios: number[], + tolerance: bigint = TestConstants.DEFAULT_TOLERANCE, + ) => { + for (let i = 1; i < distributions.length; i++) { + const expectedRatio = BigInt( + Math.round((expectedRatios[0] / expectedRatios[i]) * Number(TestConstants.RATIO_PRECISION)), + ) + expectRatioToEqual(distributions[0], distributions[i], expectedRatio, tolerance) + } + }, + + /** + * Assert that balance increased by at least expected amount + */ + expectBalanceIncreasedBy: (initialBalance: bigint, finalBalance: bigint, expectedIncrease: bigint) => { + const actualIncrease = finalBalance - initialBalance + expect(actualIncrease).to.be.gte(expectedIncrease) + }, + + /** + * Assert that total allocations add up correctly + */ + expectTotalAllocation: (contract: any, expectedTotal: number) => { + return async () => { + const totalAlloc = await contract.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(expectedTotal) + } + }, +} + +/** + * Shared test patterns organized by functionality + */ +export const TestPatterns = { + roleManagement: { + grantRole: async (contract: any, granter: any, role: string, account: string) => { + await contract.connect(granter).grantRole(role, account) + }, + + revokeRole: async (contract: any, revoker: any, role: string, account: string) => { + await contract.connect(revoker).revokeRole(role, account) + }, + }, + + pausable: { + pause: async (contract: any, account: any) => { + await contract.connect(account).pause() + }, + + unpause: async (contract: any, account: any) => { + await 
contract.connect(account).unpause() + }, + }, +} diff --git a/packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts b/packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts new file mode 100644 index 000000000..e91b12bd2 --- /dev/null +++ b/packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts @@ -0,0 +1,27 @@ +import { IPausableControl__factory } from '@graphprotocol/interfaces/types' +import { IAccessControl__factory } from '@graphprotocol/issuance/types' +import { expect } from 'chai' + +/** + * Common Interface ID Stability Tests + * + * These tests verify that common interface IDs remain stable across builds. + * These interfaces are used by both allocate and eligibility contracts. + * + * Changes to these IDs indicate breaking changes to the interface definitions. + * + * If a test fails: + * 1. Verify the interface change was intentional + * 2. Understand the impact on deployed contracts + * 3. Update the expected ID if the change is correct + * 4. 
Document the breaking change in release notes + */ +describe('Common Interface ID Stability', () => { + it('IPausableControl should have stable interface ID', () => { + expect(IPausableControl__factory.interfaceId).to.equal('0xe78a39d8') + }) + + it('IAccessControl should have stable interface ID', () => { + expect(IAccessControl__factory.interfaceId).to.equal('0x7965db0b') + }) +}) diff --git a/packages/issuance/test/tests/common/fixtures.ts b/packages/issuance/test/tests/common/fixtures.ts new file mode 100644 index 000000000..5feaa0e6a --- /dev/null +++ b/packages/issuance/test/tests/common/fixtures.ts @@ -0,0 +1,127 @@ +/** + * Common test fixtures shared by all test domains + * Contains only truly shared functionality used by both allocate and eligibility tests + */ + +import '@nomicfoundation/hardhat-chai-matchers' + +import fs from 'fs' +import hre from 'hardhat' + +const { ethers } = hre +const { upgrades } = require('hardhat') + +import type { SignerWithAddress } from '@nomicfoundation/hardhat-ethers/signers' + +import { GraphTokenHelper } from './graphTokenHelper' + +/** + * Standard test accounts interface + */ +export interface TestAccounts { + governor: SignerWithAddress + nonGovernor: SignerWithAddress + operator: SignerWithAddress + user: SignerWithAddress + indexer1: SignerWithAddress + indexer2: SignerWithAddress + selfMintingTarget: SignerWithAddress +} + +/** + * Get standard test accounts + */ +export async function getTestAccounts(): Promise<TestAccounts> { + const [governor, nonGovernor, operator, user, indexer1, indexer2, selfMintingTarget] = await ethers.getSigners() + + return { + governor, + nonGovernor, + operator, + user, + indexer1, + indexer2, + selfMintingTarget, + } +} + +/** + * Common constants used in tests + */ +export const Constants = { + PPM: 1_000_000, // Parts per million (100%) + DEFAULT_ISSUANCE_PER_BLOCK: ethers.parseEther('100'), // 100 GRT per block +} + +// Shared test constants +export const SHARED_CONSTANTS = { + PPM: 1_000_000, + 
+ // Pre-calculated role constants to avoid repeated async calls + GOVERNOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('GOVERNOR_ROLE')), + OPERATOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('OPERATOR_ROLE')), + PAUSE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')), + ORACLE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('ORACLE_ROLE')), +} as const + +/** + * Deploy a test GraphToken for testing + * This uses the real GraphToken contract + * @returns {Promise} + */ +export async function deployTestGraphToken() { + // Get the governor account + const [governor] = await ethers.getSigners() + + // Load the GraphToken artifact directly from the contracts package + const graphTokenArtifactPath = require.resolve( + '@graphprotocol/contracts/artifacts/contracts/token/GraphToken.sol/GraphToken.json', + ) + const GraphTokenArtifact = JSON.parse(fs.readFileSync(graphTokenArtifactPath, 'utf8')) + + // Create a contract factory using the artifact + const GraphTokenFactory = new ethers.ContractFactory(GraphTokenArtifact.abi, GraphTokenArtifact.bytecode, governor) + + // Deploy the contract + const graphToken = await GraphTokenFactory.deploy(ethers.parseEther('1000000000')) + await graphToken.waitForDeployment() + + return graphToken +} + +/** + * Get a GraphTokenHelper for an existing token + * @param {string} tokenAddress The address of the GraphToken + * @param {boolean} [isFork=false] Whether this is running on a forked network + * @returns {Promise} + */ +export async function getGraphTokenHelper(tokenAddress, isFork = false) { + // Get the governor account + const [governor] = await ethers.getSigners() + + // Get the GraphToken at the specified address + const graphToken = await ethers.getContractAt(isFork ? 
'IGraphToken' : 'GraphToken', tokenAddress) + + return new GraphTokenHelper(graphToken, governor) +} + +/** + * Upgrade a contract using OpenZeppelin's upgrades library + * This is a generic function that can be used to upgrade any contract + * @param {string} contractAddress + * @param {string} contractName + * @param {any[]} [constructorArgs=[]] + * @returns {Promise} + */ +export async function upgradeContract(contractAddress, contractName, constructorArgs = []) { + // Get the contract factory + const ContractFactory = await ethers.getContractFactory(contractName) + + // Upgrade the contract + const upgradedContractInstance = await upgrades.upgradeProxy(contractAddress, ContractFactory, { + constructorArgs, + }) + + // Return the upgraded contract instance + return upgradedContractInstance +} diff --git a/packages/issuance/test/tests/common/graphTokenHelper.ts b/packages/issuance/test/tests/common/graphTokenHelper.ts new file mode 100644 index 000000000..f4adbcc8a --- /dev/null +++ b/packages/issuance/test/tests/common/graphTokenHelper.ts @@ -0,0 +1,91 @@ +import fs from 'fs' +import hre from 'hardhat' +const { ethers } = hre +import { SignerWithAddress } from '@nomicfoundation/hardhat-ethers/signers' +import { Contract } from 'ethers' + +/** + * Helper class for working with GraphToken in tests + * This provides a consistent interface for minting tokens + * and managing minters + */ +export class GraphTokenHelper { + private graphToken: Contract + private governor: SignerWithAddress + + /** + * Create a new GraphTokenHelper + * @param graphToken The GraphToken instance + * @param governor The governor account + */ + constructor(graphToken: Contract, governor: SignerWithAddress) { + this.graphToken = graphToken + this.governor = governor + } + + /** + * Get the GraphToken instance + */ + getToken(): Contract { + return this.graphToken + } + + /** + * Get the GraphToken address + */ + async getAddress(): Promise { + return await this.graphToken.getAddress() + } + 
+ /** + * Mint tokens to an address + */ + async mint(to: string, amount: bigint): Promise { + await (this.graphToken as any).connect(this.governor).mint(to, amount) + } + + /** + * Add a minter to the GraphToken + */ + async addMinter(minter: string): Promise { + await (this.graphToken as any).connect(this.governor).addMinter(minter) + } + + /** + * Deploy a new GraphToken for testing + * @param {SignerWithAddress} governor The governor account + * @returns {Promise} + */ + static async deploy(governor) { + // Load the GraphToken artifact directly from the contracts package + const graphTokenArtifactPath = require.resolve( + '@graphprotocol/contracts/artifacts/contracts/token/GraphToken.sol/GraphToken.json', + ) + const GraphTokenArtifact = JSON.parse(fs.readFileSync(graphTokenArtifactPath, 'utf8')) + + // Create a contract factory using the artifact + const GraphTokenFactory = new ethers.ContractFactory(GraphTokenArtifact.abi, GraphTokenArtifact.bytecode, governor) + + // Deploy the contract + const graphToken = await GraphTokenFactory.deploy(ethers.parseEther('1000000000')) + await graphToken.waitForDeployment() + + return new GraphTokenHelper(graphToken as any, governor) + } + + /** + * Create a GraphTokenHelper for an existing GraphToken on a forked network + * @param {string} tokenAddress The GraphToken address + * @param {SignerWithAddress} governor The governor account + * @returns {Promise} + */ + static async forFork(tokenAddress, governor) { + // Get the GraphToken at the specified address + const graphToken = await ethers.getContractAt('IGraphToken', tokenAddress) + + // Create a helper + const helper = new GraphTokenHelper(graphToken as any, governor) + + return helper + } +} diff --git a/packages/issuance/test/tests/common/testPatterns.ts b/packages/issuance/test/tests/common/testPatterns.ts new file mode 100644 index 000000000..5af5bc73c --- /dev/null +++ b/packages/issuance/test/tests/common/testPatterns.ts @@ -0,0 +1,52 @@ +/** + * Common test 
patterns shared by both allocate and eligibility tests + */ + +import { expect } from 'chai' + +/** + * Comprehensive interface compliance test suite + * Replaces multiple individual interface support tests + * + * @param contractGetter - Function that returns the contract instance to test + * @param interfaces - Array of Typechain factory classes with interfaceId and interfaceName + * + * @example + * import { IPausableControl__factory, IAccessControl__factory } from '@graphprotocol/interfaces/types' + * + * shouldSupportInterfaces( + * () => contract, + * [ + * IPausableControl__factory, + * IAccessControl__factory, + * ] + * ) + */ +export function shouldSupportInterfaces<T>( + contractGetter: () => T, + interfaces: Array<{ + interfaceId: string + interfaceName: string + }>, +) { + return function () { + describe('Interface Compliance', () => { + it('should support ERC-165 interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x01ffc9a7')).to.be.true + }) + + interfaces.forEach((iface) => { + it(`should support ${iface.interfaceName} interface`, async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface(iface.interfaceId)).to.be.true + }) + }) + + it('should not support random interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x12345678')).to.be.false + }) + }) + } +} diff --git a/packages/issuance/test/tsconfig.json b/packages/issuance/test/tsconfig.json new file mode 100644 index 000000000..dfecc9bcf --- /dev/null +++ b/packages/issuance/test/tsconfig.json @@ -0,0 +1,25 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "es2022", + "module": "ESNext", + "moduleResolution": "bundler", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "strict": false, + "skipLibCheck": true, + 
"resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "allowJs": true, + "checkJs": false, + "incremental": true, + "noEmitOnError": false, + "noImplicitAny": false, + "outDir": "./artifacts" + }, + "include": ["tests/**/*", "utils/**/*", "../types/**/*"], + "exclude": ["node_modules", "build", "scripts/**/*"] +} diff --git a/packages/issuance/tsconfig.json b/packages/issuance/tsconfig.json new file mode 100644 index 000000000..00aa1b8ef --- /dev/null +++ b/packages/issuance/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "es2023", + "lib": ["es2023"], + "module": "Node16", + "moduleResolution": "node16", + "strict": true, + "esModuleInterop": true, + "declaration": true, + "resolveJsonModule": true, + "allowJs": true, + "checkJs": false, + "incremental": true + }, + + "include": ["./scripts", "./test", "./typechain"], + "files": ["./hardhat.config.cjs"] +} diff --git a/packages/subgraph-service/contracts/utilities/AllocationManager.sol b/packages/subgraph-service/contracts/utilities/AllocationManager.sol index 08608d8b4..c58336e35 100644 --- a/packages/subgraph-service/contracts/utilities/AllocationManager.sol +++ b/packages/subgraph-service/contracts/utilities/AllocationManager.sol @@ -11,6 +11,7 @@ import { IGraphToken } from "@graphprotocol/interfaces/contracts/contracts/token import { IHorizonStakingTypes } from "@graphprotocol/interfaces/contracts/horizon/internal/IHorizonStakingTypes.sol"; import { IAllocation } from "@graphprotocol/interfaces/contracts/subgraph-service/internal/IAllocation.sol"; import { ILegacyAllocation } from "@graphprotocol/interfaces/contracts/subgraph-service/internal/ILegacyAllocation.sol"; +import { RewardsReclaim } from "@graphprotocol/interfaces/contracts/contracts/rewards/RewardsReclaim.sol"; import { GraphDirectory } from "@graphprotocol/horizon/contracts/utilities/GraphDirectory.sol"; import { AllocationManagerV1Storage } from "./AllocationManagerStorage.sol"; @@ 
-259,6 +260,21 @@ abstract contract AllocationManager is EIP712Upgradeable, GraphDirectory, Alloca * which to calculate POIs. EBO posts once per epoch typically at each epoch change, so we restrict rewards to allocations * that have gone through at least one epoch change. * + * Reclaim target hierarchy: + * When rewards cannot be minted, they are reclaimed with a specific reason. The following conditions are checked + * in order, and the first matching condition determines which reclaim reason is used: + * 1. STALE_POI - if allocation is stale (lastPOI older than maxPOIStaleness) + * 2. ALTRUISTIC_ALLOCATION - if allocation has zero tokens + * 3. ZERO_POI - if POI is bytes32(0) + * 4. ALLOCATION_TOO_YOUNG - if allocation was created in the current epoch + * Each reason may have a different reclaim address configured in the RewardsManager. If multiple conditions + * apply simultaneously, only the first matching condition's reclaim address receives the rewards. + * + * Retroactive reclaim address changes: + * Any change to a reclaim address in the RewardsManager takes effect immediately and retroactively. + * All unclaimed rewards from previous periods will be sent to the new reclaim address when they are + * eventually reclaimed, regardless of which address was configured when the rewards were originally accrued. + * * Emits a {IndexingRewardsCollected} event. * * @param _allocationId The id of the allocation to collect rewards for @@ -278,12 +294,20 @@ abstract contract AllocationManager is EIP712Upgradeable, GraphDirectory, Alloca IAllocation.State memory allocation = _allocations.get(_allocationId); require(allocation.isOpen(), AllocationManagerAllocationClosed(_allocationId)); - // Mint indexing rewards if all conditions are met - uint256 tokensRewards = (!allocation.isStale(maxPOIStaleness) && - !allocation.isAltruistic() && - _poi != bytes32(0)) && _graphEpochManager().currentEpoch() > allocation.createdAtEpoch - ? 
_graphRewardsManager().takeRewards(_allocationId) - : 0; + // Mint indexing rewards if all conditions are met, otherwise reclaim with specific reason + uint256 tokensRewards; + if (allocation.isStale(maxPOIStaleness)) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.STALE_POI, _allocationId, ""); + } else if (allocation.isAltruistic()) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.ALTRUISTIC_ALLOCATION, _allocationId, ""); + } else if (_poi == bytes32(0)) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.ZERO_POI, _allocationId, ""); + // solhint-disable-next-line gas-strict-inequalities + } else if (_graphEpochManager().currentEpoch() <= allocation.createdAtEpoch) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.ALLOCATION_TOO_YOUNG, _allocationId, ""); + } else { + tokensRewards = _graphRewardsManager().takeRewards(_allocationId); + } // ... but we still take a snapshot to ensure the rewards are not accumulated for the next valid POI _allocations.snapshotRewards( @@ -418,12 +442,25 @@ abstract contract AllocationManager is EIP712Upgradeable, GraphDirectory, Alloca function _closeAllocation(address _allocationId, bool _forceClosed) internal { IAllocation.State memory allocation = _allocations.get(_allocationId); + // Reclaim uncollected rewards before closing + uint256 reclaimedRewards = _graphRewardsManager().reclaimRewards( + RewardsReclaim.CLOSE_ALLOCATION, + _allocationId, + "" + ); + // Take rewards snapshot to prevent other allos from counting tokens from this allo _allocations.snapshotRewards( _allocationId, _graphRewardsManager().onSubgraphAllocationUpdate(allocation.subgraphDeploymentId) ); + // Clear pending rewards only if rewards were reclaimed. This marks them as consumed, + // which could be useful for future logic that searches for unconsumed rewards. + // Known limitation: This capture is incomplete due to other code paths (e.g., _presentPOI) + // that clear pending even when rewards are not consumed. 
+ if (0 < reclaimedRewards) _allocations.clearPendingRewards(_allocationId); + _allocations.close(_allocationId); allocationProvisionTracker.release(allocation.indexer, allocation.tokens); diff --git a/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol b/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol index 8286f2570..389b48cae 100644 --- a/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol +++ b/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol @@ -57,6 +57,31 @@ contract MockRewardsManager is IRewardsManager { function isDenied(bytes32) external view returns (bool) {} + // -- Reclaim -- + + function setSubgraphDeniedReclaimAddress(address) external {} + + function setIndexerEligibilityReclaimAddress(address) external {} + + function setReclaimAddress(bytes32, address) external {} + + function reclaimRewards(bytes32, address _allocationID, bytes calldata) external view returns (uint256) { + address rewardsIssuer = msg.sender; + (bool isActive, , , uint256 tokens, uint256 accRewardsPerAllocatedToken) = IRewardsIssuer(rewardsIssuer) + .getAllocationData(_allocationID); + + if (!isActive) { + return 0; + } + + // Calculate accumulated but unclaimed rewards + uint256 accRewardsPerTokens = tokens.mulPPM(rewardsPerSignal); + uint256 rewards = accRewardsPerTokens - accRewardsPerAllocatedToken; + + // Note: We don't mint tokens for reclaimed rewards, they are just discarded + return rewards; + } + // -- Getters -- function getNewRewardsPerSignal() external view returns (uint256) {} diff --git a/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol b/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol index 74c677504..cf398f7dc 100644 --- a/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol +++ b/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol @@ -383,7 +383,7 @@ contract SubgraphServiceTest is SubgraphServiceSharedTest { 
CollectPaymentData memory collectPaymentDataBefore, CollectPaymentData memory collectPaymentDataAfter ) private view { - (IGraphTallyCollector.SignedRAV memory signedRav, uint256 tokensToCollect) = abi.decode( + (IGraphTallyCollector.SignedRAV memory signedRav, ) = abi.decode( _data, (IGraphTallyCollector.SignedRAV, uint256) ); diff --git a/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol b/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol index 61224ff23..85cc4f84b 100644 --- a/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol +++ b/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol @@ -171,4 +171,75 @@ contract SubgraphServiceCollectIndexingTest is SubgraphServiceTest { ); subgraphService.collect(newIndexer, paymentType, data); } + + function test_SubgraphService_Collect_Indexing_ZeroRewards(uint256 tokens) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory data = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // Don't skip time - collect immediately, expecting zero rewards + _collect(users.indexer, paymentType, data); + } + + function test_SubgraphService_Collect_Indexing_ZeroPOI(uint256 tokens) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + // Submit zero POI (bytes32(0)) + bytes memory data = abi.encode(allocationID, bytes32(0), _getHardcodedPOIMetadata()); + + // skip time to ensure allocation could get rewards + vm.roll(block.number + EPOCH_LENGTH); + + // Should succeed but reclaim rewards due to zero POI - just verify it doesn't revert + subgraphService.collect(users.indexer, paymentType, data); + } + + function test_SubgraphService_Collect_Indexing_StalePOI(uint256 tokens) public useIndexer useAllocation(tokens) { + 
IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory data = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // Skip past maxPOIStaleness to make allocation stale + skip(maxPOIStaleness + 1); + + // Should succeed but reclaim rewards due to stale POI - just verify it doesn't revert + subgraphService.collect(users.indexer, paymentType, data); + } + + function test_SubgraphService_Collect_Indexing_AltruisticAllocation(uint256 tokens) public useIndexer { + tokens = bound(tokens, minimumProvisionTokens, MAX_TOKENS); + + _createProvision(users.indexer, tokens, fishermanRewardPercentage, disputePeriod); + _register(users.indexer, abi.encode("url", "geoHash", address(0))); + + // Create altruistic allocation (0 tokens) + bytes memory data = _createSubgraphAllocationData(users.indexer, subgraphDeployment, allocationIDPrivateKey, 0); + _startService(users.indexer, data); + + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory collectData = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // skip time to ensure allocation could get rewards + vm.roll(block.number + EPOCH_LENGTH); + + // Should succeed but reclaim rewards due to altruistic allocation - just verify it doesn't revert + subgraphService.collect(users.indexer, paymentType, collectData); + } + + function test_SubgraphService_Collect_Indexing_RevertWhen_AllocationClosed( + uint256 tokens + ) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory data = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // Close the allocation + resetPrank(users.indexer); + subgraphService.stopService(users.indexer, abi.encode(allocationID)); + + // skip time to ensure allocation could get rewards + vm.roll(block.number + EPOCH_LENGTH); + + // Attempt to collect on closed 
allocation should revert + // Using the bytes4 selector directly since AllocationManagerAllocationClosed is inherited from AllocationManager + bytes4 selector = bytes4(keccak256("AllocationManagerAllocationClosed(address)")); + vm.expectRevert(abi.encodeWithSelector(selector, allocationID)); + subgraphService.collect(users.indexer, paymentType, data); + } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1da271388..07029ae86 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -21,12 +21,21 @@ catalogs: '@nomicfoundation/hardhat-ethers': specifier: ^3.1.0 version: 3.1.0 + '@nomicfoundation/hardhat-verify': + specifier: ^2.0.10 + version: 2.1.1 + '@typechain/hardhat': + specifier: ^9.0.0 + version: 9.1.0 '@typescript-eslint/eslint-plugin': specifier: ^8.46.1 version: 8.46.2 '@typescript-eslint/parser': specifier: ^8.46.1 version: 8.46.2 + dotenv: + specifier: ^16.5.0 + version: 16.6.1 eslint: specifier: ^9.37.0 version: 9.38.0 @@ -66,6 +75,9 @@ catalogs: hardhat-ignore-warnings: specifier: ^0.2.12 version: 0.2.12 + hardhat-secure-accounts: + specifier: ^1.0.5 + version: 1.0.5 hardhat-storage-layout: specifier: ^0.1.7 version: 0.1.7 @@ -956,6 +968,185 @@ importers: specifier: ^2.31.7 version: 2.37.6(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) + packages/issuance: + dependencies: + '@noble/hashes': + specifier: ^1.8.0 + version: 1.8.0 + devDependencies: + '@graphprotocol/interfaces': + specifier: workspace:^ + version: link:../interfaces + '@graphprotocol/toolshed': + specifier: workspace:^ + version: link:../toolshed + '@nomicfoundation/hardhat-ethers': + specifier: 'catalog:' + version: 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-verify': + specifier: 'catalog:' + version: 
2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@openzeppelin/contracts': + specifier: ^5.4.0 + version: 5.4.0 + '@openzeppelin/contracts-upgradeable': + specifier: ^5.4.0 + version: 5.4.0(@openzeppelin/contracts@5.4.0) + '@openzeppelin/hardhat-upgrades': + specifier: ^3.9.0 + version: 3.9.1(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(encoding@0.1.13)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@typechain/ethers-v6': + specifier: ^0.5.0 + version: 0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3) + '@typechain/hardhat': + specifier: 'catalog:' + version: 9.1.0(@typechain/ethers-v6@0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3)) + '@types/node': + specifier: ^20.17.50 + version: 20.19.14 + dotenv: + specifier: 'catalog:' + version: 16.6.1 + eslint: + specifier: 'catalog:' + version: 9.38.0(jiti@2.5.1) + ethers: + 
specifier: 'catalog:' + version: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + glob: + specifier: 'catalog:' + version: 11.0.3 + globals: + specifier: 'catalog:' + version: 16.4.0 + hardhat: + specifier: 'catalog:' + version: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + hardhat-contract-sizer: + specifier: 'catalog:' + version: 2.10.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + hardhat-secure-accounts: + specifier: 'catalog:' + version: 1.0.5(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + hardhat-storage-layout: + specifier: 'catalog:' + version: 0.1.7(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + lint-staged: + specifier: 'catalog:' + version: 16.2.6 + markdownlint-cli: + specifier: 'catalog:' + version: 0.45.0 + prettier: + specifier: 'catalog:' + version: 3.6.2 + prettier-plugin-solidity: + specifier: 'catalog:' + version: 2.1.0(prettier@3.6.2) + solhint: + specifier: 'catalog:' + version: 6.0.1(typescript@5.9.3) + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.19.14)(typescript@5.9.3) + typechain: + specifier: ^8.3.0 + version: 8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3) + typescript: + specifier: 'catalog:' + version: 5.9.3 + typescript-eslint: + specifier: 'catalog:' + version: 8.46.2(eslint@9.38.0(jiti@2.5.1))(typescript@5.9.3) + yaml-lint: + specifier: 'catalog:' + 
version: 1.7.0 + + packages/issuance/test: + dependencies: + '@graphprotocol/contracts': + specifier: workspace:^ + version: link:../../contracts + '@graphprotocol/interfaces': + specifier: workspace:^ + version: link:../../interfaces + '@graphprotocol/issuance': + specifier: workspace:^ + version: link:.. + devDependencies: + '@nomicfoundation/hardhat-chai-matchers': + specifier: ^2.0.0 + version: 2.1.0(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(chai@4.5.0)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-ethers': + specifier: 'catalog:' + version: 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-foundry': + specifier: ^1.1.1 + version: 1.2.0(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-network-helpers': + specifier: ^1.0.0 + version: 1.1.0(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-toolbox': + specifier: 5.0.0 + version: 5.0.0(d4ea276d64fbf8f2a60adf85f1748ee6) + '@openzeppelin/contracts': + specifier: ^5.4.0 + version: 5.4.0 + '@openzeppelin/contracts-upgradeable': + specifier: ^5.4.0 + version: 5.4.0(@openzeppelin/contracts@5.4.0) + '@openzeppelin/foundry-upgrades': + specifier: 0.4.0 + version: 0.4.0(@openzeppelin/defender-deploy-client-cli@0.0.1-alpha.10(encoding@0.1.13))(@openzeppelin/upgrades-core@1.44.1) + '@types/chai': 
+ specifier: ^4.3.20 + version: 4.3.20 + '@types/mocha': + specifier: ^10.0.10 + version: 10.0.10 + '@types/node': + specifier: ^20.17.50 + version: 20.19.14 + chai: + specifier: ^4.3.7 + version: 4.5.0 + dotenv: + specifier: ^16.5.0 + version: 16.6.1 + eslint: + specifier: 'catalog:' + version: 9.38.0(jiti@2.5.1) + eslint-plugin-no-only-tests: + specifier: 'catalog:' + version: 3.3.0 + ethers: + specifier: 'catalog:' + version: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + forge-std: + specifier: https://github.com/foundry-rs/forge-std/tarball/v1.9.7 + version: https://github.com/foundry-rs/forge-std/tarball/v1.9.7 + glob: + specifier: 'catalog:' + version: 11.0.3 + hardhat: + specifier: 'catalog:' + version: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + hardhat-gas-reporter: + specifier: 'catalog:' + version: 1.0.10(bufferutil@4.0.9)(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(utf-8-validate@5.0.10) + prettier: + specifier: 'catalog:' + version: 3.6.2 + solidity-coverage: + specifier: ^0.8.0 + version: 0.8.16(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.19.14)(typescript@5.9.3) + typescript: + specifier: 'catalog:' + version: 5.9.3 + packages/subgraph-service: devDependencies: '@graphprotocol/contracts': @@ -3290,6 +3481,28 @@ packages: typechain: ^8.3.0 typescript: '>=4.5.0' + '@nomicfoundation/hardhat-toolbox@5.0.0': + resolution: {integrity: sha512-FnUtUC5PsakCbwiVNsqlXVIWG5JIb5CEZoSXbJUsEBun22Bivx2jhF1/q9iQbzuaGpJKFQyOhemPB2+XlEE6pQ==} + peerDependencies: + '@nomicfoundation/hardhat-chai-matchers': ^2.0.0 + '@nomicfoundation/hardhat-ethers': ^3.0.0 + '@nomicfoundation/hardhat-ignition-ethers': ^0.15.0 + 
'@nomicfoundation/hardhat-network-helpers': ^1.0.0 + '@nomicfoundation/hardhat-verify': ^2.0.0 + '@typechain/ethers-v6': ^0.5.0 + '@typechain/hardhat': ^9.0.0 + '@types/chai': ^4.2.0 + '@types/mocha': '>=9.1.0' + '@types/node': ^20.17.50 + chai: ^4.2.0 + ethers: ^6.4.0 + hardhat: ^2.11.0 + hardhat-gas-reporter: ^1.0.8 + solidity-coverage: ^0.8.1 + ts-node: '>=8.0.0' + typechain: ^8.3.0 + typescript: '>=4.5.0' + '@nomicfoundation/hardhat-verify@2.1.1': resolution: {integrity: sha512-K1plXIS42xSHDJZRkrE2TZikqxp9T4y6jUMUNI/imLgN5uCcEQokmfU0DlyP9zzHncYK92HlT5IWP35UVCLrPw==} peerDependencies: @@ -3425,6 +3638,18 @@ packages: '@nomiclabs/harhdat-etherscan': optional: true + '@openzeppelin/hardhat-upgrades@3.9.1': + resolution: {integrity: sha512-pSDjlOnIpP+PqaJVe144dK6VVKZw2v6YQusyt0OOLiCsl+WUzfo4D0kylax7zjrOxqy41EK2ipQeIF4T+cCn2A==} + hasBin: true + peerDependencies: + '@nomicfoundation/hardhat-ethers': ^3.0.6 + '@nomicfoundation/hardhat-verify': ^2.0.14 + ethers: ^6.6.0 + hardhat: ^2.24.1 + peerDependenciesMeta: + '@nomicfoundation/hardhat-verify': + optional: true + '@openzeppelin/platform-deploy-client@0.8.0': resolution: {integrity: sha512-POx3AsnKwKSV/ZLOU/gheksj0Lq7Is1q2F3pKmcFjGZiibf+4kjGxr4eSMrT+2qgKYZQH1ZLQZ+SkbguD8fTvA==} deprecated: '@openzeppelin/platform-deploy-client is deprecated. 
Please use @openzeppelin/defender-sdk-deploy-client' @@ -11244,6 +11469,10 @@ packages: resolution: {integrity: sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==} engines: {node: '>=14.0'} + undici@6.22.0: + resolution: {integrity: sha512-hU/10obOIu62MGYjdskASR3CUAiYaFTtC9Pa6vHyf//mAipSvSQg6od2CnJswq7fvzNS3zJhxoRkgNVaHurWKw==} + engines: {node: '>=18.17'} + unfetch@4.2.0: resolution: {integrity: sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==} @@ -13287,7 +13516,7 @@ snapshots: '@ethereumjs/common@2.6.0': dependencies: crc-32: 1.2.2 - ethereumjs-util: 7.1.3 + ethereumjs-util: 7.1.5 '@ethereumjs/common@2.6.5': dependencies: @@ -13309,7 +13538,7 @@ snapshots: '@ethereumjs/tx@3.4.0': dependencies: '@ethereumjs/common': 2.6.0 - ethereumjs-util: 7.1.3 + ethereumjs-util: 7.1.5 '@ethereumjs/tx@3.5.2': dependencies: @@ -13336,7 +13565,7 @@ snapshots: async-eventemitter: 0.2.4 core-js-pure: 3.45.1 debug: 2.6.9 - ethereumjs-util: 7.1.3 + ethereumjs-util: 7.1.5 functional-red-black-tree: 1.0.1 mcl-wasm: 0.7.9 merkle-patricia-tree: 4.2.4 @@ -15560,6 +15789,27 @@ snapshots: typechain: 8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3) typescript: 5.9.3 + '@nomicfoundation/hardhat-toolbox@5.0.0(d4ea276d64fbf8f2a60adf85f1748ee6)': + dependencies: + '@nomicfoundation/hardhat-chai-matchers': 2.1.0(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(chai@4.5.0)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-ethers': 
3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-ignition-ethers': 0.15.14(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(@nomicfoundation/hardhat-ignition@0.15.13(@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(bufferutil@4.0.9)(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(utf-8-validate@5.0.10))(@nomicfoundation/ignition-core@0.15.13(bufferutil@4.0.9)(utf-8-validate@5.0.10))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-network-helpers': 1.1.0(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-verify': 2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@typechain/ethers-v6': 0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3) + '@typechain/hardhat': 
9.1.0(@typechain/ethers-v6@0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3)) + '@types/chai': 4.3.20 + '@types/mocha': 10.0.10 + '@types/node': 20.19.14 + chai: 4.5.0 + ethers: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + hardhat: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + hardhat-gas-reporter: 1.0.10(bufferutil@4.0.9)(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(utf-8-validate@5.0.10) + solidity-coverage: 0.8.16(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + ts-node: 10.9.2(@types/node@20.19.14)(typescript@5.9.3) + typechain: 8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3) + typescript: 5.9.3 + '@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))': dependencies: '@ethersproject/abi': 5.8.0 @@ -15726,8 +15976,8 @@ snapshots: '@openzeppelin/defender-deploy-client-cli@0.0.1-alpha.10(encoding@0.1.13)': dependencies: '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) - '@openzeppelin/defender-sdk-deploy-client': 2.7.0(encoding@0.1.13) - '@openzeppelin/defender-sdk-network-client': 2.7.0(encoding@0.1.13) + '@openzeppelin/defender-sdk-deploy-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) + 
'@openzeppelin/defender-sdk-network-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) dotenv: 16.6.1 minimist: 1.2.8 transitivePeerDependencies: @@ -15744,7 +15994,7 @@ snapshots: - aws-crt - encoding - '@openzeppelin/defender-sdk-deploy-client@2.7.0(encoding@0.1.13)': + '@openzeppelin/defender-sdk-deploy-client@2.7.0(debug@4.4.3)(encoding@0.1.13)': dependencies: '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) axios: 1.12.2(debug@4.4.3) @@ -15754,7 +16004,7 @@ snapshots: - debug - encoding - '@openzeppelin/defender-sdk-network-client@2.7.0(encoding@0.1.13)': + '@openzeppelin/defender-sdk-network-client@2.7.0(debug@4.4.3)(encoding@0.1.13)': dependencies: '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) axios: 1.12.2(debug@4.4.3) @@ -15785,6 +16035,27 @@ snapshots: - encoding - supports-color + '@openzeppelin/hardhat-upgrades@3.9.1(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(encoding@0.1.13)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))': + dependencies: + '@nomicfoundation/hardhat-ethers': 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) + '@openzeppelin/defender-sdk-deploy-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) + '@openzeppelin/defender-sdk-network-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) + '@openzeppelin/upgrades-core': 1.44.1 + chalk: 
4.1.2 + debug: 4.4.3(supports-color@9.4.0) + ethereumjs-util: 7.1.5 + ethers: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + hardhat: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + proper-lockfile: 4.1.2 + undici: 6.22.0 + optionalDependencies: + '@nomicfoundation/hardhat-verify': 2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + transitivePeerDependencies: + - aws-crt + - encoding + - supports-color + '@openzeppelin/platform-deploy-client@0.8.0(debug@4.4.3)(encoding@0.1.13)': dependencies: '@ethersproject/abi': 5.8.0 @@ -26240,6 +26511,8 @@ snapshots: dependencies: '@fastify/busboy': 2.1.1 + undici@6.22.0: {} + unfetch@4.2.0: {} unicorn-magic@0.1.0: {}