Skip to content

Integrate with DB #3

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 9 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
* text=auto
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@ cache
node_modules
typechain
.env

.vscode
output
104 changes: 52 additions & 52 deletions hardhat.config.ts
Original file line number Diff line number Diff line change
@@ -1,52 +1,52 @@
/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-unused-vars */
// Load .env before any process.env reads below (require, not import, so it
// executes eagerly ahead of the plugin imports).
require("dotenv").config();
// Side-effect plugin imports: ethers bindings, upgrades helpers, TypeChain.
import "@nomicfoundation/hardhat-ethers";
import "@openzeppelin/hardhat-upgrades";
import "@typechain/hardhat";
import { HardhatUserConfig } from "hardhat/types";
// Hardhat project configuration: compiler, directory layout, TypeChain
// output, mocha timeout, and network endpoints.
const config : HardhatUserConfig = {
solidity: {
compilers: [
{
version: "0.8.26",
settings: {
optimizer: {
enabled: true,
runs: 20000,
},
},
},
],
},
paths: {
sources: "./contracts",
tests: "./test",
cache: "./cache",
artifacts: "./artifacts",
},
typechain: {
outDir: "typechain",
},
mocha: {
// Long timeout: mainnet-fork tests can take minutes.
timeout: 5000000,
},
networks: {
hardhat: {
forking: {
// NOTE(review): a missing MAINNET_RPC_URL becomes the literal string
// "undefined" here — confirm the env var is always set before running.
url: `${process.env.MAINNET_RPC_URL}`
}
},
mainnet: {
url: `${process.env.MAINNET_RPC_URL}`,
accounts: [
// NOTE(review): same "undefined" stringification risk for the key.
`${process.env.PRIVATE_KEY_A}`,
]
},
},
};
export default config;
/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-unused-vars */

require("dotenv").config();

import "@nomicfoundation/hardhat-ethers";
import "@openzeppelin/hardhat-upgrades";
import "@typechain/hardhat";

import { HardhatUserConfig } from "hardhat/types";

// Hardhat project configuration: Solidity compiler, directory layout,
// TypeChain output, mocha timeout, and network endpoints.
//
// Fix: the previous template literals (`${process.env.MAINNET_RPC_URL}`,
// `${process.env.PRIVATE_KEY_A}`) stringify a missing env var to the literal
// "undefined", which only surfaces later as a confusing RPC or
// invalid-private-key error. Use the raw values with explicit fallbacks:
// an empty URL fails fast and clearly, and no account entry is passed at
// all when no key is configured.
const config : HardhatUserConfig = {
  solidity: {
    compilers: [
      {
        version: "0.8.26",
        settings: {
          optimizer: {
            enabled: true,
            runs: 20000,
          },
        },
      },
    ],
  },
  paths: {
    sources: "./contracts",
    tests: "./test",
    cache: "./cache",
    artifacts: "./artifacts",
  },
  typechain: {
    outDir: "typechain",
  },
  mocha: {
    // Long timeout: mainnet-fork tests can take minutes.
    timeout: 5000000,
  },
  networks: {
    hardhat: {
      forking: {
        // Empty string (rather than the string "undefined") when unset.
        url: process.env.MAINNET_RPC_URL ?? "",
      },
    },
    mainnet: {
      url: process.env.MAINNET_RPC_URL ?? "",
      // Only include an account when a key is configured; `["undefined"]`
      // would make every mainnet task fail with an opaque key error.
      accounts: process.env.PRIVATE_KEY_A ? [process.env.PRIVATE_KEY_A] : [],
    },
  },
};

export default config;
4 changes: 4 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,24 +26,28 @@
"@openzeppelin/contracts": "4.9.3",
"@openzeppelin/contracts-upgradeable": "4.9.3",
"@openzeppelin/hardhat-upgrades": "3.9.0",
"@openzeppelin/merkle-tree": "1.0.8",
"@typechain/ethers-v6": "^0.5.1",
"@typechain/hardhat": "^9.1.0",
"@types/chai": "^4.3.11",
"@types/mocha": "10.0.10",
"@types/node": "^22.10.7",
"@typescript-eslint/parser": "^8.26.0",
"@zero-tech/eslint-config-cpt": "0.2.7",
"@zero-tech/zdc": "^0.2.0",
"chai": "^4.3.10",
"dotenv": "16.4.7",
"eslint": "^8.37.0",
"ethers": "^6.9.0",
"graphql": "^16.10.0",
"hardhat": "^2.19.1",
"mongodb": "6.15.0",
"react": "^19.0.0",
"ts-node": "10.9.1",
"typechain": "^8.3.2",
"typescript": "^5.0.2"
},
"dependencies": {
"@openzeppelin/merkle-tree": "1.0.8"
}
}
172 changes: 118 additions & 54 deletions scripts/getStakerData.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { accountInfoQuery } from "./helpers/queries";
import { createClient, sendQuery } from "./subgraph";
import { BaseArgs } from "./subgraph/types";
import { createClient, sendQuery } from "../src/subgraph";
import { BaseArgs } from "../src/subgraph/types";
import {
MockCorePool,
MockCorePool__factory,
Expand All @@ -11,13 +11,17 @@ import { Contract } from "ethers";
import * as hre from "hardhat";
import * as fs from "fs";
import * as artifact from "./helpers/uniswap_v2_token_abi.json";
import { AccountAmount, Totals, UserStake } from "./types";
import { AccountAmount, MerkleData, Totals, UserStake } from "./types";
import { LP_POOL_ADDRESS, LP_TOKEN_ADDRESS, WILD_POOL_ADDRESS, WILD_TOKEN_ADDRESS } from "./helpers/constants";
import assert from "assert";
import { getMongoAdapter } from "../src/mongo/mongo";
import { StandardMerkleTree } from "@openzeppelin/merkle-tree";
import { OptionalId } from "mongodb";


const getStakesByUser = async (
pool : MockCorePool, account : string,
pool : MockCorePool,
account : string,
) : Promise<UserStake> => {
const depositsLength = await pool.getDepositsLength(account);

Expand Down Expand Up @@ -53,6 +57,7 @@ const getStakesInPool = async (

const stakers = new Map<string, AccountAmount>();

// We want to keep track of total data to be sure it matches sum from each user
let totalWildStaked = 0n;
let totalWildYield = 0n;

Expand Down Expand Up @@ -80,27 +85,41 @@ const getStakesInPool = async (
lpAmounts,
] = await Promise.all(promises) as unknown as [bigint, bigint, UserStake, UserStake];

// Account must be unique
if (!stakers.has(account)) {
stakers.set(account, {
user: account,
amountStakedWILD: wildAmounts.amount.toString(),
amountStakedWILDYield: wildAmounts.yieldAmount.toString(),
amountStakedLP: lpAmounts.amount.toString(),
pendingYieldRewardsWILD: pendingYieldRewardsWild.toString(),
pendingYieldRewardsLP: pendingYieldRewardsLP.toString(),
});

totalWildStaked += wildAmounts.amount;
totalWildYield += wildAmounts.yieldAmount;
totalWildPendingRewards += pendingYieldRewardsWild;

totalLPStaked += lpAmounts.amount;
totalLPYield += lpAmounts.yieldAmount;
totalLPPendingRewards += pendingYieldRewardsLP;
if (
// Account must have balance owed to be added to stakers map
wildAmounts.amount > 0n ||
wildAmounts.yieldAmount > 0n ||
lpAmounts.amount > 0n ||
lpAmounts.yieldAmount > 0n
) {
stakers.set(account, {
user: account,
amountStakedWILD: wildAmounts.amount.toString(),
amountStakedWILDYield: wildAmounts.yieldAmount.toString(),
amountStakedLP: lpAmounts.amount.toString(),
pendingYieldRewardsWILD: pendingYieldRewardsWild.toString(),
pendingYieldRewardsLP: pendingYieldRewardsLP.toString(),
});

totalWildStaked += wildAmounts.amount;
totalWildYield += wildAmounts.yieldAmount;
totalWildPendingRewards += pendingYieldRewardsWild;

totalLPStaked += lpAmounts.amount;
totalLPYield += lpAmounts.yieldAmount;
totalLPPendingRewards += pendingYieldRewardsLP;
} else {
// If staker has no balance remaining in contract, pending yield should be 0
assert.ok(pendingYieldRewardsWild === 0n);
assert.ok(pendingYieldRewardsLP === 0n);
}
} else {
throw Error(`Duplicate account found: ${account}`)
throw Error(`Duplicate account found: ${account}`);
}

// To track progress as we go log here
console.log("Processed: ", i);
}

Expand All @@ -114,7 +133,7 @@ const getStakesInPool = async (
totalLPPendingRewards,
} as Totals,
stakers,
]
];
};

const getStakers = async () => {
Expand Down Expand Up @@ -175,14 +194,30 @@ const main = async () => {
let stakers = Array<string>();

// When we decide on a snapshot timestamp, get this list again to be sure we have latest
if (!fs.existsSync("output/stakers.json")) { // TODO local file writes will be replaced when DB connection is setup
if (!fs.existsSync("output/stakers.json")) {
stakers = await getStakers();
fs.writeFileSync("output/stakers.json", JSON.stringify(stakers, undefined, 2));
console.log("Total # of stakers: ", stakers.length);
} else {
stakers = JSON.parse(fs.readFileSync("output/stakers.json").toString());
}

const documents : Array<OptionalId<Document>> = [];

for(const staker of stakers) {
documents.push({ address: staker } as unknown as OptionalId<Document>);
}

const dbName = "zns-domain-migration";
const uri = process.env.MONGO_DB_URI_WRITE;

if (!uri) throw Error("No connection string provided");

let client = (await getMongoAdapter(uri)).db(dbName);

// To avoid duplicate data, we clear the DB before any inserts
await client.dropCollection("stakers");
await client.collection("stakers").insertMany(documents);

console.log("Total # of stakers: ", stakers.length);
console.log("Starting...");

Expand All @@ -193,13 +228,10 @@ const main = async () => {
);

// user address, wild amount, LP amount
type MerkleData = [string, string, string];
const merkleData: Array<MerkleData> = [];
const merkleData : Array<MerkleData> = [];

// Turn into merkle data format needed
for (const entry of stakersMap.entries()) {
const account = entry[1];

for (const [, account] of stakersMap.entries()) {
const wildAmountOwed =
BigInt(account.amountStakedWILD) +
BigInt(account.amountStakedWILDYield) +
Expand All @@ -210,7 +242,9 @@ const main = async () => {

if (wildAmountOwed > 0n || lpAmountOwed > 0n) {
merkleData.push([account.user, wildAmountOwed.toString(), lpAmountOwed.toString()]);

} else {
console.log("Staker with 0 values found: ", account.user);
// Remove any stakers who have 0 owed balances to keep data in sync
stakersMap.delete(account.user);
}
Expand All @@ -227,31 +261,61 @@ const main = async () => {
// this value should always be 0
assert.equal(results.totalLPYield, 0n);

console.log("Total Wild Staked: ", results.totalWildStaked.toString());
console.log("Total Wild Yield: ", results.totalWildYield.toString());
console.log("Total Wild Pending Rewards: ", results.totalWildPendingRewards.toString());
console.log("Balance of Wild Pool: ", balanceOfWildPool.toString());

console.log("Total LP Staked: ", results.totalLPStaked.toString());
console.log("Total LP Yield: ", results.totalLPYield.toString());
console.log("Total LP Rewards: ", results.totalLPPendingRewards.toString());
console.log("Balance of LP Pool: ", balanceOfLpPool.toString());

const output = {
totalWildStaked: results.totalWildStaked.toString(),
totalWildYield: results.totalWildYield.toString(),
totalWildPendingRewards: results.totalWildPendingRewards.toString(),
balanceOfWildPool: balanceOfWildPool.toString(),
totalLPStaked: results.totalLPStaked.toString(),
totalLPYield: results.totalLPYield.toString(),
totalLPPendingRewards: results.totalLPPendingRewards.toString(),
balanceOfLpPool: balanceOfLpPool.toString(),
};

// Output merkle data as well as totals for verification
fs.writeFileSync("output/merkle_data.json", JSON.stringify(merkleData, undefined, 2));
fs.writeFileSync("output/totals.json", JSON.stringify(output, undefined, 2));
fs.writeFileSync("output/allStakers.json", JSON.stringify(Array.from(stakersMap), undefined, 2));
// Refresh connection after long running call `getStakesInPool` above
client = (await getMongoAdapter()).db(dbName);

// Always drop and recreate the collection if executing follow up runs to avoid data duplication
await client.dropCollection("metrics");
await client.collection("metrics").insertOne(
{
timestamp: Date.now(),
totalWildStaked: results.totalWildStaked.toString(),
totalWildYield: results.totalWildYield.toString(),
totalWildPendingRewards: results.totalWildPendingRewards.toString(),
balanceOfWildPool: balanceOfWildPool.toString(),
totalLPStaked: results.totalLPStaked.toString(),
totalLPYield: results.totalLPYield.toString(),
totalLPPendingRewards: results.totalLPPendingRewards.toString(),
balanceOfLpPool: balanceOfLpPool.toString(),
}
);

// Create merkle tree from user staking data and write it to DB
const merkleTree = StandardMerkleTree.of(merkleData, ["address", "uint256", "uint256"]);

const collectionName = "stakers-merkle-data";
await client.dropCollection(collectionName);

// Insert the merkle tree root ahead of leaf data
await client.collection(collectionName).insertOne(
{
timestamp: Date.now(),
merkleRoot: merkleTree.root,
}
);

const leafDocuments : Array<OptionalId<Document>> = [];
for (const leaf of merkleData) {
// Each leaf is `[user, wildAmountOwed, lpAmountOwed]`
const proof = merkleTree.getProof([leaf[0], leaf[1], leaf[2]]);

// Confirm the data is verifiable by the merkle tree
assert.ok(merkleTree.verify([leaf[0], leaf[1], leaf[2]], proof));

leafDocuments.push(
{
user: leaf[0],
wildAmountOwed: leaf[1],
lpAmountOwed: leaf[2],
merkleProof: merkleTree.getProof([leaf[0], leaf[1], leaf[2]]),
} as unknown as OptionalId<Document>
);
}

// Send formatted staker merkle data
await client.collection(collectionName).insertMany(leafDocuments);

console.log("Finished processing");
};

main().then(() => process.exit(0))
Expand Down
Loading