Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .changeset/good-baboons-doubt.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
"@talismn/chaindata-provider": patch
"@talismn/balances": patch
---

minimetadata v9
5 changes: 5 additions & 0 deletions .changeset/hungry-spiders-matter.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"@talismn/balances": patch
---

fix tao root staking pending rewards calculation
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,15 @@ export const calculatePendingRootClaimable = ({
networkId,
validatorRootClaimableRate,
dynamicInfoByNetuid,
alreadyClaimedByNetuid,
}: {
stake: bigint
hotkey: string
address: string
networkId: string
validatorRootClaimableRate: Map<number, bigint>
dynamicInfoByNetuid: Record<number, DynamicInfo | undefined>
alreadyClaimedByNetuid: Map<number, bigint>
}): SubDTaoBalance[] => {
const pendingRootClaimBalances: SubDTaoBalance[] = []

Expand All @@ -35,7 +37,11 @@ export const calculatePendingRootClaimable = ({

// Multiply claimable_rate by root_stake
// I96F32 multiplication: round((a * b) / 2^32)
const pendingRootClaim = (stake * claimableRate + (1n << 31n)) >> 32n
const totalClaimable = (stake * claimableRate + (1n << 31n)) >> 32n

// Subtract already claimed amount to get net pending claimable
const alreadyClaimed = alreadyClaimedByNetuid.get(netuid) ?? 0n
const pendingRootClaim = totalClaimable > alreadyClaimed ? totalClaimable - alreadyClaimed : 0n

pendingRootClaimBalances.push({
address,
Expand Down
143 changes: 142 additions & 1 deletion packages/balances/src/modules/substrate-dtao/fetchBalances.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,32 @@ export const fetchBalances: IBalanceModule<typeof MODULE_TYPE>["fetchBalances"]
? await fetchRootClaimableRates(connector, networkId, miniMetadata.data, rootHotkeys)
: new Map<string, Map<number, bigint>>()

// Collect all (address, hotkey, netuid) pairs for root stakes to fetch RootClaimed amounts
const addressHotkeyNetuidPairs: Array<[address: string, hotkey: string, netuid: number]> = []
for (const [address, stakes] of stakeInfos) {
for (const stake of stakes) {
if (stake.netuid === ROOT_NETUID) {
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey)
if (claimableRates) {
// For each netuid that has a claimable rate, we need to check RootClaimed
for (const netuid of claimableRates.keys()) {
addressHotkeyNetuidPairs.push([address, stake.hotkey, netuid])
}
}
}
}
}

const rootClaimedAmounts =
addressHotkeyNetuidPairs.length && miniMetadata.data
? await fetchRootClaimedAmounts(
connector,
networkId,
miniMetadata.data,
addressHotkeyNetuidPairs,
)
: new Map<string, Map<string, Map<number, bigint>>>()

const dynamicInfoByNetuid = keyBy(dynamicInfos.filter(isNotNil), (info) => info.netuid)

// Upserts a balance into the accumulator, merging stake values if the balance already exists.
Expand Down Expand Up @@ -157,13 +183,18 @@ export const fetchBalances: IBalanceModule<typeof MODULE_TYPE>["fetchBalances"]

// Root stake cases, we need to calculate the pending root claim and add to the balances
if (stake.netuid === ROOT_NETUID) {
const claimableRates = rootClaimableRatesByHotkey.get(stake.hotkey) ?? new Map()
const alreadyClaimedMap =
rootClaimedAmounts.get(address)?.get(stake.hotkey) ?? new Map<number, bigint>()

const pendingRootClaimBalances = calculatePendingRootClaimable({
stake: stake.stake,
hotkey: stake.hotkey,
address,
networkId,
validatorRootClaimableRate: rootClaimableRatesByHotkey.get(stake.hotkey) ?? new Map(),
validatorRootClaimableRate: claimableRates,
dynamicInfoByNetuid,
alreadyClaimedByNetuid: alreadyClaimedMap,
})
pendingRootClaimBalances.forEach((balance) => {
upsertBalance(acc, address, balance.tokenId, balance)
Expand Down Expand Up @@ -300,6 +331,26 @@ const buildRootClaimableStorageCoder = async (
return storageCoder
}

const buildRootClaimedStorageCoder = async (
networkId: string,
metadataRpc: `0x${string}` | null,
): Promise<ReturnType<ReturnType<typeof parseMetadataRpc>["builder"]["buildStorage"]> | null> => {
let storageCoder: ReturnType<typeof buildStorageCoder> | null = null

if (metadataRpc) {
try {
storageCoder = buildStorageCoder(metadataRpc, "SubtensorModule", "RootClaimed")
} catch (cause) {
log.warn(
`Failed to build storage coder for SubtensorModule.RootClaimed using provided metadata on ${networkId}`,
{ cause },
)
}
}

return storageCoder
}

const buildRootClaimableQueries = (
networkId: string,
hotkeys: string[],
Expand Down Expand Up @@ -359,3 +410,93 @@ const fetchRootClaimableRates = async (
return new Map(hotkeys.map((hotkey) => [hotkey, new Map<number, bigint>()]))
}
}

/**
 * Builds one RPC query pack per (address, hotkey, netuid) triple against the
 * `SubtensorModule.RootClaimed` storage map.
 *
 * Each query decodes to `[address, hotkey, netuid, claimed]`; a missing or
 * undecodable storage entry yields a claimed amount of `0n`.
 */
const buildRootClaimedQueries = (
  networkId: string,
  addressHotkeyNetuidPairs: Array<[address: string, hotkey: string, netuid: number]>,
  storageCoder: ReturnType<ReturnType<typeof parseMetadataRpc>["builder"]["buildStorage"]>,
): Array<RpcQueryPack<[string, string, number, bigint]>> =>
  addressHotkeyNetuidPairs.map(([address, hotkey, netuid]) => {
    // RootClaimed storage takes params: [netuid, hotkey, coldkey_ss58]
    let encodedKey: MaybeStateKey = null
    try {
      encodedKey = storageCoder.keys.enc(netuid, hotkey, address) as MaybeStateKey
    } catch (cause) {
      log.warn(
        `Failed to encode storage key for RootClaimed (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`,
        { cause },
      )
    }

    const decodeResult = ([hexValue]: MaybeStateKey[]): [string, string, number, bigint] => {
      // No value stored at the key means nothing has been claimed yet.
      if (!hexValue) return [address, hotkey, netuid, 0n]

      const claimed = decodeScale<bigint | null>(
        storageCoder,
        hexValue,
        `Failed to decode RootClaimed for (netuid=${netuid}, hotkey=${hotkey}, address=${address}) on ${networkId}`,
      )
      return [address, hotkey, netuid, claimed ?? 0n]
    }

    return { stateKeys: [encodedKey], decodeResult }
  })

/**
 * Fetches `SubtensorModule.RootClaimed` amounts for each (address, hotkey,
 * netuid) triple.
 *
 * @returns a nested map: address -> hotkey -> netuid -> claimed amount.
 *
 * On failure (no storage coder could be built, or the RPC call throws) the
 * result is zero-filled for every requested pair, so callers treat the full
 * claimable amount as still pending instead of crashing the balance fetch.
 */
const fetchRootClaimedAmounts = async (
  connector: IChainConnectorDot,
  networkId: string,
  metadataRpc: `0x${string}`,
  addressHotkeyNetuidPairs: Array<[address: string, hotkey: string, netuid: number]>,
): Promise<Map<string, Map<string, Map<number, bigint>>>> => {
  // Inserts one claimed amount into the nested address -> hotkey -> netuid map,
  // creating intermediate maps as needed. Shared by the success and fallback
  // paths (the original duplicated this logic in three places).
  const setClaimed = (
    result: Map<string, Map<string, Map<number, bigint>>>,
    address: string,
    hotkey: string,
    netuid: number,
    claimed: bigint,
  ): void => {
    if (!result.has(address)) result.set(address, new Map())
    const addressMap = result.get(address)!
    if (!addressMap.has(hotkey)) addressMap.set(hotkey, new Map())
    addressMap.get(hotkey)!.set(netuid, claimed)
  }

  // Fallback result: every requested pair maps to 0n (nothing claimed).
  const zeroFilledResult = (): Map<string, Map<string, Map<number, bigint>>> => {
    const result = new Map<string, Map<string, Map<number, bigint>>>()
    for (const [address, hotkey, netuid] of addressHotkeyNetuidPairs)
      setClaimed(result, address, hotkey, netuid, 0n)
    return result
  }

  if (!addressHotkeyNetuidPairs.length) {
    return new Map<string, Map<string, Map<number, bigint>>>()
  }

  const storageCoder = await buildRootClaimedStorageCoder(networkId, metadataRpc)
  if (!storageCoder) return zeroFilledResult()

  const queries = buildRootClaimedQueries(networkId, addressHotkeyNetuidPairs, storageCoder)

  try {
    const results = await fetchRpcQueryPack(connector, networkId, queries)
    const result = new Map<string, Map<string, Map<number, bigint>>>()
    for (const [address, hotkey, netuid, claimed] of results)
      setClaimed(result, address, hotkey, netuid, claimed)
    return result
  } catch (cause) {
    log.warn(`Failed to fetch RootClaimed for address-hotkey-netuid pairs on ${networkId}`, {
      cause,
    })
    return zeroFilledResult()
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ const getData = (metadataRpc: `0x${string}`): `0x${string}` | null => {

compactMetadata(
metadata,
[{ pallet: "SubtensorModule", items: ["TransferToggle", "RootClaimable"] }],
[{ pallet: "SubtensorModule", items: ["TransferToggle", "RootClaimable", "RootClaimed"] }],
[
{
runtimeApi: "StakeInfoRuntimeApi",
Expand Down
85 changes: 85 additions & 0 deletions packages/chaindata-provider/UPDATING_CHAINDATA.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# Updating Chaindata

When mini-metadata requires changes (e.g., adding new storage items or runtime API calls to the balances library), you must bump the mini-metadata version and configure a dedicated publishing folder in chaindata.

---

## Phase 1: Development

### Wallet Monorepo

1. Bump `CHAINDATA_PUB_FOLDER` in `./src/constants.ts`
- The mini-metadata version that chaindata builds is based on this value
```ts
const CHAINDATA_PUB_FOLDER = "pub/v9"
```
2. Add a changeset so CI publishes new package versions
3. Check the CI "Publish snapshot packages" task for the published versions
```
Example: @talismn/[email protected]
```

### Chaindata Repository

1. Create a new branch (e.g., `feat/chaindata-v9`)
2. Update `package.json` with the snapshot versions published by CI:
- `@talismn/balances`
- `@talismn/chaindata-provider`
- `@talismn/chain-connectors`
3. Update the output folder in `constants.ts` (e.g., `pub/v9`)
4. Run the build commands:
```sh
pnpm fetch-external # Builds mini-metadata v9 for all chains
pnpm build # Outputs pub/v9/chaindata.json
```
5. Push the branch and open a PR

### Back in Wallet Monorepo

1. Update `CHAINDATA_BRANCH` in `./src/constants.ts` to point to your chaindata branch
```ts
const CHAINDATA_BRANCH = "feat/chaindata-v9"
```

---

## Phase 2: Review

Test the wallet and wait for both PRs (wallet + chaindata) to be reviewed.

> ⚠️ **Important:** Delay merging the chaindata PR until the last moment.
>
> CI jobs that keep chaindata up to date only run on the `main` branch for the _current_ pub folder version. Merging chaindata too early will leave production users without chaindata updates until the next wallet release.

---

## Phase 3: Merge & Ship

While both PRs were under review, CI will have continued updating the chaindata `main` branch, creating merge conflicts in your chaindata PR. **Do not resolve these manually.**

### Chaindata Repository

1. Revert all data folder changes:
```sh
git checkout main -- data/
git push
```
2. ⚠️ **Verify** the PR contains no changes to `data/cache` or `data/generated`
3. Merge the chaindata PR to `main`
4. Wait for all running actions to complete
5. Manually trigger the `fetch-external` job from the GitHub Actions UI (on `main`)
6. Confirm it completes successfully and the subsequent `build` job outputs `/pub/v9/chaindata.json`

### Wallet Monorepo

1. Reset `CHAINDATA_BRANCH` in `./src/constants.ts` back to `main`
```ts
const CHAINDATA_BRANCH = "main"
```
2. Re-test the wallet in dev mode
3. Sync wallet's fallback mini-metadata with chaindata:
```sh
pnpm chore:generate-init-data
```
4. Merge to `main`
5. Ship the release 🚀
2 changes: 1 addition & 1 deletion packages/chaindata-provider/src/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
const CHAINDATA_BRANCH = "main"

// pub subfolder needs to be incremented each time the schema of chaindata changes, or when the content of the minimetadata changes
const CHAINDATA_PUB_FOLDER = "pub/v8"
const CHAINDATA_PUB_FOLDER = "pub/v9"

export const MINIMETADATA_VERSION = CHAINDATA_PUB_FOLDER.split("/").pop()!
if (!MINIMETADATA_VERSION) {
Expand Down
Loading