diff --git a/rust/processor/src/db/common/models/mod.rs b/rust/processor/src/db/common/models/mod.rs index 6cf3e7533..557f13052 100644 --- a/rust/processor/src/db/common/models/mod.rs +++ b/rust/processor/src/db/common/models/mod.rs @@ -5,4 +5,7 @@ pub mod event_models; pub mod fungible_asset_models; pub mod object_models; pub mod stake_models; +pub mod token_models; pub mod token_v2_models; + +const DEFAULT_NONE: &str = "NULL"; diff --git a/rust/processor/src/db/common/models/stake_models/stake_utils.rs b/rust/processor/src/db/common/models/stake_models/stake_utils.rs index b988a58c3..e29f54244 100644 --- a/rust/processor/src/db/common/models/stake_models/stake_utils.rs +++ b/rust/processor/src/db/common/models/stake_models/stake_utils.rs @@ -2,8 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - db::postgres::models::{ - default_models::move_resources::MoveResource, token_models::token_utils::Table, + db::{ + common::models::token_models::token_utils::Table, + postgres::models::default_models::move_resources::MoveResource, }, utils::util::{deserialize_from_string, standardize_address}, }; diff --git a/rust/processor/src/db/postgres/models/token_models/collection_datas.rs b/rust/processor/src/db/common/models/token_models/collection_datas.rs similarity index 100% rename from rust/processor/src/db/postgres/models/token_models/collection_datas.rs rename to rust/processor/src/db/common/models/token_models/collection_datas.rs diff --git a/rust/processor/src/db/postgres/models/token_models/mod.rs b/rust/processor/src/db/common/models/token_models/mod.rs similarity index 52% rename from rust/processor/src/db/postgres/models/token_models/mod.rs rename to rust/processor/src/db/common/models/token_models/mod.rs index e97b317c1..f26297ce2 100644 --- a/rust/processor/src/db/postgres/models/token_models/mod.rs +++ b/rust/processor/src/db/common/models/token_models/mod.rs @@ -1,11 +1,7 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - pub mod collection_datas; -pub mod nft_points; -pub mod token_activities; pub mod token_claims; pub mod token_datas; pub mod token_ownerships; +pub mod token_royalty; pub mod token_utils; pub mod tokens; diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_token_claims.rs b/rust/processor/src/db/common/models/token_models/token_claims.rs similarity index 62% rename from rust/processor/src/db/common/models/token_v2_models/raw_token_claims.rs rename to rust/processor/src/db/common/models/token_models/token_claims.rs index 383de7f7b..ce6be04b8 100644 --- a/rust/processor/src/db/common/models/token_v2_models/raw_token_claims.rs +++ b/rust/processor/src/db/common/models/token_models/token_claims.rs @@ -6,15 +6,20 @@ #![allow(clippy::unused_unit)] use crate::{ - db::{ - common::models::token_v2_models::raw_v2_token_activities::TokenActivityHelperV1, - postgres::models::token_models::{token_utils::TokenWriteSet, tokens::TableHandleToOwner}, + bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, + db::common::models::{ + token_models::{token_utils::TokenWriteSet, tokens::TableHandleToOwner}, + token_v2_models::v2_token_activities::TokenActivityHelperV1, }, + schema::current_token_pending_claims, utils::util::standardize_address, }; use ahash::AHashMap; +use allocative_derive::Allocative; use aptos_protos::transaction::v1::{DeleteTableItem, WriteTableItem}; -use bigdecimal::{BigDecimal, Zero}; +use bigdecimal::{BigDecimal, ToPrimitive, Zero}; +use field_count::FieldCount; +use 
parquet_derive::ParquetRecordWriter;
 use serde::{Deserialize, Serialize};
 
 // Map to keep track of the metadata of token offers that were claimed. The key is the token data id of the offer.
@@ -22,7 +27,7 @@ use serde::{Deserialize, Serialize};
 pub type TokenV1Claimed = AHashMap<String, TokenActivityHelperV1>;
 
 #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
-pub struct RawCurrentTokenPendingClaim {
+pub struct CurrentTokenPendingClaim {
     pub token_data_id_hash: String,
     pub property_version: BigDecimal,
     pub from_address: String,
@@ -39,7 +44,7 @@ pub struct RawCurrentTokenPendingClaim {
     pub collection_id: String,
 }
 
-impl Ord for RawCurrentTokenPendingClaim {
+impl Ord for CurrentTokenPendingClaim {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.token_data_id_hash
             .cmp(&other.token_data_id_hash)
@@ -49,13 +54,13 @@ impl Ord for RawCurrentTokenPendingClaim {
     }
 }
 
-impl PartialOrd for RawCurrentTokenPendingClaim {
+impl PartialOrd for CurrentTokenPendingClaim {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
     }
 }
 
-impl RawCurrentTokenPendingClaim {
+impl CurrentTokenPendingClaim {
     /// Token claim is stored in a table in the offerer's account. The key is token_offer_id (token_id + to address)
     /// and value is token (token_id + amount)
     pub fn from_write_table_item(
@@ -210,6 +215,124 @@ impl RawCurrentTokenPendingClaim {
     }
 }
 
-pub trait CurrentTokenPendingClaimConvertible {
-    fn from_raw(raw_item: RawCurrentTokenPendingClaim) -> Self;
+/// This is a parquet version of CurrentTokenPendingClaim
+#[derive(
+    Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize,
+)]
+pub struct ParquetCurrentTokenPendingClaim {
+    pub token_data_id_hash: String,
+    pub property_version: u64,
+    pub from_address: String,
+    pub to_address: String,
+    pub collection_data_id_hash: String,
+    pub creator_address: String,
+    pub collection_name: String,
+    pub name: String,
+    pub amount: String, // String format of BigDecimal
+    pub table_handle: String,
+    pub last_transaction_version: i64,
+    #[allocative(skip)]
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+    pub token_data_id: String,
+    pub collection_id: String,
+}
+
+impl NamedTable for ParquetCurrentTokenPendingClaim {
+    const TABLE_NAME: &'static str = "current_token_pending_claims";
+}
+
+impl HasVersion for ParquetCurrentTokenPendingClaim {
+    fn version(&self) -> i64 {
+        self.last_transaction_version
+    }
+}
+
+impl GetTimeStamp for ParquetCurrentTokenPendingClaim {
+    fn get_timestamp(&self) -> chrono::NaiveDateTime {
+        self.last_transaction_timestamp
+    }
+}
+
+impl From<CurrentTokenPendingClaim> for ParquetCurrentTokenPendingClaim {
+    fn from(raw_item: CurrentTokenPendingClaim) -> Self {
+        Self {
+            token_data_id_hash: raw_item.token_data_id_hash,
+            property_version: raw_item
+                .property_version
+                .to_u64()
+                .expect("Failed to convert property_version to u64"),
+            from_address: raw_item.from_address,
+            to_address: raw_item.to_address,
+            collection_data_id_hash: raw_item.collection_data_id_hash,
+            creator_address: raw_item.creator_address,
+            collection_name: raw_item.collection_name,
+            name: raw_item.name,
+            amount: raw_item.amount.to_string(), // (assuming amount is non-critical)
+            table_handle: raw_item.table_handle,
+            last_transaction_version: raw_item.last_transaction_version,
+            last_transaction_timestamp: raw_item.last_transaction_timestamp,
+            token_data_id: raw_item.token_data_id,
+            collection_id: raw_item.collection_id,
+        }
+    }
+}
+
+/// This is a postgres version of CurrentTokenPendingClaim
+#[derive(
+    Clone, Debug, Deserialize, Eq, FieldCount, Identifiable, Insertable, PartialEq, Serialize,
+)]
+#[diesel(primary_key(token_data_id_hash, property_version, from_address, to_address))]
+#[diesel(table_name = current_token_pending_claims)]
+pub struct PostgresCurrentTokenPendingClaim {
+    pub token_data_id_hash: String,
+    pub property_version: BigDecimal,
+    pub from_address: String,
+    pub to_address: String,
+    pub collection_data_id_hash: String,
+    pub creator_address: String,
+    pub collection_name: String,
+    pub name: String,
+    pub amount: BigDecimal,
+    pub table_handle: String,
+    pub last_transaction_version: i64,
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+    pub token_data_id: String,
+    pub collection_id: String,
+}
+
+impl Ord for PostgresCurrentTokenPendingClaim {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.token_data_id_hash
+            .cmp(&other.token_data_id_hash)
+            .then(self.property_version.cmp(&other.property_version))
+            .then(self.from_address.cmp(&other.from_address))
+            .then(self.to_address.cmp(&other.to_address))
+    }
+}
+
+impl PartialOrd for PostgresCurrentTokenPendingClaim {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl From<CurrentTokenPendingClaim> for PostgresCurrentTokenPendingClaim {
+    fn from(raw_item: CurrentTokenPendingClaim) -> Self {
+        Self {
+            token_data_id_hash: raw_item.token_data_id_hash,
+            property_version: raw_item.property_version,
+            from_address: raw_item.from_address,
+            to_address: raw_item.to_address,
+            collection_data_id_hash: raw_item.collection_data_id_hash,
+            creator_address: raw_item.creator_address,
+            collection_name: raw_item.collection_name,
+            name: raw_item.name,
+            amount: raw_item.amount,
+            table_handle: raw_item.table_handle,
+            last_transaction_version: raw_item.last_transaction_version,
+            last_transaction_timestamp: raw_item.last_transaction_timestamp,
+            token_data_id: raw_item.token_data_id,
+            collection_id: raw_item.collection_id,
+        }
+    }
 }
diff --git a/rust/processor/src/db/postgres/models/token_models/token_datas.rs b/rust/processor/src/db/common/models/token_models/token_datas.rs
similarity index 100%
rename from rust/processor/src/db/postgres/models/token_models/token_datas.rs
rename to rust/processor/src/db/common/models/token_models/token_datas.rs
diff --git a/rust/processor/src/db/postgres/models/token_models/token_ownerships.rs b/rust/processor/src/db/common/models/token_models/token_ownerships.rs
similarity index 100%
rename from rust/processor/src/db/postgres/models/token_models/token_ownerships.rs
rename to rust/processor/src/db/common/models/token_models/token_ownerships.rs
diff --git a/rust/processor/src/db/common/models/token_models/token_royalty.rs b/rust/processor/src/db/common/models/token_models/token_royalty.rs
new file mode 100644
index 000000000..ed936d177
--- /dev/null
+++ b/rust/processor/src/db/common/models/token_models/token_royalty.rs
@@ -0,0 +1,182 @@
+// Copyright © Aptos Foundation
+// SPDX-License-Identifier: Apache-2.0
+
+// This is required because a diesel macro makes clippy sad
+#![allow(clippy::extra_unused_lifetimes)]
+#![allow(clippy::unused_unit)]
+
+use crate::{
+    bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable},
+    db::common::models::token_models::token_utils::TokenWriteSet,
+    schema::current_token_royalty_v1,
+};
+use allocative_derive::Allocative;
+use aptos_protos::transaction::v1::WriteTableItem;
+use bigdecimal::BigDecimal;
+use field_count::FieldCount;
+use parquet_derive::ParquetRecordWriter;
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+pub struct CurrentTokenRoyaltyV1 {
+    pub token_data_id: String,
+    pub payee_address: String,
+    pub royalty_points_numerator: BigDecimal,
+    pub royalty_points_denominator: BigDecimal,
+    pub last_transaction_version: i64,
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+}
+
+impl Ord for CurrentTokenRoyaltyV1 {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.token_data_id.cmp(&other.token_data_id)
+    }
+}
+impl PartialOrd for CurrentTokenRoyaltyV1 {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl CurrentTokenRoyaltyV1 {
+    pub fn pk(&self) -> String {
+        self.token_data_id.clone()
+    }
+
+    // Royalty for v2 token is more complicated and not supported yet. For token v2, royalty can be on the collection (default) or on
+    // the token (override).
+    pub fn get_v1_from_write_table_item(
+        write_table_item: &WriteTableItem,
+        transaction_version: i64,
+        transaction_timestamp: chrono::NaiveDateTime,
+    ) -> anyhow::Result<Option<Self>> {
+        let table_item_data = write_table_item.data.as_ref().unwrap();
+
+        let maybe_token_data = match TokenWriteSet::from_table_item_type(
+            table_item_data.value_type.as_str(),
+            &table_item_data.value,
+            transaction_version,
+        )? {
+            Some(TokenWriteSet::TokenData(inner)) => Some(inner),
+            _ => None,
+        };
+
+        if let Some(token_data) = maybe_token_data {
+            let maybe_token_data_id = match TokenWriteSet::from_table_item_type(
+                table_item_data.key_type.as_str(),
+                &table_item_data.key,
+                transaction_version,
+            )? {
+                Some(TokenWriteSet::TokenDataId(inner)) => Some(inner),
+                _ => None,
+            };
+            if let Some(token_data_id_struct) = maybe_token_data_id {
+                // token data id is the 0x{hash} version of the creator, collection name, and token name
+                let token_data_id = token_data_id_struct.to_id();
+                let payee_address = token_data.royalty.get_payee_address();
+                let royalty_points_numerator = token_data.royalty.royalty_points_numerator.clone();
+                let royalty_points_denominator =
+                    token_data.royalty.royalty_points_denominator.clone();
+
+                return Ok(Some(Self {
+                    token_data_id,
+                    payee_address,
+                    royalty_points_numerator,
+                    royalty_points_denominator,
+                    last_transaction_version: transaction_version,
+                    last_transaction_timestamp: transaction_timestamp,
+                }));
+            } else {
+                tracing::warn!(
+                    transaction_version,
+                    key_type = table_item_data.key_type,
+                    key = table_item_data.key,
+                    "Expecting token_data_id as key for value = token_data"
+                );
+            }
+        }
+        Ok(None)
+    }
+}
+
+/// This is a parquet version of CurrentTokenRoyaltyV1
+#[derive(
+    Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize,
+)]
+pub struct ParquetCurrentTokenRoyaltyV1 {
+    pub token_data_id: String,
+    pub payee_address: String,
+    pub royalty_points_numerator: String, // String format of BigDecimal
+    pub royalty_points_denominator: String, // String format of BigDecimal
+    pub last_transaction_version: i64,
+    #[allocative(skip)]
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+}
+
+impl NamedTable for ParquetCurrentTokenRoyaltyV1 {
+    const TABLE_NAME: &'static str = "current_token_royalties_v1";
+}
+
+impl HasVersion for ParquetCurrentTokenRoyaltyV1 {
+    fn version(&self) -> i64 {
+        self.last_transaction_version
+    }
+}
+
+impl GetTimeStamp for ParquetCurrentTokenRoyaltyV1 {
+    fn get_timestamp(&self) -> chrono::NaiveDateTime {
+        self.last_transaction_timestamp
+    }
+}
+
+impl From<CurrentTokenRoyaltyV1> for ParquetCurrentTokenRoyaltyV1 {
+    fn from(raw_item: CurrentTokenRoyaltyV1) -> Self {
+        Self {
+            token_data_id:
raw_item.token_data_id, + payee_address: raw_item.payee_address, + royalty_points_numerator: raw_item.royalty_points_numerator.to_string(), + royalty_points_denominator: raw_item.royalty_points_denominator.to_string(), + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + } + } +} + +/// This is a postgres version of CurrentTokenRoyaltyV1 +#[derive( + Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize, PartialEq, Eq, +)] +#[diesel(primary_key(token_data_id))] +#[diesel(table_name = current_token_royalty_v1)] +pub struct PostgresCurrentTokenRoyaltyV1 { + pub token_data_id: String, + pub payee_address: String, + pub royalty_points_numerator: BigDecimal, + pub royalty_points_denominator: BigDecimal, + pub last_transaction_version: i64, + pub last_transaction_timestamp: chrono::NaiveDateTime, +} + +impl Ord for PostgresCurrentTokenRoyaltyV1 { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.token_data_id.cmp(&other.token_data_id) + } +} +impl PartialOrd for PostgresCurrentTokenRoyaltyV1 { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl From for PostgresCurrentTokenRoyaltyV1 { + fn from(raw_item: CurrentTokenRoyaltyV1) -> Self { + Self { + token_data_id: raw_item.token_data_id, + payee_address: raw_item.payee_address, + royalty_points_numerator: raw_item.royalty_points_numerator, + royalty_points_denominator: raw_item.royalty_points_denominator, + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + } + } +} diff --git a/rust/processor/src/db/postgres/models/token_models/token_utils.rs b/rust/processor/src/db/common/models/token_models/token_utils.rs similarity index 100% rename from rust/processor/src/db/postgres/models/token_models/token_utils.rs rename to rust/processor/src/db/common/models/token_models/token_utils.rs diff --git a/rust/processor/src/db/postgres/models/token_models/tokens.rs b/rust/processor/src/db/common/models/token_models/tokens.rs similarity index 100% rename from rust/processor/src/db/postgres/models/token_models/tokens.rs rename to rust/processor/src/db/common/models/token_models/tokens.rs diff --git a/rust/processor/src/db/common/models/token_v2_models/mod.rs b/rust/processor/src/db/common/models/token_v2_models/mod.rs index f00227296..e343d2b9b 100644 --- a/rust/processor/src/db/common/models/token_v2_models/mod.rs +++ b/rust/processor/src/db/common/models/token_v2_models/mod.rs @@ -1,7 +1,6 @@ -pub mod raw_token_claims; -pub mod raw_v1_token_royalty; -pub mod raw_v2_token_activities; -pub mod raw_v2_token_datas; -pub mod raw_v2_token_metadata; -pub mod raw_v2_token_ownerships; +pub mod v2_collections; +pub mod v2_token_activities; +pub mod v2_token_datas; +pub mod v2_token_metadata; +pub mod v2_token_ownerships; pub mod v2_token_utils; diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs b/rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs deleted file mode 100644 index 1f793d655..000000000 --- a/rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use 
crate::db::postgres::models::token_models::token_utils::TokenWriteSet; -use aptos_protos::transaction::v1::WriteTableItem; -use bigdecimal::BigDecimal; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] -pub struct RawCurrentTokenRoyaltyV1 { - pub token_data_id: String, - pub payee_address: String, - pub royalty_points_numerator: BigDecimal, - pub royalty_points_denominator: BigDecimal, - pub last_transaction_version: i64, - pub last_transaction_timestamp: chrono::NaiveDateTime, -} - -impl Ord for RawCurrentTokenRoyaltyV1 { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.token_data_id.cmp(&other.token_data_id) - } -} -impl PartialOrd for RawCurrentTokenRoyaltyV1 { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl RawCurrentTokenRoyaltyV1 { - pub fn pk(&self) -> String { - self.token_data_id.clone() - } - - // Royalty for v2 token is more complicated and not supported yet. For token v2, royalty can be on the collection (default) or on - // the token (override). - pub fn get_v1_from_write_table_item( - write_table_item: &WriteTableItem, - transaction_version: i64, - transaction_timestamp: chrono::NaiveDateTime, - ) -> anyhow::Result> { - let table_item_data = write_table_item.data.as_ref().unwrap(); - - let maybe_token_data = match TokenWriteSet::from_table_item_type( - table_item_data.value_type.as_str(), - &table_item_data.value, - transaction_version, - )? { - Some(TokenWriteSet::TokenData(inner)) => Some(inner), - _ => None, - }; - - if let Some(token_data) = maybe_token_data { - let maybe_token_data_id = match TokenWriteSet::from_table_item_type( - table_item_data.key_type.as_str(), - &table_item_data.key, - transaction_version, - )? { - Some(TokenWriteSet::TokenDataId(inner)) => Some(inner), - _ => None, - }; - if let Some(token_data_id_struct) = maybe_token_data_id { - // token data id is the 0x{hash} version of the creator, collection name, and token name - let token_data_id = token_data_id_struct.to_id(); - let payee_address = token_data.royalty.get_payee_address(); - let royalty_points_numerator = token_data.royalty.royalty_points_numerator.clone(); - let royalty_points_denominator = - token_data.royalty.royalty_points_denominator.clone(); - - return Ok(Some(Self { - token_data_id, - payee_address, - royalty_points_numerator, - royalty_points_denominator, - last_transaction_version: transaction_version, - last_transaction_timestamp: transaction_timestamp, - })); - } else { - tracing::warn!( - transaction_version, - key_type = table_item_data.key_type, - key = table_item_data.key, - "Expecting token_data_id as key for value = token_data" - ); - } - } - Ok(None) - } -} - -pub trait CurrentTokenRoyaltyV1Convertible { - fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self; -} diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v2_collections.rs b/rust/processor/src/db/common/models/token_v2_models/v2_collections.rs similarity index 99% rename from rust/processor/src/db/postgres/models/token_v2_models/v2_collections.rs rename to rust/processor/src/db/common/models/token_v2_models/v2_collections.rs index e17686efd..bd0380ffe 100644 --- a/rust/processor/src/db/postgres/models/token_v2_models/v2_collections.rs +++ b/rust/processor/src/db/common/models/token_v2_models/v2_collections.rs @@ -9,16 +9,14 @@ use crate::{ db::{ common::models::{ object_models::v2_object_utils::ObjectAggregatedDataMapping, - token_v2_models::v2_token_utils::{Collection, TokenStandard}, - }, - 
postgres::models::{ - resources::FromWriteResource, token_models::{ collection_datas::CollectionData, token_utils::{CollectionDataIdType, TokenWriteSet}, tokens::TableHandleToOwner, }, + token_v2_models::v2_token_utils::{Collection, TokenStandard}, }, + postgres::models::resources::FromWriteResource, }, schema::{collections_v2, current_collections_v2}, utils::{database::DbPoolConnection, util::standardize_address}, diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_activities.rs b/rust/processor/src/db/common/models/token_v2_models/v2_token_activities.rs similarity index 79% rename from rust/processor/src/db/common/models/token_v2_models/raw_v2_token_activities.rs rename to rust/processor/src/db/common/models/token_v2_models/v2_token_activities.rs index 264074e17..9d94f6788 100644 --- a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_activities.rs +++ b/rust/processor/src/db/common/models/token_v2_models/v2_token_activities.rs @@ -6,24 +6,27 @@ #![allow(clippy::unused_unit)] use crate::{ - db::{ - common::models::{ - object_models::v2_object_utils::ObjectAggregatedDataMapping, - token_v2_models::{ - raw_token_claims::TokenV1Claimed, - v2_token_utils::{TokenStandard, V2TokenEvent}, - }, + bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, + db::common::models::{ + object_models::v2_object_utils::ObjectAggregatedDataMapping, + token_models::{ + token_claims::TokenV1Claimed, + token_utils::{TokenDataIdType, TokenEvent}, }, - postgres::models::token_models::token_utils::{TokenDataIdType, TokenEvent}, + token_v2_models::v2_token_utils::{TokenStandard, V2TokenEvent}, }, + schema::token_activities_v2, utils::util::standardize_address, }; +use allocative_derive::Allocative; use aptos_protos::transaction::v1::Event; -use bigdecimal::{BigDecimal, One, Zero}; +use bigdecimal::{BigDecimal, One, ToPrimitive, Zero}; +use field_count::FieldCount; +use parquet_derive::ParquetRecordWriter; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RawTokenActivityV2 { +pub struct TokenActivityV2 { pub transaction_version: i64, pub event_index: i64, pub event_account_address: String, @@ -61,7 +64,7 @@ struct TokenActivityHelperV2 { pub event_type: String, } -impl RawTokenActivityV2 { +impl TokenActivityV2 { pub async fn get_nft_v2_from_parsed_event( event: &Event, txn_version: i64, @@ -355,6 +358,108 @@ impl RawTokenActivityV2 { } } -pub trait TokenActivityV2Convertible { - fn from_raw(raw_item: RawTokenActivityV2) -> Self; +/// This is a parquet version of TokenActivityV2 +#[derive( + Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, +)] +pub struct ParquetTokenActivityV2 { + pub txn_version: i64, + pub event_index: i64, + pub event_account_address: String, + pub token_data_id: String, + pub property_version_v1: u64, // BigDecimal + pub type_: String, + pub from_address: Option, + pub to_address: Option, + pub token_amount: String, // BigDecimal + pub before_value: Option, + pub after_value: Option, + pub entry_function_id_str: Option, + pub token_standard: String, + pub is_fungible_v2: Option, + #[allocative(skip)] + pub block_timestamp: chrono::NaiveDateTime, +} + +impl NamedTable for ParquetTokenActivityV2 { + const TABLE_NAME: &'static str = "token_activities_v2"; +} + +impl HasVersion for ParquetTokenActivityV2 { + fn version(&self) -> i64 { + self.txn_version + } +} + +impl GetTimeStamp for ParquetTokenActivityV2 { + fn get_timestamp(&self) -> 
chrono::NaiveDateTime { + self.block_timestamp + } +} + +impl From for ParquetTokenActivityV2 { + fn from(raw_item: TokenActivityV2) -> Self { + Self { + txn_version: raw_item.transaction_version, + event_index: raw_item.event_index, + event_account_address: raw_item.event_account_address, + token_data_id: raw_item.token_data_id, + property_version_v1: raw_item.property_version_v1.to_u64().unwrap(), + type_: raw_item.type_, + from_address: raw_item.from_address, + to_address: raw_item.to_address, + token_amount: raw_item.token_amount.to_string(), + before_value: raw_item.before_value, + after_value: raw_item.after_value, + entry_function_id_str: raw_item.entry_function_id_str, + token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + block_timestamp: raw_item.transaction_timestamp, + } + } +} + +/// This is a postgres version of TokenActivityV2 + +#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] +#[diesel(primary_key(transaction_version, event_index))] +#[diesel(table_name = token_activities_v2)] +pub struct PostgresTokenActivityV2 { + pub transaction_version: i64, + pub event_index: i64, + pub event_account_address: String, + pub token_data_id: String, + pub property_version_v1: BigDecimal, + pub type_: String, + pub from_address: Option, + pub to_address: Option, + pub token_amount: BigDecimal, + pub before_value: Option, + pub after_value: Option, + pub entry_function_id_str: Option, + pub token_standard: String, + pub is_fungible_v2: Option, + pub transaction_timestamp: chrono::NaiveDateTime, +} + +impl From for PostgresTokenActivityV2 { + fn from(raw_item: TokenActivityV2) -> Self { + Self { + transaction_version: raw_item.transaction_version, + event_index: raw_item.event_index, + event_account_address: raw_item.event_account_address, + token_data_id: raw_item.token_data_id, + property_version_v1: raw_item.property_version_v1, + type_: raw_item.type_, + from_address: raw_item.from_address, + to_address: raw_item.to_address, + token_amount: raw_item.token_amount, + before_value: raw_item.before_value, + after_value: raw_item.after_value, + entry_function_id_str: raw_item.entry_function_id_str, + token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + transaction_timestamp: raw_item.transaction_timestamp, + } + } } diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_datas.rs b/rust/processor/src/db/common/models/token_v2_models/v2_token_datas.rs similarity index 61% rename from rust/processor/src/db/common/models/token_v2_models/raw_v2_token_datas.rs rename to rust/processor/src/db/common/models/token_v2_models/v2_token_datas.rs index 1ed52b35c..afa8c958a 100644 --- a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_datas.rs +++ b/rust/processor/src/db/common/models/token_v2_models/v2_token_datas.rs @@ -6,23 +6,31 @@ #![allow(clippy::unused_unit)] use crate::{ + bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, db::{ common::models::{ object_models::v2_object_utils::ObjectAggregatedDataMapping, + token_models::token_utils::TokenWriteSet, token_v2_models::v2_token_utils::{TokenStandard, TokenV2, TokenV2Burned}, + DEFAULT_NONE, }, - postgres::models::{ - resources::FromWriteResource, token_models::token_utils::TokenWriteSet, - }, + postgres::models::resources::FromWriteResource, }, + schema::current_token_datas_v2, utils::util::standardize_address, }; +use allocative_derive::Allocative; +use anyhow::Context; use 
aptos_protos::transaction::v1::{DeleteResource, WriteResource, WriteTableItem}; use bigdecimal::BigDecimal; +use diesel::prelude::*; +use field_count::FieldCount; +use parquet_derive::ParquetRecordWriter; use serde::{Deserialize, Serialize}; +use tracing::error; #[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct RawTokenDataV2 { +pub struct TokenDataV2 { pub transaction_version: i64, pub write_set_change_index: i64, pub token_data_id: String, @@ -43,12 +51,8 @@ pub struct RawTokenDataV2 { pub is_deleted_v2: Option, } -pub trait TokenDataV2Convertible { - fn from_raw(raw_item: RawTokenDataV2) -> Self; -} - #[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RawCurrentTokenDataV2 { +pub struct CurrentTokenDataV2 { pub token_data_id: String, pub collection_id: String, pub token_name: String, @@ -68,10 +72,10 @@ pub struct RawCurrentTokenDataV2 { } pub trait CurrentTokenDataV2Convertible { - fn from_raw(raw_item: RawCurrentTokenDataV2) -> Self; + fn from_raw(raw_item: CurrentTokenDataV2) -> Self; } -impl RawTokenDataV2 { +impl TokenDataV2 { // TODO: remove the useless_asref lint when new clippy nighly is released. #[allow(clippy::useless_asref)] pub fn get_v2_from_write_resource( @@ -80,7 +84,7 @@ impl RawTokenDataV2 { write_set_change_index: i64, txn_timestamp: chrono::NaiveDateTime, object_metadatas: &ObjectAggregatedDataMapping, - ) -> anyhow::Result> { + ) -> anyhow::Result> { if let Some(inner) = &TokenV2::from_write_resource(write_resource)? { let token_data_id = standardize_address(&write_resource.address.to_string()); let mut token_name = inner.get_name_trunc(); @@ -130,7 +134,7 @@ impl RawTokenDataV2 { decimals: None, is_deleted_v2: None, }, - RawCurrentTokenDataV2 { + CurrentTokenDataV2 { token_data_id, collection_id, token_name, @@ -159,11 +163,11 @@ impl RawTokenDataV2 { txn_version: i64, txn_timestamp: chrono::NaiveDateTime, tokens_burned: &TokenV2Burned, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let token_data_id = standardize_address(&write_resource.address.to_string()); // reminder that v1 events won't get to this codepath if let Some(burn_event_v2) = tokens_burned.get(&standardize_address(&token_data_id)) { - Ok(Some(RawCurrentTokenDataV2 { + Ok(Some(CurrentTokenDataV2 { token_data_id, collection_id: burn_event_v2.get_collection_address(), token_name: "".to_string(), @@ -191,11 +195,11 @@ impl RawTokenDataV2 { txn_version: i64, txn_timestamp: chrono::NaiveDateTime, tokens_burned: &TokenV2Burned, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let token_data_id = standardize_address(&delete_resource.address.to_string()); // reminder that v1 events won't get to this codepath if let Some(burn_event_v2) = tokens_burned.get(&standardize_address(&token_data_id)) { - Ok(Some(RawCurrentTokenDataV2 { + Ok(Some(CurrentTokenDataV2 { token_data_id, collection_id: burn_event_v2.get_collection_address(), token_name: "".to_string(), @@ -222,7 +226,7 @@ impl RawTokenDataV2 { txn_version: i64, write_set_change_index: i64, txn_timestamp: chrono::NaiveDateTime, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let table_item_data = table_item.data.as_ref().unwrap(); let maybe_token_data = match TokenWriteSet::from_table_item_type( @@ -270,7 +274,7 @@ impl RawTokenDataV2 { decimals: None, is_deleted_v2: None, }, - RawCurrentTokenDataV2 { + CurrentTokenDataV2 { token_data_id, collection_id, token_name, @@ -300,3 +304,181 @@ impl RawTokenDataV2 { Ok(None) } } + +/// This is a parquet version of TokenDataV2 + +#[derive(Allocative, Clone, Debug, Default, 
Deserialize, ParquetRecordWriter, Serialize)] +pub struct ParquetTokenDataV2 { + pub txn_version: i64, + pub write_set_change_index: i64, + pub token_data_id: String, + pub collection_id: String, + pub token_name: String, + pub largest_property_version_v1: Option, // String format of BigDecimal + pub token_uri: String, + pub token_properties: String, + pub description: String, + pub token_standard: String, + pub is_fungible_v2: Option, + #[allocative(skip)] + pub block_timestamp: chrono::NaiveDateTime, + pub is_deleted_v2: Option, +} + +impl NamedTable for ParquetTokenDataV2 { + const TABLE_NAME: &'static str = "token_datas_v2"; +} + +impl HasVersion for ParquetTokenDataV2 { + fn version(&self) -> i64 { + self.txn_version + } +} + +impl GetTimeStamp for ParquetTokenDataV2 { + fn get_timestamp(&self) -> chrono::NaiveDateTime { + self.block_timestamp + } +} + +impl From for ParquetTokenDataV2 { + fn from(raw_item: TokenDataV2) -> Self { + Self { + txn_version: raw_item.transaction_version, + write_set_change_index: raw_item.write_set_change_index, + token_data_id: raw_item.token_data_id, + collection_id: raw_item.collection_id, + token_name: raw_item.token_name, + largest_property_version_v1: raw_item + .largest_property_version_v1 + .map(|v| v.to_string()), + token_uri: raw_item.token_uri, + token_properties: canonical_json::to_string(&raw_item.token_properties.clone()) + .context("Failed to serialize token properties") + .unwrap(), + description: raw_item.description, + token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + block_timestamp: raw_item.transaction_timestamp, + is_deleted_v2: raw_item.is_deleted_v2, + } + } +} + +#[derive(Allocative, Clone, Debug, Default, Deserialize, ParquetRecordWriter, Serialize)] +pub struct ParquetCurrentTokenDataV2 { + pub token_data_id: String, + pub collection_id: String, + pub token_name: String, + pub maximum: Option, // BigDecimal + pub supply: Option, // BigDecimal + pub largest_property_version_v1: Option, // String format of BigDecimal + pub token_uri: String, + pub token_properties: String, // serde_json::Value, + pub description: String, + pub token_standard: String, + pub is_fungible_v2: Option, + pub last_transaction_version: i64, + #[allocative(skip)] + pub last_transaction_timestamp: chrono::NaiveDateTime, + // Deprecated, but still here for backwards compatibility + pub decimals: Option, + pub is_deleted_v2: Option, +} + +impl NamedTable for ParquetCurrentTokenDataV2 { + const TABLE_NAME: &'static str = "current_token_datas_v2"; +} + +impl HasVersion for ParquetCurrentTokenDataV2 { + fn version(&self) -> i64 { + self.last_transaction_version + } +} + +impl GetTimeStamp for ParquetCurrentTokenDataV2 { + fn get_timestamp(&self) -> chrono::NaiveDateTime { + self.last_transaction_timestamp + } +} + +impl From for ParquetCurrentTokenDataV2 { + fn from(raw_item: CurrentTokenDataV2) -> Self { + Self { + token_data_id: raw_item.token_data_id, + collection_id: raw_item.collection_id, + token_name: raw_item.token_name, + maximum: raw_item.maximum.map(|v| v.to_string()), + supply: raw_item.supply.map(|v| v.to_string()), + largest_property_version_v1: raw_item + .largest_property_version_v1 + .map(|v| v.to_string()), + token_uri: raw_item.token_uri, + token_properties: canonical_json::to_string(&raw_item.token_properties).unwrap_or_else( + |_| { + error!( + "Failed to serialize token_properties to JSON: {:?}", + raw_item.token_properties + ); + DEFAULT_NONE.to_string() + }, + ), + description: raw_item.description, + 
token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + decimals: raw_item.decimals, + is_deleted_v2: raw_item.is_deleted_v2, + } + } +} + +/// This is a postgres version of TokenDataV2 + +// PK of current_token_datas_v2, i.e. token_data_id +pub type CurrentTokenDataV2PK = String; + +#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] +#[diesel(primary_key(token_data_id))] +#[diesel(table_name = current_token_datas_v2)] +pub struct PostgresCurrentTokenDataV2 { + pub token_data_id: String, + pub collection_id: String, + pub token_name: String, + pub maximum: Option, + pub supply: Option, + pub largest_property_version_v1: Option, + pub token_uri: String, + pub token_properties: serde_json::Value, + pub description: String, + pub token_standard: String, + pub is_fungible_v2: Option, + pub last_transaction_version: i64, + pub last_transaction_timestamp: chrono::NaiveDateTime, + // Deprecated, but still here for backwards compatibility + pub decimals: Option, + pub is_deleted_v2: Option, +} + +impl From for PostgresCurrentTokenDataV2 { + fn from(raw_item: CurrentTokenDataV2) -> Self { + Self { + token_data_id: raw_item.token_data_id, + collection_id: raw_item.collection_id, + token_name: raw_item.token_name, + maximum: raw_item.maximum, + supply: raw_item.supply, + largest_property_version_v1: raw_item.largest_property_version_v1, + token_uri: raw_item.token_uri, + token_properties: raw_item.token_properties, + description: raw_item.description, + token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + decimals: raw_item.decimals, + is_deleted_v2: raw_item.is_deleted_v2, + } + } +} diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs b/rust/processor/src/db/common/models/token_v2_models/v2_token_metadata.rs similarity index 58% rename from rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs rename to rust/processor/src/db/common/models/token_v2_models/v2_token_metadata.rs index 0b9cd405b..09a398eda 100644 --- a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs +++ b/rust/processor/src/db/common/models/token_v2_models/v2_token_metadata.rs @@ -6,26 +6,34 @@ #![allow(clippy::unused_unit)] use crate::{ + bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, db::{ - common::models::object_models::v2_object_utils::ObjectAggregatedDataMapping, + common::models::{ + object_models::v2_object_utils::ObjectAggregatedDataMapping, + token_models::token_utils::NAME_LENGTH, DEFAULT_NONE, + }, postgres::models::{ default_models::move_resources::MoveResource, resources::{COIN_ADDR, TOKEN_ADDR, TOKEN_V2_ADDR}, - token_models::token_utils::NAME_LENGTH, }, }, + schema::current_token_v2_metadata, utils::util::{standardize_address, truncate_str}, }; +use allocative_derive::Allocative; use anyhow::Context; use aptos_protos::transaction::v1::WriteResource; +use field_count::FieldCount; +use parquet_derive::ParquetRecordWriter; use serde::{Deserialize, Serialize}; use serde_json::Value; +use tracing::error; // PK of current_objects, i.e. 
object_address, resource_type pub type CurrentTokenV2MetadataPK = (String, String); #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct RawCurrentTokenV2Metadata { +pub struct CurrentTokenV2Metadata { pub object_address: String, pub resource_type: String, pub data: Value, @@ -34,20 +42,20 @@ pub struct RawCurrentTokenV2Metadata { pub last_transaction_timestamp: chrono::NaiveDateTime, } -impl Ord for RawCurrentTokenV2Metadata { +impl Ord for CurrentTokenV2Metadata { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.object_address .cmp(&other.object_address) .then(self.resource_type.cmp(&other.resource_type)) } } -impl PartialOrd for RawCurrentTokenV2Metadata { +impl PartialOrd for CurrentTokenV2Metadata { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl RawCurrentTokenV2Metadata { +impl CurrentTokenV2Metadata { /// Parsing unknown resources with 0x4::token::Token pub fn from_write_resource( write_resource: &WriteResource, @@ -77,7 +85,7 @@ impl RawCurrentTokenV2Metadata { } let resource_type = truncate_str(&resource.type_, NAME_LENGTH); - return Ok(Some(RawCurrentTokenV2Metadata { + return Ok(Some(CurrentTokenV2Metadata { object_address, resource_type, data: resource @@ -93,6 +101,47 @@ impl RawCurrentTokenV2Metadata { } } -pub trait CurrentTokenV2MetadataConvertible { - fn from_raw(raw_item: RawCurrentTokenV2Metadata) -> Self; +/// This is the parquet version of CurrentTokenV2Metadata +#[derive( + Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, +)] +pub struct ParquetCurrentTokenV2Metadata { + pub object_address: String, + pub resource_type: String, + pub data: String, + pub state_key_hash: String, + pub last_transaction_version: i64, + #[allocative(skip)] + pub last_transaction_timestamp: chrono::NaiveDateTime, +} +impl NamedTable for ParquetCurrentTokenV2Metadata { + const TABLE_NAME: &'static str = "current_token_v2_metadata"; +} + +impl HasVersion for ParquetCurrentTokenV2Metadata { + fn version(&self) -> i64 { + self.last_transaction_version + } +} + +impl GetTimeStamp for ParquetCurrentTokenV2Metadata { + fn get_timestamp(&self) -> chrono::NaiveDateTime { + self.last_transaction_timestamp + } +} + +impl From for ParquetCurrentTokenV2Metadata { + fn from(raw_item: CurrentTokenV2Metadata) -> Self { + Self { + object_address: raw_item.object_address, + resource_type: raw_item.resource_type, + data: canonical_json::to_string(&raw_item.data).unwrap_or_else(|_| { + error!("Failed to serialize data to JSON: {:?}", raw_item.data); + DEFAULT_NONE.to_string() + }), + state_key_hash: raw_item.state_key_hash, + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + } + } } diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_ownerships.rs b/rust/processor/src/db/common/models/token_v2_models/v2_token_ownerships.rs similarity index 76% rename from rust/processor/src/db/common/models/token_v2_models/raw_v2_token_ownerships.rs rename to rust/processor/src/db/common/models/token_v2_models/v2_token_ownerships.rs index b6ded57da..711c73274 100644 --- a/rust/processor/src/db/common/models/token_v2_models/raw_v2_token_ownerships.rs +++ b/rust/processor/src/db/common/models/token_v2_models/v2_token_ownerships.rs @@ -6,18 +6,18 @@ #![allow(clippy::unused_unit)] use crate::{ + bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, db::{ common::models::{ 
object_models::v2_object_utils::{ObjectAggregatedDataMapping, ObjectWithMetadata}, + token_models::{token_utils::TokenWriteSet, tokens::TableHandleToOwner}, token_v2_models::{ - raw_v2_token_datas::RawTokenDataV2, + v2_token_datas::TokenDataV2, v2_token_utils::{TokenStandard, TokenV2Burned, DEFAULT_OWNER_ADDRESS}, }, + DEFAULT_NONE, }, - postgres::models::{ - resources::FromWriteResource, - token_models::{token_utils::TokenWriteSet, tokens::TableHandleToOwner}, - }, + postgres::models::resources::FromWriteResource, }, schema::current_token_ownerships_v2, utils::{ @@ -26,21 +26,24 @@ use crate::{ }, }; use ahash::AHashMap; +use allocative_derive::Allocative; use anyhow::Context; use aptos_protos::transaction::v1::{ DeleteResource, DeleteTableItem, WriteResource, WriteTableItem, }; -use bigdecimal::{BigDecimal, One, Zero}; +use bigdecimal::{BigDecimal, One, ToPrimitive, Zero}; use diesel::prelude::*; use diesel_async::RunQueryDsl; use field_count::FieldCount; +use parquet_derive::ParquetRecordWriter; use serde::{Deserialize, Serialize}; +use tracing::error; // PK of current_token_ownerships_v2, i.e. token_data_id, property_version_v1, owner_address, storage_id pub type CurrentTokenOwnershipV2PK = (String, BigDecimal, String, String); #[derive(Clone, Debug, Deserialize, FieldCount, Serialize)] -pub struct RawTokenOwnershipV2 { +pub struct TokenOwnershipV2 { pub transaction_version: i64, pub write_set_change_index: i64, pub token_data_id: String, @@ -58,11 +61,11 @@ pub struct RawTokenOwnershipV2 { } pub trait TokenOwnershipV2Convertible { - fn from_raw(raw_item: RawTokenOwnershipV2) -> Self; + fn from_raw(raw_item: TokenOwnershipV2) -> Self; } #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct RawCurrentTokenOwnershipV2 { +pub struct CurrentTokenOwnershipV2 { pub token_data_id: String, pub property_version_v1: BigDecimal, pub owner_address: String, @@ -78,7 +81,7 @@ pub struct RawCurrentTokenOwnershipV2 { pub non_transferrable_by_owner: Option, } -impl Ord for RawCurrentTokenOwnershipV2 { +impl Ord for CurrentTokenOwnershipV2 { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.token_data_id .cmp(&other.token_data_id) @@ -88,14 +91,14 @@ impl Ord for RawCurrentTokenOwnershipV2 { } } -impl PartialOrd for RawCurrentTokenOwnershipV2 { +impl PartialOrd for CurrentTokenOwnershipV2 { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } pub trait CurrentTokenOwnershipV2Convertible { - fn from_raw(raw_item: RawCurrentTokenOwnershipV2) -> Self; + fn from_raw(raw_item: CurrentTokenOwnershipV2) -> Self; } // Facilitate tracking when a token is burned @@ -125,15 +128,15 @@ pub struct CurrentTokenOwnershipV2Query { pub non_transferrable_by_owner: Option, } -impl RawTokenOwnershipV2 { +impl TokenOwnershipV2 { /// For nfts it's the same resources that we parse tokendatas from so we leverage the work done in there to get ownership data /// Vecs are returned because there could be multiple transfers in a single transaction and we need to document each one here. 
pub fn get_nft_v2_from_token_data( - token_data: &RawTokenDataV2, + token_data: &TokenDataV2, object_metadatas: &ObjectAggregatedDataMapping, ) -> anyhow::Result<( Vec, - AHashMap, + AHashMap, )> { let mut ownerships = vec![]; let mut current_ownerships = AHashMap::new(); @@ -179,7 +182,7 @@ impl RawTokenOwnershipV2 { owner_address.clone(), storage_id.clone(), ), - RawCurrentTokenOwnershipV2 { + CurrentTokenOwnershipV2 { token_data_id: token_data_id.clone(), property_version_v1: BigDecimal::zero(), owner_address, @@ -228,7 +231,7 @@ impl RawTokenOwnershipV2 { transfer_event.get_from_address(), storage_id.clone(), ), - RawCurrentTokenOwnershipV2 { + CurrentTokenOwnershipV2 { token_data_id: token_data_id.clone(), property_version_v1: BigDecimal::zero(), // previous owner @@ -260,7 +263,7 @@ impl RawTokenOwnershipV2 { tokens_burned: &TokenV2Burned, object_metadatas: &ObjectAggregatedDataMapping, db_context: &mut Option>, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let token_data_id = standardize_address(&write_resource.address.to_string()); if tokens_burned .get(&standardize_address(&token_data_id)) @@ -302,7 +305,7 @@ impl RawTokenOwnershipV2 { transaction_timestamp: txn_timestamp, non_transferrable_by_owner: Some(non_transferrable_by_owner), }, - RawCurrentTokenOwnershipV2 { + CurrentTokenOwnershipV2 { token_data_id, property_version_v1: BigDecimal::zero(), owner_address, @@ -343,7 +346,7 @@ impl RawTokenOwnershipV2 { prior_nft_ownership: &AHashMap, tokens_burned: &TokenV2Burned, db_context: &mut Option>, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let token_address = standardize_address(&delete_resource.address.to_string()); Self::get_burned_nft_v2_helper( &token_address, @@ -365,7 +368,7 @@ impl RawTokenOwnershipV2 { prior_nft_ownership: &AHashMap, tokens_burned: &TokenV2Burned, db_context: &mut Option>, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let token_address = standardize_address(token_address); if let Some(burn_event) = tokens_burned.get(&token_address) { // 1. 
Try to lookup token address in burn event mapping @@ -434,7 +437,7 @@ impl RawTokenOwnershipV2 { transaction_timestamp: txn_timestamp, non_transferrable_by_owner: None, // default }, - RawCurrentTokenOwnershipV2 { + CurrentTokenOwnershipV2 { token_data_id, property_version_v1: BigDecimal::zero(), owner_address: previous_owner, @@ -461,7 +464,7 @@ impl RawTokenOwnershipV2 { write_set_change_index: i64, txn_timestamp: chrono::NaiveDateTime, table_handle_to_owner: &TableHandleToOwner, - ) -> anyhow::Result)>> { + ) -> anyhow::Result)>> { let table_item_data = table_item.data.as_ref().unwrap(); let maybe_token = match TokenWriteSet::from_table_item_type( @@ -488,7 +491,7 @@ impl RawTokenOwnershipV2 { } let owner_address = tm.get_owner_address(); ( - Some(RawCurrentTokenOwnershipV2 { + Some(CurrentTokenOwnershipV2 { token_data_id: token_data_id.clone(), property_version_v1: token_id_struct.property_version.clone(), owner_address: owner_address.clone(), @@ -541,7 +544,7 @@ impl RawTokenOwnershipV2 { write_set_change_index: i64, txn_timestamp: chrono::NaiveDateTime, table_handle_to_owner: &TableHandleToOwner, - ) -> anyhow::Result)>> { + ) -> anyhow::Result)>> { let table_item_data = table_item.data.as_ref().unwrap(); let maybe_token_id = match TokenWriteSet::from_table_item_type( @@ -566,7 +569,7 @@ impl RawTokenOwnershipV2 { } let owner_address = tm.get_owner_address(); ( - Some(RawCurrentTokenOwnershipV2 { + Some(CurrentTokenOwnershipV2 { token_data_id: token_data_id.clone(), property_version_v1: token_id_struct.property_version.clone(), owner_address: owner_address.clone(), @@ -656,3 +659,187 @@ impl CurrentTokenOwnershipV2Query { .await } } + +/// This is the parquet version of CurrentTokenOwnershipV2 +#[derive( + Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, +)] +pub struct ParquetTokenOwnershipV2 { + pub txn_version: i64, + pub write_set_change_index: i64, + pub token_data_id: String, + pub property_version_v1: u64, + pub owner_address: Option, + pub storage_id: String, + pub amount: String, // this is a string representation of a bigdecimal + pub table_type_v1: Option, + pub token_properties_mutated_v1: Option, + pub is_soulbound_v2: Option, + pub token_standard: String, + #[allocative(skip)] + pub block_timestamp: chrono::NaiveDateTime, + pub non_transferrable_by_owner: Option, +} + +impl NamedTable for ParquetTokenOwnershipV2 { + const TABLE_NAME: &'static str = "token_ownerships_v2"; +} + +impl HasVersion for ParquetTokenOwnershipV2 { + fn version(&self) -> i64 { + self.txn_version + } +} + +impl GetTimeStamp for ParquetTokenOwnershipV2 { + fn get_timestamp(&self) -> chrono::NaiveDateTime { + self.block_timestamp + } +} + +impl From for ParquetTokenOwnershipV2 { + fn from(raw_item: TokenOwnershipV2) -> Self { + Self { + txn_version: raw_item.transaction_version, + write_set_change_index: raw_item.write_set_change_index, + token_data_id: raw_item.token_data_id, + property_version_v1: raw_item.property_version_v1.to_u64().unwrap(), + owner_address: raw_item.owner_address, + storage_id: raw_item.storage_id, + amount: raw_item.amount.to_string(), + table_type_v1: raw_item.table_type_v1, + token_properties_mutated_v1: raw_item + .token_properties_mutated_v1 + .map(|v| v.to_string()), + is_soulbound_v2: raw_item.is_soulbound_v2, + token_standard: raw_item.token_standard, + block_timestamp: raw_item.transaction_timestamp, + non_transferrable_by_owner: raw_item.non_transferrable_by_owner, + } + } +} + +#[derive( + Allocative, Clone, Debug, Default, 
Deserialize, FieldCount, ParquetRecordWriter, Serialize, +)] +pub struct ParquetCurrentTokenOwnershipV2 { + pub token_data_id: String, + pub property_version_v1: u64, // BigDecimal, + pub owner_address: String, + pub storage_id: String, + pub amount: String, // BigDecimal, + pub table_type_v1: Option, + pub token_properties_mutated_v1: Option, // Option, + pub is_soulbound_v2: Option, + pub token_standard: String, + pub is_fungible_v2: Option, + pub last_transaction_version: i64, + #[allocative(skip)] + pub last_transaction_timestamp: chrono::NaiveDateTime, + pub non_transferrable_by_owner: Option, +} + +impl NamedTable for ParquetCurrentTokenOwnershipV2 { + const TABLE_NAME: &'static str = "current_token_ownerships_v2"; +} + +impl HasVersion for ParquetCurrentTokenOwnershipV2 { + fn version(&self) -> i64 { + self.last_transaction_version + } +} + +impl GetTimeStamp for ParquetCurrentTokenOwnershipV2 { + fn get_timestamp(&self) -> chrono::NaiveDateTime { + self.last_transaction_timestamp + } +} + +// Facilitate tracking when a token is burned +impl From for ParquetCurrentTokenOwnershipV2 { + fn from(raw_item: CurrentTokenOwnershipV2) -> Self { + Self { + token_data_id: raw_item.token_data_id, + property_version_v1: raw_item.property_version_v1.to_u64().unwrap(), + owner_address: raw_item.owner_address, + storage_id: raw_item.storage_id, + amount: raw_item.amount.to_string(), + table_type_v1: raw_item.table_type_v1, + token_properties_mutated_v1: raw_item + .token_properties_mutated_v1 + .and_then(|v| { + canonical_json::to_string(&v) + .map_err(|e| { + error!("Failed to convert token_properties_mutated_v1: {:?}", e); + e + }) + .ok() + }) + .or_else(|| Some(DEFAULT_NONE.to_string())), + is_soulbound_v2: raw_item.is_soulbound_v2, + token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + non_transferrable_by_owner: raw_item.non_transferrable_by_owner, + } + } +} + +/// This is the postgres version of CurrentTokenOwnershipV2 +#[derive( + Clone, Debug, Deserialize, Eq, FieldCount, Identifiable, Insertable, PartialEq, Serialize, +)] +#[diesel(primary_key(token_data_id, property_version_v1, owner_address, storage_id))] +#[diesel(table_name = current_token_ownerships_v2)] +pub struct PostgresCurrentTokenOwnershipV2 { + pub token_data_id: String, + pub property_version_v1: BigDecimal, + pub owner_address: String, + pub storage_id: String, + pub amount: BigDecimal, + pub table_type_v1: Option, + pub token_properties_mutated_v1: Option, + pub is_soulbound_v2: Option, + pub token_standard: String, + pub is_fungible_v2: Option, + pub last_transaction_version: i64, + pub last_transaction_timestamp: chrono::NaiveDateTime, + pub non_transferrable_by_owner: Option, +} + +impl Ord for PostgresCurrentTokenOwnershipV2 { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.token_data_id + .cmp(&other.token_data_id) + .then(self.property_version_v1.cmp(&other.property_version_v1)) + .then(self.owner_address.cmp(&other.owner_address)) + .then(self.storage_id.cmp(&other.storage_id)) + } +} + +impl PartialOrd for PostgresCurrentTokenOwnershipV2 { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl From for PostgresCurrentTokenOwnershipV2 { + fn from(raw_item: CurrentTokenOwnershipV2) -> Self { + Self { + token_data_id: raw_item.token_data_id, + property_version_v1: raw_item.property_version_v1, + owner_address: 
raw_item.owner_address, + storage_id: raw_item.storage_id, + amount: raw_item.amount, + table_type_v1: raw_item.table_type_v1, + token_properties_mutated_v1: raw_item.token_properties_mutated_v1, + is_soulbound_v2: raw_item.is_soulbound_v2, + token_standard: raw_item.token_standard, + is_fungible_v2: raw_item.is_fungible_v2, + last_transaction_version: raw_item.last_transaction_version, + last_transaction_timestamp: raw_item.last_transaction_timestamp, + non_transferrable_by_owner: raw_item.non_transferrable_by_owner, + } + } +} diff --git a/rust/processor/src/db/common/models/token_v2_models/v2_token_utils.rs b/rust/processor/src/db/common/models/token_v2_models/v2_token_utils.rs index 6f70eb0f9..f29bc9d68 100644 --- a/rust/processor/src/db/common/models/token_v2_models/v2_token_utils.rs +++ b/rust/processor/src/db/common/models/token_v2_models/v2_token_utils.rs @@ -5,9 +5,9 @@ #![allow(clippy::extra_unused_lifetimes)] use crate::{ - db::{ - common::models::object_models::v2_object_utils::CurrentObjectPK, - postgres::models::token_models::token_utils::{NAME_LENGTH, URI_LENGTH}, + db::common::models::{ + object_models::v2_object_utils::CurrentObjectPK, + token_models::token_utils::{NAME_LENGTH, URI_LENGTH}, }, utils::util::{ deserialize_from_string, deserialize_token_object_property_map_from_bcs_hexstring, diff --git a/rust/processor/src/db/parquet/models/mod.rs b/rust/processor/src/db/parquet/models/mod.rs index 318613f27..1266dcf3b 100644 --- a/rust/processor/src/db/parquet/models/mod.rs +++ b/rust/processor/src/db/parquet/models/mod.rs @@ -5,7 +5,6 @@ pub mod event_models; pub mod fungible_asset_models; pub mod object_models; pub mod stake_models; -pub mod token_v2_models; pub mod transaction_metadata_model; pub mod user_transaction_models; const DEFAULT_NONE: &str = "NULL"; diff --git a/rust/processor/src/db/parquet/models/token_v2_models/mod.rs b/rust/processor/src/db/parquet/models/token_v2_models/mod.rs deleted file mode 100644 index 1a959578f..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub mod token_claims; -pub mod v1_token_royalty; -pub mod v2_token_activities; -pub mod v2_token_datas; -pub mod v2_token_metadata; -pub mod v2_token_ownerships; diff --git a/rust/processor/src/db/parquet/models/token_v2_models/token_claims.rs b/rust/processor/src/db/parquet/models/token_v2_models/token_claims.rs deleted file mode 100644 index 26f148d89..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/token_claims.rs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::common::models::token_v2_models::raw_token_claims::{ - CurrentTokenPendingClaimConvertible, RawCurrentTokenPendingClaim, - }, -}; -use allocative_derive::Allocative; -use bigdecimal::ToPrimitive; -use field_count::FieldCount; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct CurrentTokenPendingClaim { - pub token_data_id_hash: String, - pub property_version: u64, - pub from_address: String, - pub to_address: String, - pub collection_data_id_hash: String, - pub creator_address: String, - pub collection_name: 
String, - pub name: String, - pub amount: String, // String format of BigDecimal - pub table_handle: String, - pub last_transaction_version: i64, - #[allocative(skip)] - pub last_transaction_timestamp: chrono::NaiveDateTime, - pub token_data_id: String, - pub collection_id: String, -} - -impl NamedTable for CurrentTokenPendingClaim { - const TABLE_NAME: &'static str = "current_token_pending_claims"; -} - -impl HasVersion for CurrentTokenPendingClaim { - fn version(&self) -> i64 { - self.last_transaction_version - } -} - -impl GetTimeStamp for CurrentTokenPendingClaim { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.last_transaction_timestamp - } -} - -impl CurrentTokenPendingClaimConvertible for CurrentTokenPendingClaim { - fn from_raw(raw_item: RawCurrentTokenPendingClaim) -> Self { - Self { - token_data_id_hash: raw_item.token_data_id_hash, - property_version: raw_item - .property_version - .to_u64() - .expect("Failed to convert property_version to u64"), - from_address: raw_item.from_address, - to_address: raw_item.to_address, - collection_data_id_hash: raw_item.collection_data_id_hash, - creator_address: raw_item.creator_address, - collection_name: raw_item.collection_name, - name: raw_item.name, - amount: raw_item.amount.to_string(), // (assuming amount is non-critical) - table_handle: raw_item.table_handle, - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - token_data_id: raw_item.token_data_id, - collection_id: raw_item.collection_id, - } - } -} diff --git a/rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs b/rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs deleted file mode 100644 index 5f5102c6f..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::common::models::token_v2_models::raw_v1_token_royalty::{ - CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1, - }, -}; -use allocative_derive::Allocative; -use field_count::FieldCount; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct CurrentTokenRoyaltyV1 { - pub token_data_id: String, - pub payee_address: String, - pub royalty_points_numerator: String, // String format of BigDecimal - pub royalty_points_denominator: String, // String format of BigDecimal - pub last_transaction_version: i64, - #[allocative(skip)] - pub last_transaction_timestamp: chrono::NaiveDateTime, -} - -impl NamedTable for CurrentTokenRoyaltyV1 { - const TABLE_NAME: &'static str = "current_token_royalties_v1"; -} - -impl HasVersion for CurrentTokenRoyaltyV1 { - fn version(&self) -> i64 { - self.last_transaction_version - } -} - -impl GetTimeStamp for CurrentTokenRoyaltyV1 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.last_transaction_timestamp - } -} - -impl CurrentTokenRoyaltyV1Convertible for CurrentTokenRoyaltyV1 { - // TODO: consider returning a Result - fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self { - Self { - token_data_id: raw_item.token_data_id, - 
payee_address: raw_item.payee_address, - royalty_points_numerator: raw_item.royalty_points_numerator.to_string(), - royalty_points_denominator: raw_item.royalty_points_denominator.to_string(), - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - } - } -} diff --git a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_activities.rs b/rust/processor/src/db/parquet/models/token_v2_models/v2_token_activities.rs deleted file mode 100644 index 36b6df2fb..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_activities.rs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::common::models::token_v2_models::raw_v2_token_activities::{ - RawTokenActivityV2, TokenActivityV2Convertible, - }, -}; -use allocative_derive::Allocative; -use bigdecimal::ToPrimitive; -use field_count::FieldCount; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct TokenActivityV2 { - pub txn_version: i64, - pub event_index: i64, - pub event_account_address: String, - pub token_data_id: String, - pub property_version_v1: u64, // BigDecimal - pub type_: String, - pub from_address: Option, - pub to_address: Option, - pub token_amount: String, // BigDecimal - pub before_value: Option, - pub after_value: Option, - pub entry_function_id_str: Option, - pub token_standard: String, - pub is_fungible_v2: Option, - #[allocative(skip)] - pub block_timestamp: chrono::NaiveDateTime, -} - -impl NamedTable for TokenActivityV2 { - const TABLE_NAME: &'static str = "token_activities_v2"; -} - -impl HasVersion for TokenActivityV2 { - fn version(&self) -> i64 { - self.txn_version - } -} - -impl GetTimeStamp for TokenActivityV2 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.block_timestamp - } -} - -impl TokenActivityV2Convertible for TokenActivityV2 { - // TODO: consider returning a Result - fn from_raw(raw_item: RawTokenActivityV2) -> Self { - Self { - txn_version: raw_item.transaction_version, - event_index: raw_item.event_index, - event_account_address: raw_item.event_account_address, - token_data_id: raw_item.token_data_id, - property_version_v1: raw_item.property_version_v1.to_u64().unwrap(), - type_: raw_item.type_, - from_address: raw_item.from_address, - to_address: raw_item.to_address, - token_amount: raw_item.token_amount.to_string(), - before_value: raw_item.before_value, - after_value: raw_item.after_value, - entry_function_id_str: raw_item.entry_function_id_str, - token_standard: raw_item.token_standard, - is_fungible_v2: raw_item.is_fungible_v2, - block_timestamp: raw_item.transaction_timestamp, - } - } -} diff --git a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/db/parquet/models/token_v2_models/v2_token_datas.rs deleted file mode 100644 index cca78c2b3..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_datas.rs +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad 
-#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::{ - common::models::token_v2_models::raw_v2_token_datas::{ - CurrentTokenDataV2Convertible, RawCurrentTokenDataV2, RawTokenDataV2, - TokenDataV2Convertible, - }, - parquet::models::DEFAULT_NONE, - }, -}; -use allocative_derive::Allocative; -use anyhow::Context; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; -use tracing::error; - -#[derive(Allocative, Clone, Debug, Default, Deserialize, ParquetRecordWriter, Serialize)] -pub struct TokenDataV2 { - pub txn_version: i64, - pub write_set_change_index: i64, - pub token_data_id: String, - pub collection_id: String, - pub token_name: String, - pub largest_property_version_v1: Option, // String format of BigDecimal - pub token_uri: String, - pub token_properties: String, - pub description: String, - pub token_standard: String, - pub is_fungible_v2: Option, - #[allocative(skip)] - pub block_timestamp: chrono::NaiveDateTime, - pub is_deleted_v2: Option, -} - -impl NamedTable for TokenDataV2 { - const TABLE_NAME: &'static str = "token_datas_v2"; -} - -impl HasVersion for TokenDataV2 { - fn version(&self) -> i64 { - self.txn_version - } -} - -impl GetTimeStamp for TokenDataV2 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.block_timestamp - } -} - -impl TokenDataV2Convertible for TokenDataV2 { - fn from_raw(raw_item: RawTokenDataV2) -> Self { - Self { - txn_version: raw_item.transaction_version, - write_set_change_index: raw_item.write_set_change_index, - token_data_id: raw_item.token_data_id, - collection_id: raw_item.collection_id, - token_name: raw_item.token_name, - largest_property_version_v1: raw_item - .largest_property_version_v1 - .map(|v| v.to_string()), - token_uri: raw_item.token_uri, - token_properties: canonical_json::to_string(&raw_item.token_properties.clone()) - .context("Failed to serialize token properties") - .unwrap(), - description: raw_item.description, - token_standard: raw_item.token_standard, - is_fungible_v2: raw_item.is_fungible_v2, - block_timestamp: raw_item.transaction_timestamp, - is_deleted_v2: raw_item.is_deleted_v2, - } - } -} - -#[derive(Allocative, Clone, Debug, Default, Deserialize, ParquetRecordWriter, Serialize)] -pub struct CurrentTokenDataV2 { - pub token_data_id: String, - pub collection_id: String, - pub token_name: String, - pub maximum: Option, // BigDecimal - pub supply: Option, // BigDecimal - pub largest_property_version_v1: Option, // String format of BigDecimal - pub token_uri: String, - pub token_properties: String, // serde_json::Value, - pub description: String, - pub token_standard: String, - pub is_fungible_v2: Option, - pub last_transaction_version: i64, - #[allocative(skip)] - pub last_transaction_timestamp: chrono::NaiveDateTime, - // Deprecated, but still here for backwards compatibility - pub decimals: Option, - pub is_deleted_v2: Option, -} - -impl NamedTable for CurrentTokenDataV2 { - const TABLE_NAME: &'static str = "current_token_datas_v2"; -} - -impl HasVersion for CurrentTokenDataV2 { - fn version(&self) -> i64 { - self.last_transaction_version - } -} - -impl GetTimeStamp for CurrentTokenDataV2 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.last_transaction_timestamp - } -} - -impl CurrentTokenDataV2Convertible for CurrentTokenDataV2 { - fn from_raw(raw_item: RawCurrentTokenDataV2) -> Self { - Self { - token_data_id: raw_item.token_data_id, - 
collection_id: raw_item.collection_id, - token_name: raw_item.token_name, - maximum: raw_item.maximum.map(|v| v.to_string()), - supply: raw_item.supply.map(|v| v.to_string()), - largest_property_version_v1: raw_item - .largest_property_version_v1 - .map(|v| v.to_string()), - token_uri: raw_item.token_uri, - token_properties: canonical_json::to_string(&raw_item.token_properties).unwrap_or_else( - |_| { - error!( - "Failed to serialize token_properties to JSON: {:?}", - raw_item.token_properties - ); - DEFAULT_NONE.to_string() - }, - ), - description: raw_item.description, - token_standard: raw_item.token_standard, - is_fungible_v2: raw_item.is_fungible_v2, - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - decimals: raw_item.decimals, - is_deleted_v2: raw_item.is_deleted_v2, - } - } -} diff --git a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs b/rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs deleted file mode 100644 index 3db1bf630..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::{ - common::models::token_v2_models::raw_v2_token_metadata::{ - CurrentTokenV2MetadataConvertible, RawCurrentTokenV2Metadata, - }, - parquet::models::DEFAULT_NONE, - }, -}; -use allocative_derive::Allocative; -use field_count::FieldCount; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; -use tracing::error; - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct CurrentTokenV2Metadata { - pub object_address: String, - pub resource_type: String, - pub data: String, - pub state_key_hash: String, - pub last_transaction_version: i64, - #[allocative(skip)] - pub last_transaction_timestamp: chrono::NaiveDateTime, -} -impl NamedTable for CurrentTokenV2Metadata { - const TABLE_NAME: &'static str = "current_token_v2_metadata"; -} - -impl HasVersion for CurrentTokenV2Metadata { - fn version(&self) -> i64 { - self.last_transaction_version - } -} - -impl GetTimeStamp for CurrentTokenV2Metadata { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.last_transaction_timestamp - } -} - -impl CurrentTokenV2MetadataConvertible for CurrentTokenV2Metadata { - // TODO: consider returning a Result - fn from_raw(raw_item: RawCurrentTokenV2Metadata) -> Self { - Self { - object_address: raw_item.object_address, - resource_type: raw_item.resource_type, - data: canonical_json::to_string(&raw_item.data).unwrap_or_else(|_| { - error!("Failed to serialize data to JSON: {:?}", raw_item.data); - DEFAULT_NONE.to_string() - }), - state_key_hash: raw_item.state_key_hash, - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - } - } -} diff --git a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_ownerships.rs b/rust/processor/src/db/parquet/models/token_v2_models/v2_token_ownerships.rs deleted file mode 100644 index 875d4ff73..000000000 --- a/rust/processor/src/db/parquet/models/token_v2_models/v2_token_ownerships.rs +++ /dev/null @@ -1,148 
+0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::{ - common::models::token_v2_models::raw_v2_token_ownerships::{ - CurrentTokenOwnershipV2Convertible, RawCurrentTokenOwnershipV2, RawTokenOwnershipV2, - TokenOwnershipV2Convertible, - }, - parquet::models::DEFAULT_NONE, - }, -}; -use allocative_derive::Allocative; -use bigdecimal::ToPrimitive; -use field_count::FieldCount; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; -use tracing::error; - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct TokenOwnershipV2 { - pub txn_version: i64, - pub write_set_change_index: i64, - pub token_data_id: String, - pub property_version_v1: u64, - pub owner_address: Option, - pub storage_id: String, - pub amount: String, // this is a string representation of a bigdecimal - pub table_type_v1: Option, - pub token_properties_mutated_v1: Option, - pub is_soulbound_v2: Option, - pub token_standard: String, - #[allocative(skip)] - pub block_timestamp: chrono::NaiveDateTime, - pub non_transferrable_by_owner: Option, -} - -impl NamedTable for TokenOwnershipV2 { - const TABLE_NAME: &'static str = "token_ownerships_v2"; -} - -impl HasVersion for TokenOwnershipV2 { - fn version(&self) -> i64 { - self.txn_version - } -} - -impl GetTimeStamp for TokenOwnershipV2 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.block_timestamp - } -} - -impl TokenOwnershipV2Convertible for TokenOwnershipV2 { - fn from_raw(raw_item: RawTokenOwnershipV2) -> Self { - Self { - txn_version: raw_item.transaction_version, - write_set_change_index: raw_item.write_set_change_index, - token_data_id: raw_item.token_data_id, - property_version_v1: raw_item.property_version_v1.to_u64().unwrap(), - owner_address: raw_item.owner_address, - storage_id: raw_item.storage_id, - amount: raw_item.amount.to_string(), - table_type_v1: raw_item.table_type_v1, - token_properties_mutated_v1: raw_item - .token_properties_mutated_v1 - .map(|v| v.to_string()), - is_soulbound_v2: raw_item.is_soulbound_v2, - token_standard: raw_item.token_standard, - block_timestamp: raw_item.transaction_timestamp, - non_transferrable_by_owner: raw_item.non_transferrable_by_owner, - } - } -} - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct CurrentTokenOwnershipV2 { - pub token_data_id: String, - pub property_version_v1: u64, // BigDecimal, - pub owner_address: String, - pub storage_id: String, - pub amount: String, // BigDecimal, - pub table_type_v1: Option, - pub token_properties_mutated_v1: Option, // Option, - pub is_soulbound_v2: Option, - pub token_standard: String, - pub is_fungible_v2: Option, - pub last_transaction_version: i64, - #[allocative(skip)] - pub last_transaction_timestamp: chrono::NaiveDateTime, - pub non_transferrable_by_owner: Option, -} - -impl NamedTable for CurrentTokenOwnershipV2 { - const TABLE_NAME: &'static str = "current_token_ownerships_v2"; -} - -impl HasVersion for CurrentTokenOwnershipV2 { - fn version(&self) -> i64 { - self.last_transaction_version - } -} - -impl GetTimeStamp for CurrentTokenOwnershipV2 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.last_transaction_timestamp - } -} 
- -// Facilitate tracking when a token is burned -impl CurrentTokenOwnershipV2Convertible for CurrentTokenOwnershipV2 { - fn from_raw(raw_item: RawCurrentTokenOwnershipV2) -> Self { - Self { - token_data_id: raw_item.token_data_id, - property_version_v1: raw_item.property_version_v1.to_u64().unwrap(), - owner_address: raw_item.owner_address, - storage_id: raw_item.storage_id, - amount: raw_item.amount.to_string(), - table_type_v1: raw_item.table_type_v1, - token_properties_mutated_v1: raw_item - .token_properties_mutated_v1 - .and_then(|v| { - canonical_json::to_string(&v) - .map_err(|e| { - error!("Failed to convert token_properties_mutated_v1: {:?}", e); - e - }) - .ok() - }) - .or_else(|| Some(DEFAULT_NONE.to_string())), - is_soulbound_v2: raw_item.is_soulbound_v2, - token_standard: raw_item.token_standard, - is_fungible_v2: raw_item.is_fungible_v2, - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - non_transferrable_by_owner: raw_item.non_transferrable_by_owner, - } - } -} diff --git a/rust/processor/src/db/postgres/models/fungible_asset_models/v2_fungible_asset_utils.rs b/rust/processor/src/db/postgres/models/fungible_asset_models/v2_fungible_asset_utils.rs index b023fe4b8..0a89f7c73 100644 --- a/rust/processor/src/db/postgres/models/fungible_asset_models/v2_fungible_asset_utils.rs +++ b/rust/processor/src/db/postgres/models/fungible_asset_models/v2_fungible_asset_utils.rs @@ -5,9 +5,8 @@ #![allow(clippy::extra_unused_lifetimes)] use crate::{ - db::{ - common::models::token_v2_models::v2_token_utils::ResourceReference, - postgres::models::token_models::token_utils::URI_LENGTH, + db::common::models::{ + token_models::token_utils::URI_LENGTH, token_v2_models::v2_token_utils::ResourceReference, }, utils::util::{deserialize_from_string, truncate_str, Aggregator}, }; diff --git a/rust/processor/src/db/postgres/models/mod.rs b/rust/processor/src/db/postgres/models/mod.rs index 2d671f061..960b91e56 100644 --- a/rust/processor/src/db/postgres/models/mod.rs +++ b/rust/processor/src/db/postgres/models/mod.rs @@ -14,7 +14,5 @@ pub mod processor_status; pub mod property_map; pub mod resources; pub mod stake_models; -pub mod token_models; -pub mod token_v2_models; pub mod transaction_metadata_model; pub mod user_transactions_models; diff --git a/rust/processor/src/db/postgres/models/token_models/nft_points.rs b/rust/processor/src/db/postgres/models/token_models/nft_points.rs deleted file mode 100644 index 1debccb11..000000000 --- a/rust/processor/src/db/postgres/models/token_models/nft_points.rs +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - schema::nft_points, - utils::{ - counters::PROCESSOR_UNKNOWN_TYPE_COUNT, - util::{ - get_clean_payload, get_entry_function_from_user_request, parse_timestamp, - standardize_address, - }, - }, -}; -use aptos_protos::transaction::v1::{transaction::TxnData, Transaction}; -use bigdecimal::BigDecimal; -use diesel::prelude::*; -use field_count::FieldCount; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] -#[diesel(primary_key(transaction_version))] -#[diesel(table_name = nft_points)] -pub struct NftPoints { - pub transaction_version: i64, - pub owner_address: String, - pub token_name: String, - pub 
point_type: String, - pub amount: BigDecimal, - pub transaction_timestamp: chrono::NaiveDateTime, -} - -impl NftPoints { - pub fn from_transaction( - transaction: &Transaction, - nft_points_contract: Option, - ) -> Option { - let txn_data = match transaction.txn_data.as_ref() { - Some(data) => data, - None => { - PROCESSOR_UNKNOWN_TYPE_COUNT - .with_label_values(&["NftPoints"]) - .inc(); - tracing::warn!( - transaction_version = transaction.version, - "Transaction data doesn't exist", - ); - return None; - }, - }; - let version = transaction.version as i64; - let timestamp = transaction - .timestamp - .as_ref() - .expect("Transaction timestamp doesn't exist!"); - let transaction_info = transaction - .info - .as_ref() - .expect("Transaction info doesn't exist!"); - if let Some(contract) = nft_points_contract { - if let TxnData::User(user_txn) = txn_data { - let user_request = user_txn - .request - .as_ref() - .expect("Sends is not present in user txn"); - - let payload = match user_request.payload.as_ref() { - Some(payload) => payload, - None => { - tracing::warn!(transaction_version = version, "Payload is empty."); - return None; - }, - }; - let entry_function_id_str = - get_entry_function_from_user_request(user_request).unwrap_or_default(); - - // If failed transaction, end - if !transaction_info.success { - return None; - } - if entry_function_id_str == contract { - let payload_cleaned = get_clean_payload(payload, version).unwrap(); - let args = payload_cleaned["arguments"] - .as_array() - .unwrap_or_else(|| { - tracing::error!( - transaction_version = version, - payload = ?payload_cleaned, - "Failed to get arguments from nft_points transaction" - ); - panic!() - }) - .iter() - .map(|x| { - unescape::unescape(x.as_str().unwrap_or_else(|| { - tracing::error!( - transaction_version = version, - payload = ?payload_cleaned, - "Failed to parse arguments from nft_points transaction" - ); - panic!() - })) - .unwrap_or_else(|| { - tracing::error!( - transaction_version = version, - payload = ?payload_cleaned, - "Failed to escape arguments from nft_points transaction" - ); - panic!() - }) - }) - .collect::>(); - let owner_address = standardize_address(&args[0]); - let amount = args[2].parse().unwrap_or_else(|_| { - tracing::error!( - transaction_version = version, - argument = &args[2], - "Failed to parse amount from nft_points transaction" - ); - panic!() - }); - let transaction_timestamp = parse_timestamp(timestamp, version); - return Some(Self { - transaction_version: version, - owner_address, - token_name: args[1].clone(), - point_type: args[3].clone(), - amount, - transaction_timestamp, - }); - } - } - } - None - } -} diff --git a/rust/processor/src/db/postgres/models/token_models/token_activities.rs b/rust/processor/src/db/postgres/models/token_models/token_activities.rs deleted file mode 100644 index cf2da00ac..000000000 --- a/rust/processor/src/db/postgres/models/token_models/token_activities.rs +++ /dev/null @@ -1,281 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use super::token_utils::{TokenDataIdType, TokenEvent}; -use crate::{ - schema::token_activities, - utils::{ - counters::PROCESSOR_UNKNOWN_TYPE_COUNT, - util::{parse_timestamp, standardize_address}, - }, -}; -use aptos_protos::transaction::v1::{transaction::TxnData, Event, Transaction}; -use bigdecimal::{BigDecimal, Zero}; -use field_count::FieldCount; -use 
serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] -#[diesel(primary_key( - transaction_version, - event_account_address, - event_creation_number, - event_sequence_number -))] -#[diesel(table_name = token_activities)] -pub struct TokenActivity { - pub transaction_version: i64, - pub event_account_address: String, - pub event_creation_number: i64, - pub event_sequence_number: i64, - pub token_data_id_hash: String, - pub property_version: BigDecimal, - pub creator_address: String, - pub collection_name: String, - pub name: String, - pub transfer_type: String, - pub from_address: Option, - pub to_address: Option, - pub token_amount: BigDecimal, - pub coin_type: Option, - pub coin_amount: Option, - pub collection_data_id_hash: String, - pub transaction_timestamp: chrono::NaiveDateTime, - pub event_index: Option, -} - -/// A simplified TokenActivity (excluded common fields) to reduce code duplication -struct TokenActivityHelper<'a> { - pub token_data_id: &'a TokenDataIdType, - pub property_version: BigDecimal, - pub from_address: Option, - pub to_address: Option, - pub token_amount: BigDecimal, - pub coin_type: Option, - pub coin_amount: Option, -} - -impl TokenActivity { - pub fn from_transaction(transaction: &Transaction) -> Vec { - let mut token_activities = vec![]; - let txn_data = match transaction.txn_data.as_ref() { - Some(data) => data, - None => { - PROCESSOR_UNKNOWN_TYPE_COUNT - .with_label_values(&["TokenActivity"]) - .inc(); - tracing::warn!( - transaction_version = transaction.version, - "Transaction data doesn't exist", - ); - return token_activities; - }, - }; - if let TxnData::User(user_txn) = txn_data { - for (index, event) in user_txn.events.iter().enumerate() { - let txn_version = transaction.version as i64; - if let Some(token_event) = TokenEvent::from_event( - event.type_str.as_str(), - event.data.as_str(), - txn_version, - ) - .unwrap() - { - token_activities.push(Self::from_parsed_event( - event.type_str.as_str(), - event, - &token_event, - txn_version, - parse_timestamp(transaction.timestamp.as_ref().unwrap(), txn_version), - index as i64, - )) - } - } - } - token_activities - } - - pub fn from_parsed_event( - event_type: &str, - event: &Event, - token_event: &TokenEvent, - txn_version: i64, - txn_timestamp: chrono::NaiveDateTime, - event_index: i64, - ) -> Self { - let event_account_address = - standardize_address(event.key.as_ref().unwrap().account_address.as_str()); - let event_creation_number = event.key.as_ref().unwrap().creation_number as i64; - let event_sequence_number = event.sequence_number as i64; - let token_activity_helper = match token_event { - TokenEvent::MintTokenEvent(inner) => TokenActivityHelper { - token_data_id: &inner.id, - property_version: BigDecimal::zero(), - from_address: Some(event_account_address.clone()), - to_address: None, - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::Mint(inner) => TokenActivityHelper { - token_data_id: &inner.id, - property_version: BigDecimal::zero(), - from_address: Some(inner.get_account()), - to_address: None, - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::BurnTokenEvent(inner) => TokenActivityHelper { - token_data_id: &inner.id.token_data_id, - property_version: inner.id.property_version.clone(), - from_address: Some(event_account_address.clone()), - to_address: None, - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - 
}, - TokenEvent::Burn(inner) => TokenActivityHelper { - token_data_id: &inner.id.token_data_id, - property_version: inner.id.property_version.clone(), - from_address: Some(inner.get_account()), - to_address: None, - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::MutateTokenPropertyMapEvent(inner) => TokenActivityHelper { - token_data_id: &inner.new_id.token_data_id, - property_version: inner.new_id.property_version.clone(), - from_address: Some(event_account_address.clone()), - to_address: None, - token_amount: BigDecimal::zero(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::MutatePropertyMap(inner) => TokenActivityHelper { - token_data_id: &inner.new_id.token_data_id, - property_version: inner.new_id.property_version.clone(), - from_address: Some(inner.get_account()), - to_address: None, - token_amount: BigDecimal::zero(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::WithdrawTokenEvent(inner) => TokenActivityHelper { - token_data_id: &inner.id.token_data_id, - property_version: inner.id.property_version.clone(), - from_address: Some(event_account_address.clone()), - to_address: None, - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::TokenWithdraw(inner) => TokenActivityHelper { - token_data_id: &inner.id.token_data_id, - property_version: inner.id.property_version.clone(), - from_address: Some(inner.get_account()), - to_address: None, - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::DepositTokenEvent(inner) => TokenActivityHelper { - token_data_id: &inner.id.token_data_id, - property_version: inner.id.property_version.clone(), - from_address: None, - to_address: Some(standardize_address(&event_account_address)), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::TokenDeposit(inner) => TokenActivityHelper { - token_data_id: &inner.id.token_data_id, - property_version: inner.id.property_version.clone(), - from_address: None, - to_address: Some(inner.get_account()), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::OfferTokenEvent(inner) => TokenActivityHelper { - token_data_id: &inner.token_id.token_data_id, - property_version: inner.token_id.property_version.clone(), - from_address: Some(event_account_address.clone()), - to_address: Some(inner.get_to_address()), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::CancelTokenOfferEvent(inner) => TokenActivityHelper { - token_data_id: &inner.token_id.token_data_id, - property_version: inner.token_id.property_version.clone(), - from_address: Some(event_account_address.clone()), - to_address: Some(inner.get_to_address()), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::ClaimTokenEvent(inner) => TokenActivityHelper { - token_data_id: &inner.token_id.token_data_id, - property_version: inner.token_id.property_version.clone(), - from_address: Some(event_account_address.clone()), - to_address: Some(inner.get_to_address()), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::Offer(inner) => TokenActivityHelper { - token_data_id: &inner.token_id.token_data_id, - property_version: inner.token_id.property_version.clone(), - from_address: Some(inner.get_from_address()), - to_address: Some(inner.get_to_address()), - token_amount: inner.amount.clone(), - 
coin_type: None, - coin_amount: None, - }, - TokenEvent::CancelOffer(inner) => TokenActivityHelper { - token_data_id: &inner.token_id.token_data_id, - property_version: inner.token_id.property_version.clone(), - from_address: Some(inner.get_from_address()), - to_address: Some(inner.get_to_address()), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - TokenEvent::Claim(inner) => TokenActivityHelper { - token_data_id: &inner.token_id.token_data_id, - property_version: inner.token_id.property_version.clone(), - from_address: Some(inner.get_from_address()), - to_address: Some(inner.get_to_address()), - token_amount: inner.amount.clone(), - coin_type: None, - coin_amount: None, - }, - }; - let token_data_id = token_activity_helper.token_data_id; - Self { - event_account_address, - event_creation_number, - event_sequence_number, - token_data_id_hash: token_data_id.to_hash(), - property_version: token_activity_helper.property_version, - collection_data_id_hash: token_data_id.get_collection_data_id_hash(), - creator_address: token_data_id.get_creator_address(), - collection_name: token_data_id.get_collection_trunc(), - name: token_data_id.get_name_trunc(), - transaction_version: txn_version, - transfer_type: event_type.to_string(), - from_address: token_activity_helper.from_address, - to_address: token_activity_helper.to_address, - token_amount: token_activity_helper.token_amount, - coin_type: token_activity_helper.coin_type, - coin_amount: token_activity_helper.coin_amount, - transaction_timestamp: txn_timestamp, - event_index: Some(event_index), - } - } -} diff --git a/rust/processor/src/db/postgres/models/token_models/token_claims.rs b/rust/processor/src/db/postgres/models/token_models/token_claims.rs deleted file mode 100644 index 5edc60338..000000000 --- a/rust/processor/src/db/postgres/models/token_models/token_claims.rs +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - db::common::models::token_v2_models::raw_token_claims::{ - CurrentTokenPendingClaimConvertible, RawCurrentTokenPendingClaim, - }, - schema::current_token_pending_claims, -}; -use bigdecimal::BigDecimal; -use field_count::FieldCount; -use serde::{Deserialize, Serialize}; - -#[derive( - Clone, Debug, Deserialize, Eq, FieldCount, Identifiable, Insertable, PartialEq, Serialize, -)] -#[diesel(primary_key(token_data_id_hash, property_version, from_address, to_address))] -#[diesel(table_name = current_token_pending_claims)] -pub struct CurrentTokenPendingClaim { - pub token_data_id_hash: String, - pub property_version: BigDecimal, - pub from_address: String, - pub to_address: String, - pub collection_data_id_hash: String, - pub creator_address: String, - pub collection_name: String, - pub name: String, - pub amount: BigDecimal, - pub table_handle: String, - pub last_transaction_version: i64, - pub last_transaction_timestamp: chrono::NaiveDateTime, - pub token_data_id: String, - pub collection_id: String, -} - -impl Ord for CurrentTokenPendingClaim { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.token_data_id_hash - .cmp(&other.token_data_id_hash) - .then(self.property_version.cmp(&other.property_version)) - .then(self.from_address.cmp(&other.from_address)) - .then(self.to_address.cmp(&other.to_address)) - } -} - -impl PartialOrd for CurrentTokenPendingClaim { - fn 
partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl CurrentTokenPendingClaimConvertible for CurrentTokenPendingClaim { - fn from_raw(raw_item: RawCurrentTokenPendingClaim) -> Self { - Self { - token_data_id_hash: raw_item.token_data_id_hash, - property_version: raw_item.property_version, - from_address: raw_item.from_address, - to_address: raw_item.to_address, - collection_data_id_hash: raw_item.collection_data_id_hash, - creator_address: raw_item.creator_address, - collection_name: raw_item.collection_name, - name: raw_item.name, - amount: raw_item.amount, - table_handle: raw_item.table_handle, - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - token_data_id: raw_item.token_data_id, - collection_id: raw_item.collection_id, - } - } -} diff --git a/rust/processor/src/db/postgres/models/token_v2_models/mod.rs b/rust/processor/src/db/postgres/models/token_v2_models/mod.rs deleted file mode 100644 index 051cf246a..000000000 --- a/rust/processor/src/db/postgres/models/token_v2_models/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -pub mod v1_token_royalty; -pub mod v2_collections; -pub mod v2_token_activities; -pub mod v2_token_datas; -pub mod v2_token_metadata; -pub mod v2_token_ownerships; diff --git a/rust/processor/src/db/postgres/models/token_v2_models/parquet_v2_collections.rs b/rust/processor/src/db/postgres/models/token_v2_models/parquet_v2_collections.rs deleted file mode 100644 index 9b3c31e9d..000000000 --- a/rust/processor/src/db/postgres/models/token_v2_models/parquet_v2_collections.rs +++ /dev/null @@ -1,306 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable}, - db::postgres::models::{ - object_models::v2_object_utils::ObjectAggregatedDataMapping, - token_models::{ - collection_datas::CollectionData, - token_utils::{CollectionDataIdType, TokenWriteSet}, - tokens::TableHandleToOwner, - }, - token_v2_models::{ - v2_collections::CreatorFromCollectionTableV1, - v2_token_utils::{TokenStandard, V2TokenResource}, - }, - }, - utils::{database::DbPoolConnection, util::standardize_address}, -}; -use allocative_derive::Allocative; -use anyhow::Context; -use aptos_protos::transaction::v1::{WriteResource, WriteTableItem}; -use bigdecimal::{BigDecimal, Zero}; -use diesel::{sql_query, sql_types::Text}; -use diesel_async::RunQueryDsl; -use field_count::FieldCount; -use parquet_derive::ParquetRecordWriter; -use serde::{Deserialize, Serialize}; - -#[derive( - Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize, -)] -pub struct CollectionV2 { - pub txn_version: i64, - pub write_set_change_index: i64, - pub collection_id: String, - pub creator_address: String, - pub collection_name: String, - pub description: String, - pub uri: String, - pub current_supply: String, - pub max_supply: Option, - pub total_minted_v2: Option, - pub mutable_description: Option, - pub mutable_uri: Option, - pub table_handle_v1: Option, - pub token_standard: String, - #[allocative(skip)] - pub block_timestamp: chrono::NaiveDateTime, -} - -impl NamedTable for CollectionV2 { - const TABLE_NAME: &'static str = "collection_v2"; -} - -impl HasVersion for 
CollectionV2 { - fn version(&self) -> i64 { - self.txn_version - } -} - -impl GetTimeStamp for CollectionV2 { - fn get_timestamp(&self) -> chrono::NaiveDateTime { - self.block_timestamp - } -} - -impl CollectionV2 { - pub fn get_v2_from_write_resource( - write_resource: &WriteResource, - txn_version: i64, - write_set_change_index: i64, - txn_timestamp: chrono::NaiveDateTime, - object_metadatas: &ObjectAggregatedDataMapping, - ) -> anyhow::Result> { - let type_str = crate::db::postgres::models::default_models::move_resources::MoveResource::get_outer_type_from_write_resource(write_resource); - if !V2TokenResource::is_resource_supported(type_str.as_str()) { - return Ok(None); - } - let resource = crate::db::postgres::models::default_models::move_resources::MoveResource::from_write_resource( - write_resource, - 0, // Placeholder, this isn't used anyway - txn_version, - 0, // Placeholder, this isn't used anyway - ); - - if let V2TokenResource::Collection(inner) = &V2TokenResource::from_resource( - &type_str, - resource.data.as_ref().unwrap(), - txn_version, - )? { - let (mut current_supply, mut max_supply, mut total_minted_v2) = - (BigDecimal::zero(), None, None); - let (mut mutable_description, mut mutable_uri) = (None, None); - if let Some(object_data) = object_metadatas.get(&resource.address) { - // Getting supply data (prefer fixed supply over unlimited supply although they should never appear at the same time anyway) - let fixed_supply = object_data.fixed_supply.as_ref(); - let unlimited_supply = object_data.unlimited_supply.as_ref(); - if let Some(supply) = unlimited_supply { - (current_supply, max_supply, total_minted_v2) = ( - supply.current_supply.clone(), - None, - Some(supply.total_minted.clone()), - ); - } - if let Some(supply) = fixed_supply { - (current_supply, max_supply, total_minted_v2) = ( - supply.current_supply.clone(), - Some(supply.max_supply.clone()), - Some(supply.total_minted.clone()), - ); - } - - // Aggregator V2 enables a separate struct for supply - let concurrent_supply = object_data.concurrent_supply.as_ref(); - if let Some(supply) = concurrent_supply { - (current_supply, max_supply, total_minted_v2) = ( - supply.current_supply.value.clone(), - if supply.current_supply.max_value == u64::MAX.into() { - None - } else { - Some(supply.current_supply.max_value.clone()) - }, - Some(supply.total_minted.value.clone()), - ); - } - - // Getting collection mutability config from AptosCollection - let collection = object_data.aptos_collection.as_ref(); - if let Some(collection) = collection { - mutable_description = Some(collection.mutable_description); - mutable_uri = Some(collection.mutable_uri); - } - } else { - // ObjectCore should not be missing, returning from entire function early - return Ok(None); - } - - let collection_id = resource.address.clone(); - let creator_address = inner.get_creator_address(); - let collection_name = inner.get_name_trunc(); - let description = inner.description.clone(); - let uri = inner.get_uri_trunc(); - - Ok(Some(Self { - txn_version, - write_set_change_index, - collection_id: collection_id.clone(), - creator_address: creator_address.clone(), - collection_name: collection_name.clone(), - description: description.clone(), - uri: uri.clone(), - current_supply: current_supply.to_string(), - max_supply: Some(max_supply.clone().unwrap().clone().to_string()), - total_minted_v2: Some(total_minted_v2.clone().unwrap().clone().to_string()), - mutable_description, - mutable_uri, - table_handle_v1: None, - token_standard: 
TokenStandard::V2.to_string(), - block_timestamp: txn_timestamp, - })) - } else { - Ok(None) - } - } - - pub async fn get_v1_from_write_table_item( - table_item: &WriteTableItem, - txn_version: i64, - write_set_change_index: i64, - txn_timestamp: chrono::NaiveDateTime, - table_handle_to_owner: &TableHandleToOwner, - conn: &mut DbPoolConnection<'_>, - query_retries: u32, - query_retry_delay_ms: u64, - ) -> anyhow::Result> { - let table_item_data = table_item.data.as_ref().unwrap(); - - let maybe_collection_data = match TokenWriteSet::from_table_item_type( - table_item_data.value_type.as_str(), - &table_item_data.value, - txn_version, - )? { - Some(TokenWriteSet::CollectionData(inner)) => Some(inner), - _ => None, - }; - if let Some(collection_data) = maybe_collection_data { - let table_handle = table_item.handle.to_string(); - let maybe_creator_address = table_handle_to_owner - .get(&standardize_address(&table_handle)) - .map(|table_metadata| table_metadata.get_owner_address()); - let mut creator_address = match maybe_creator_address { - Some(ca) => ca, - None => { - match Self::get_collection_creator_for_v1( - conn, - &table_handle, - query_retries, - query_retry_delay_ms, - ) - .await - .context(format!( - "Failed to get collection creator for table handle {}, txn version {}", - table_handle, txn_version - )) { - Ok(ca) => ca, - Err(_) => { - // Try our best by getting from the older collection data - match CollectionData::get_collection_creator( - conn, - &table_handle, - query_retries, - query_retry_delay_ms, - ) - .await - { - Ok(creator) => creator, - Err(_) => { - tracing::error!( - transaction_version = txn_version, - lookup_key = &table_handle, - "Failed to get collection v2 creator for table handle. You probably should backfill db." - ); - return Ok(None); - }, - } - }, - } - }, - }; - creator_address = standardize_address(&creator_address); - let collection_id_struct = - CollectionDataIdType::new(creator_address, collection_data.get_name().to_string()); - let collection_id = collection_id_struct.to_id(); - let collection_name = collection_data.get_name_trunc(); - let uri = collection_data.get_uri_trunc(); - - Ok(Some(Self { - txn_version, - write_set_change_index, - collection_id: collection_id.clone(), - creator_address: collection_id_struct.creator.clone(), - collection_name: collection_name.clone(), - description: collection_data.description.clone(), - uri: uri.clone(), - current_supply: collection_data.supply.to_string(), - max_supply: Some(collection_data.maximum.to_string()), - total_minted_v2: None, - mutable_uri: Some(collection_data.mutability_config.uri), - mutable_description: Some(collection_data.mutability_config.description), - table_handle_v1: Some(table_handle.clone()), - token_standard: TokenStandard::V1.to_string(), - block_timestamp: txn_timestamp, - })) - } else { - Ok(None) - } - } - - /// If collection data is not in resources of the same transaction, then try looking for it in the database. Since collection owner - /// cannot change, we can just look in the current_collection_datas table. - /// Retrying a few times since this collection could've been written in a separate thread. 
- async fn get_collection_creator_for_v1( - conn: &mut DbPoolConnection<'_>, - table_handle: &str, - query_retries: u32, - query_retry_delay_ms: u64, - ) -> anyhow::Result { - let mut tried = 0; - while tried < query_retries { - tried += 1; - match Self::get_by_table_handle(conn, table_handle).await { - Ok(creator) => return Ok(creator), - Err(_) => { - if tried < query_retries { - tokio::time::sleep(std::time::Duration::from_millis(query_retry_delay_ms)) - .await; - } - }, - } - } - Err(anyhow::anyhow!("Failed to get collection creator")) - } - - /// TODO: Change this to a KV store - async fn get_by_table_handle( - conn: &mut DbPoolConnection<'_>, - table_handle: &str, - ) -> anyhow::Result { - let mut res: Vec> = sql_query( - "SELECT creator_address FROM current_collections_v2 WHERE table_handle_v1 = $1", - ) - .bind::(table_handle) - .get_results(conn) - .await?; - Ok(res - .pop() - .context("collection result empty")? - .context("collection result null")? - .creator_address) - } -} diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs b/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs deleted file mode 100644 index 73109d3c9..000000000 --- a/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - db::common::models::token_v2_models::raw_v1_token_royalty::{ - CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1, - }, - schema::current_token_royalty_v1, -}; -use bigdecimal::BigDecimal; -use field_count::FieldCount; -use serde::{Deserialize, Serialize}; - -#[derive( - Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize, PartialEq, Eq, -)] -#[diesel(primary_key(token_data_id))] -#[diesel(table_name = current_token_royalty_v1)] -pub struct CurrentTokenRoyaltyV1 { - pub token_data_id: String, - pub payee_address: String, - pub royalty_points_numerator: BigDecimal, - pub royalty_points_denominator: BigDecimal, - pub last_transaction_version: i64, - pub last_transaction_timestamp: chrono::NaiveDateTime, -} - -impl Ord for CurrentTokenRoyaltyV1 { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.token_data_id.cmp(&other.token_data_id) - } -} -impl PartialOrd for CurrentTokenRoyaltyV1 { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl CurrentTokenRoyaltyV1Convertible for CurrentTokenRoyaltyV1 { - fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self { - Self { - token_data_id: raw_item.token_data_id, - payee_address: raw_item.payee_address, - royalty_points_numerator: raw_item.royalty_points_numerator, - royalty_points_denominator: raw_item.royalty_points_denominator, - last_transaction_version: raw_item.last_transaction_version, - last_transaction_timestamp: raw_item.last_transaction_timestamp, - } - } -} diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_activities.rs b/rust/processor/src/db/postgres/models/token_v2_models/v2_token_activities.rs deleted file mode 100644 index 038c80381..000000000 --- a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_activities.rs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad 
-#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - db::common::models::token_v2_models::raw_v2_token_activities::{ - RawTokenActivityV2, TokenActivityV2Convertible, - }, - schema::token_activities_v2, -}; -use bigdecimal::BigDecimal; -use field_count::FieldCount; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] -#[diesel(primary_key(transaction_version, event_index))] -#[diesel(table_name = token_activities_v2)] -pub struct TokenActivityV2 { - pub transaction_version: i64, - pub event_index: i64, - pub event_account_address: String, - pub token_data_id: String, - pub property_version_v1: BigDecimal, - pub type_: String, - pub from_address: Option, - pub to_address: Option, - pub token_amount: BigDecimal, - pub before_value: Option, - pub after_value: Option, - pub entry_function_id_str: Option, - pub token_standard: String, - pub is_fungible_v2: Option, - pub transaction_timestamp: chrono::NaiveDateTime, -} - -impl TokenActivityV2Convertible for TokenActivityV2 { - fn from_raw(raw_item: RawTokenActivityV2) -> Self { - Self { - transaction_version: raw_item.transaction_version, - event_index: raw_item.event_index, - event_account_address: raw_item.event_account_address, - token_data_id: raw_item.token_data_id, - property_version_v1: raw_item.property_version_v1, - type_: raw_item.type_, - from_address: raw_item.from_address, - to_address: raw_item.to_address, - token_amount: raw_item.token_amount, - before_value: raw_item.before_value, - after_value: raw_item.after_value, - entry_function_id_str: raw_item.entry_function_id_str, - token_standard: raw_item.token_standard, - is_fungible_v2: raw_item.is_fungible_v2, - transaction_timestamp: raw_item.transaction_timestamp, - } - } -} diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/db/postgres/models/token_v2_models/v2_token_datas.rs deleted file mode 100644 index 8490f4c45..000000000 --- a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_datas.rs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -// This is required because a diesel macro makes clippy sad -#![allow(clippy::extra_unused_lifetimes)] -#![allow(clippy::unused_unit)] - -use crate::{ - db::common::models::token_v2_models::raw_v2_token_datas::{ - CurrentTokenDataV2Convertible, RawCurrentTokenDataV2, RawTokenDataV2, - TokenDataV2Convertible, - }, - schema::{current_token_datas_v2, token_datas_v2}, -}; -use bigdecimal::BigDecimal; -use diesel::prelude::*; -use field_count::FieldCount; -use serde::{Deserialize, Serialize}; - -// PK of current_token_datas_v2, i.e. 
token_data_id
-pub type CurrentTokenDataV2PK = String;
-
-#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
-#[diesel(primary_key(transaction_version, write_set_change_index))]
-#[diesel(table_name = token_datas_v2)]
-pub struct TokenDataV2 {
-    pub transaction_version: i64,
-    pub write_set_change_index: i64,
-    pub token_data_id: String,
-    pub collection_id: String,
-    pub token_name: String,
-    pub maximum: Option<BigDecimal>,
-    pub supply: Option<BigDecimal>,
-    pub largest_property_version_v1: Option<BigDecimal>,
-    pub token_uri: String,
-    pub token_properties: serde_json::Value,
-    pub description: String,
-    pub token_standard: String,
-    pub is_fungible_v2: Option<bool>,
-    pub transaction_timestamp: chrono::NaiveDateTime,
-    // Deprecated, but still here for backwards compatibility
-    pub decimals: Option<i64>,
-    // Here for consistency but we don't need to actually fill it
-    // pub is_deleted_v2: Option<bool>,
-}
-
-impl TokenDataV2Convertible for TokenDataV2 {
-    fn from_raw(raw_item: RawTokenDataV2) -> Self {
-        Self {
-            transaction_version: raw_item.transaction_version,
-            write_set_change_index: raw_item.write_set_change_index,
-            token_data_id: raw_item.token_data_id,
-            collection_id: raw_item.collection_id,
-            token_name: raw_item.token_name,
-            maximum: raw_item.maximum,
-            supply: raw_item.supply,
-            largest_property_version_v1: raw_item.largest_property_version_v1,
-            token_uri: raw_item.token_uri,
-            token_properties: raw_item.token_properties,
-            description: raw_item.description,
-            token_standard: raw_item.token_standard,
-            is_fungible_v2: raw_item.is_fungible_v2,
-            transaction_timestamp: raw_item.transaction_timestamp,
-            decimals: raw_item.decimals,
-        }
-    }
-}
-
-#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
-#[diesel(primary_key(token_data_id))]
-#[diesel(table_name = current_token_datas_v2)]
-pub struct CurrentTokenDataV2 {
-    pub token_data_id: String,
-    pub collection_id: String,
-    pub token_name: String,
-    pub maximum: Option<BigDecimal>,
-    pub supply: Option<BigDecimal>,
-    pub largest_property_version_v1: Option<BigDecimal>,
-    pub token_uri: String,
-    pub token_properties: serde_json::Value,
-    pub description: String,
-    pub token_standard: String,
-    pub is_fungible_v2: Option<bool>,
-    pub last_transaction_version: i64,
-    pub last_transaction_timestamp: chrono::NaiveDateTime,
-    // Deprecated, but still here for backwards compatibility
-    pub decimals: Option<i64>,
-    pub is_deleted_v2: Option<bool>,
-}
-
-impl CurrentTokenDataV2Convertible for CurrentTokenDataV2 {
-    fn from_raw(raw_item: RawCurrentTokenDataV2) -> Self {
-        Self {
-            token_data_id: raw_item.token_data_id,
-            collection_id: raw_item.collection_id,
-            token_name: raw_item.token_name,
-            maximum: raw_item.maximum,
-            supply: raw_item.supply,
-            largest_property_version_v1: raw_item.largest_property_version_v1,
-            token_uri: raw_item.token_uri,
-            token_properties: raw_item.token_properties,
-            description: raw_item.description,
-            token_standard: raw_item.token_standard,
-            is_fungible_v2: raw_item.is_fungible_v2,
-            last_transaction_version: raw_item.last_transaction_version,
-            last_transaction_timestamp: raw_item.last_transaction_timestamp,
-            decimals: raw_item.decimals,
-            is_deleted_v2: raw_item.is_deleted_v2,
-        }
-    }
-}
diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_metadata.rs b/rust/processor/src/db/postgres/models/token_v2_models/v2_token_metadata.rs
deleted file mode 100644
index 39600d665..000000000
--- a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_metadata.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright © Aptos Foundation
-// SPDX-License-Identifier: Apache-2.0
-
-// This is required because a diesel macro makes clippy sad
-#![allow(clippy::extra_unused_lifetimes)]
-#![allow(clippy::unused_unit)]
-
-use crate::{
-    db::common::models::token_v2_models::raw_v2_token_metadata::{
-        CurrentTokenV2MetadataConvertible, RawCurrentTokenV2Metadata,
-    },
-    schema::current_token_v2_metadata,
-};
-use field_count::FieldCount;
-use serde::{Deserialize, Serialize};
-use serde_json::Value;
-
-// PK of current_objects, i.e. object_address, resource_type
-pub type CurrentTokenV2MetadataPK = (String, String);
-
-#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
-#[diesel(primary_key(object_address, resource_type))]
-#[diesel(table_name = current_token_v2_metadata)]
-pub struct CurrentTokenV2Metadata {
-    pub object_address: String,
-    pub resource_type: String,
-    pub data: Value,
-    pub state_key_hash: String,
-    pub last_transaction_version: i64,
-}
-
-impl CurrentTokenV2MetadataConvertible for CurrentTokenV2Metadata {
-    fn from_raw(raw_item: RawCurrentTokenV2Metadata) -> Self {
-        Self {
-            object_address: raw_item.object_address,
-            resource_type: raw_item.resource_type,
-            data: raw_item.data,
-            state_key_hash: raw_item.state_key_hash,
-            last_transaction_version: raw_item.last_transaction_version,
-        }
-    }
-}
diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_ownerships.rs b/rust/processor/src/db/postgres/models/token_v2_models/v2_token_ownerships.rs
deleted file mode 100644
index 8b12c375f..000000000
--- a/rust/processor/src/db/postgres/models/token_v2_models/v2_token_ownerships.rs
+++ /dev/null
@@ -1,115 +0,0 @@
-// Copyright © Aptos Foundation
-// SPDX-License-Identifier: Apache-2.0
-
-// This is required because a diesel macro makes clippy sad
-#![allow(clippy::extra_unused_lifetimes)]
-#![allow(clippy::unused_unit)]
-
-use crate::{
-    db::common::models::token_v2_models::raw_v2_token_ownerships::{
-        CurrentTokenOwnershipV2Convertible, RawCurrentTokenOwnershipV2, RawTokenOwnershipV2,
-        TokenOwnershipV2Convertible,
-    },
-    schema::{current_token_ownerships_v2, token_ownerships_v2},
-};
-use bigdecimal::BigDecimal;
-use diesel::prelude::*;
-use field_count::FieldCount;
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
-#[diesel(primary_key(transaction_version, write_set_change_index))]
-#[diesel(table_name = token_ownerships_v2)]
-pub struct TokenOwnershipV2 {
-    pub transaction_version: i64,
-    pub write_set_change_index: i64,
-    pub token_data_id: String,
-    pub property_version_v1: BigDecimal,
-    pub owner_address: Option<String>,
-    pub storage_id: String,
-    pub amount: BigDecimal,
-    pub table_type_v1: Option<String>,
-    pub token_properties_mutated_v1: Option<serde_json::Value>,
-    pub is_soulbound_v2: Option<bool>,
-    pub token_standard: String,
-    pub is_fungible_v2: Option<bool>,
-    pub transaction_timestamp: chrono::NaiveDateTime,
-    pub non_transferrable_by_owner: Option<bool>,
-}
-
-impl TokenOwnershipV2Convertible for TokenOwnershipV2 {
-    fn from_raw(raw_item: RawTokenOwnershipV2) -> Self {
-        Self {
-            transaction_version: raw_item.transaction_version,
-            write_set_change_index: raw_item.write_set_change_index,
-            token_data_id: raw_item.token_data_id,
-            property_version_v1: raw_item.property_version_v1,
-            owner_address: raw_item.owner_address,
-            storage_id: raw_item.storage_id,
-            amount: raw_item.amount,
-            table_type_v1: raw_item.table_type_v1,
-            token_properties_mutated_v1: raw_item.token_properties_mutated_v1,
-            is_soulbound_v2: raw_item.is_soulbound_v2,
-            token_standard: raw_item.token_standard,
-            is_fungible_v2: raw_item.is_fungible_v2,
-            transaction_timestamp: raw_item.transaction_timestamp,
-            non_transferrable_by_owner: raw_item.non_transferrable_by_owner,
-        }
-    }
-}
-#[derive(
-    Clone, Debug, Deserialize, Eq, FieldCount, Identifiable, Insertable, PartialEq, Serialize,
-)]
-#[diesel(primary_key(token_data_id, property_version_v1, owner_address, storage_id))]
-#[diesel(table_name = current_token_ownerships_v2)]
-pub struct CurrentTokenOwnershipV2 {
-    pub token_data_id: String,
-    pub property_version_v1: BigDecimal,
-    pub owner_address: String,
-    pub storage_id: String,
-    pub amount: BigDecimal,
-    pub table_type_v1: Option<String>,
-    pub token_properties_mutated_v1: Option<serde_json::Value>,
-    pub is_soulbound_v2: Option<bool>,
-    pub token_standard: String,
-    pub is_fungible_v2: Option<bool>,
-    pub last_transaction_version: i64,
-    pub last_transaction_timestamp: chrono::NaiveDateTime,
-    pub non_transferrable_by_owner: Option<bool>,
-}
-
-impl Ord for CurrentTokenOwnershipV2 {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.token_data_id
-            .cmp(&other.token_data_id)
-            .then(self.property_version_v1.cmp(&other.property_version_v1))
-            .then(self.owner_address.cmp(&other.owner_address))
-            .then(self.storage_id.cmp(&other.storage_id))
-    }
-}
-
-impl PartialOrd for CurrentTokenOwnershipV2 {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl CurrentTokenOwnershipV2Convertible for CurrentTokenOwnershipV2 {
-    fn from_raw(raw_item: RawCurrentTokenOwnershipV2) -> Self {
-        Self {
-            token_data_id: raw_item.token_data_id,
-            property_version_v1: raw_item.property_version_v1,
-            owner_address: raw_item.owner_address,
-            storage_id: raw_item.storage_id,
-            amount: raw_item.amount,
-            table_type_v1: raw_item.table_type_v1,
-            token_properties_mutated_v1: raw_item.token_properties_mutated_v1,
-            is_soulbound_v2: raw_item.is_soulbound_v2,
-            token_standard: raw_item.token_standard,
-            is_fungible_v2: raw_item.is_fungible_v2,
-            last_transaction_version: raw_item.last_transaction_version,
-            last_transaction_timestamp: raw_item.last_transaction_timestamp,
-            non_transferrable_by_owner: raw_item.non_transferrable_by_owner,
-        }
-    }
-}
diff --git a/rust/processor/src/processors/nft_metadata_processor.rs b/rust/processor/src/processors/nft_metadata_processor.rs
index 67e4a722e..3e1a476c9 100644
--- a/rust/processor/src/processors/nft_metadata_processor.rs
+++ b/rust/processor/src/processors/nft_metadata_processor.rs
@@ -8,16 +8,13 @@ use crate::{
             object_models::v2_object_utils::{
                 ObjectAggregatedData, ObjectAggregatedDataMapping, ObjectWithMetadata,
             },
-            token_v2_models::raw_v2_token_datas::{RawCurrentTokenDataV2, RawTokenDataV2},
-        },
-        postgres::models::{
-            resources::FromWriteResource,
             token_models::tokens::{TableHandleToOwner, TableMetadataForToken},
             token_v2_models::{
                 v2_collections::{CollectionV2, CurrentCollectionV2, CurrentCollectionV2PK},
-                v2_token_datas::CurrentTokenDataV2PK,
+                v2_token_datas::{CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2},
             },
         },
+        postgres::models::resources::FromWriteResource,
     },
     gap_detectors::ProcessingResult,
     utils::{
@@ -200,7 +197,7 @@ impl ProcessorTrait for NftMetadataProcessor {
     }
 }
 
-fn clean_token_pubsub_message(ctd: RawCurrentTokenDataV2, db_chain_id: u64) -> String {
+fn clean_token_pubsub_message(ctd: CurrentTokenDataV2, db_chain_id: u64) -> String {
     remove_null_bytes(&format!(
         "{},{},{},{},{},false",
         ctd.token_data_id,
@@ -229,8 +226,8 @@ async fn parse_v2_token(
     conn: &mut 
DbPoolConnection<'_>, query_retries: u32, query_retry_delay_ms: u64, -) -> (Vec, Vec) { - let mut current_token_datas_v2: AHashMap = +) -> (Vec, Vec) { + let mut current_token_datas_v2: AHashMap = AHashMap::new(); let mut current_collections_v2: AHashMap = AHashMap::new(); @@ -273,7 +270,7 @@ async fn parse_v2_token( match wsc.change.as_ref().unwrap() { Change::WriteTableItem(table_item) => { if let Some((_, current_token_data)) = - RawTokenDataV2::get_v1_from_write_table_item( + TokenDataV2::get_v1_from_write_table_item( table_item, txn_version, wsc_index, @@ -303,15 +300,14 @@ async fn parse_v2_token( } }, Change::WriteResource(resource) => { - if let Some((_, current_token_data)) = - RawTokenDataV2::get_v2_from_write_resource( - resource, - txn_version, - wsc_index, - txn_timestamp, - &token_v2_metadata_helper, - ) - .unwrap() + if let Some((_, current_token_data)) = TokenDataV2::get_v2_from_write_resource( + resource, + txn_version, + wsc_index, + txn_timestamp, + &token_v2_metadata_helper, + ) + .unwrap() { current_token_datas_v2 .insert(current_token_data.token_data_id.clone(), current_token_data); @@ -337,7 +333,7 @@ async fn parse_v2_token( let current_token_datas_v2 = current_token_datas_v2 .into_values() - .collect::>(); + .collect::>(); let current_collections_v2 = current_collections_v2 .into_values() .collect::>(); diff --git a/rust/processor/src/processors/parquet_processors/parquet_token_v2_processor.rs b/rust/processor/src/processors/parquet_processors/parquet_token_v2_processor.rs index 1c855973d..d8693745b 100644 --- a/rust/processor/src/processors/parquet_processors/parquet_token_v2_processor.rs +++ b/rust/processor/src/processors/parquet_processors/parquet_token_v2_processor.rs @@ -12,23 +12,18 @@ use crate::{ object_models::v2_object_utils::{ ObjectAggregatedData, ObjectAggregatedDataMapping, ObjectWithMetadata, }, + token_models::tokens::{TableHandleToOwner, TableMetadataForToken}, token_v2_models::{ - raw_v2_token_datas::{RawTokenDataV2, TokenDataV2Convertible}, - raw_v2_token_ownerships::{ - NFTOwnershipV2, RawTokenOwnershipV2, TokenOwnershipV2Convertible, - }, + v2_token_datas::{ParquetTokenDataV2, TokenDataV2}, + v2_token_ownerships::{NFTOwnershipV2, ParquetTokenOwnershipV2, TokenOwnershipV2}, v2_token_utils::{ Burn, BurnEvent, MintEvent, TokenV2Burned, TokenV2Minted, TransferEvent, }, }, }, - parquet::models::token_v2_models::{ - v2_token_datas::TokenDataV2, v2_token_ownerships::TokenOwnershipV2, - }, postgres::models::{ fungible_asset_models::v2_fungible_asset_utils::FungibleAssetMetadata, resources::{FromWriteResource, V2TokenResource}, - token_models::tokens::{TableHandleToOwner, TableMetadataForToken}, }, }, gap_detectors::ProcessingResult, @@ -65,8 +60,8 @@ impl ParquetProcessorTrait for ParquetTokenV2ProcessorConfig { pub struct ParquetTokenV2Processor { connection_pool: ArcDbPool, - v2_token_datas_sender: AsyncSender>, - v2_token_ownerships_sender: AsyncSender>, + v2_token_datas_sender: AsyncSender>, + v2_token_ownerships_sender: AsyncSender>, } impl ParquetTokenV2Processor { @@ -77,7 +72,7 @@ impl ParquetTokenV2Processor { ) -> Self { config.set_google_credentials(config.google_application_credentials.clone()); - let v2_token_datas_sender = create_parquet_handler_loop::( + let v2_token_datas_sender = create_parquet_handler_loop::( new_gap_detector_sender.clone(), ProcessorName::ParquetTokenV2Processor.into(), config.bucket_name.clone(), @@ -87,7 +82,7 @@ impl ParquetTokenV2Processor { config.parquet_upload_interval_in_secs(), ); - let 
v2_token_ownerships_sender = create_parquet_handler_loop::( + let v2_token_ownerships_sender = create_parquet_handler_loop::( new_gap_detector_sender.clone(), ProcessorName::ParquetTokenV2Processor.into(), config.bucket_name.clone(), @@ -143,9 +138,9 @@ impl ProcessorTrait for ParquetTokenV2Processor { ) .await; - let parquet_token_datas_v2: Vec = raw_token_datas_v2 + let parquet_token_datas_v2: Vec = raw_token_datas_v2 .into_iter() - .map(TokenDataV2::from_raw) + .map(ParquetTokenDataV2::from) .collect(); let token_data_v2_parquet_data = ParquetDataGeneric { @@ -157,9 +152,9 @@ impl ProcessorTrait for ParquetTokenV2Processor { .await .context("Failed to send token data v2 parquet data")?; - let parquet_token_ownerships_v2: Vec = raw_token_ownerships_v2 + let parquet_token_ownerships_v2: Vec = raw_token_ownerships_v2 .into_iter() - .map(TokenOwnershipV2::from_raw) + .map(ParquetTokenOwnershipV2::from) .collect(); let token_ownerships_v2_parquet_data = ParquetDataGeneric { @@ -193,7 +188,7 @@ async fn parse_v2_token( table_handle_to_owner: &TableHandleToOwner, db_context: &mut Option>, transaction_version_to_struct_count: &mut AHashMap, -) -> (Vec, Vec) { +) -> (Vec, Vec) { // Token V2 and V1 combined let mut token_datas_v2 = vec![]; let mut token_ownerships_v2 = vec![]; @@ -334,7 +329,7 @@ async fn parse_v2_token( match wsc.change.as_ref().unwrap() { Change::WriteTableItem(table_item) => { if let Some((raw_token_data, _)) = - RawTokenDataV2::get_v1_from_write_table_item( + TokenDataV2::get_v1_from_write_table_item( table_item, txn_version, wsc_index, @@ -349,7 +344,7 @@ async fn parse_v2_token( .or_insert(1); } if let Some((token_ownership, current_token_ownership)) = - RawTokenOwnershipV2::get_v1_from_write_table_item( + TokenOwnershipV2::get_v1_from_write_table_item( table_item, txn_version, wsc_index, @@ -377,7 +372,7 @@ async fn parse_v2_token( }, Change::DeleteTableItem(table_item) => { if let Some((token_ownership, current_token_ownership)) = - RawTokenOwnershipV2::get_v1_from_delete_table_item( + TokenOwnershipV2::get_v1_from_delete_table_item( table_item, txn_version, wsc_index, @@ -405,7 +400,7 @@ async fn parse_v2_token( }, Change::WriteResource(resource) => { if let Some((raw_token_data, _current_token_data)) = - RawTokenDataV2::get_v2_from_write_resource( + TokenDataV2::get_v2_from_write_resource( resource, txn_version, wsc_index, @@ -415,12 +410,11 @@ async fn parse_v2_token( .unwrap() { // Add NFT ownership - let (mut ownerships, _) = - RawTokenOwnershipV2::get_nft_v2_from_token_data( - &raw_token_data, - &token_v2_metadata_helper, - ) - .unwrap(); + let (mut ownerships, _) = TokenOwnershipV2::get_nft_v2_from_token_data( + &raw_token_data, + &token_v2_metadata_helper, + ) + .unwrap(); if let Some(current_nft_ownership) = ownerships.first() { // Note that the first element in ownerships is the current ownership. 
We need to cache // it in prior_nft_ownership so that moving forward if we see a burn we'll know @@ -447,7 +441,7 @@ async fn parse_v2_token( } // Add burned NFT handling if let Some((nft_ownership, current_nft_ownership)) = - RawTokenOwnershipV2::get_burned_nft_v2_from_write_resource( + TokenOwnershipV2::get_burned_nft_v2_from_write_resource( resource, txn_version, wsc_index, @@ -477,7 +471,7 @@ async fn parse_v2_token( }, Change::DeleteResource(resource) => { if let Some((nft_ownership, current_nft_ownership)) = - RawTokenOwnershipV2::get_burned_nft_v2_from_delete_resource( + TokenOwnershipV2::get_burned_nft_v2_from_delete_resource( resource, txn_version, wsc_index, diff --git a/rust/processor/src/processors/token_v2_processor.rs b/rust/processor/src/processors/token_v2_processor.rs index b1ad38e16..b5c6d945d 100644 --- a/rust/processor/src/processors/token_v2_processor.rs +++ b/rust/processor/src/processors/token_v2_processor.rs @@ -8,25 +8,24 @@ use crate::{ object_models::v2_object_utils::{ ObjectAggregatedData, ObjectAggregatedDataMapping, ObjectWithMetadata, }, - token_v2_models::{ - raw_token_claims::{ - CurrentTokenPendingClaimConvertible, RawCurrentTokenPendingClaim, - TokenV1Claimed, - }, - raw_v1_token_royalty::{ - CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1, - }, - raw_v2_token_activities::{RawTokenActivityV2, TokenActivityV2Convertible}, - raw_v2_token_datas::{ - CurrentTokenDataV2Convertible, RawCurrentTokenDataV2, RawTokenDataV2, - TokenDataV2Convertible, + token_models::{ + token_claims::{ + CurrentTokenPendingClaim, PostgresCurrentTokenPendingClaim, TokenV1Claimed, }, - raw_v2_token_metadata::{ - CurrentTokenV2MetadataConvertible, RawCurrentTokenV2Metadata, + token_royalty::{CurrentTokenRoyaltyV1, PostgresCurrentTokenRoyaltyV1}, + tokens::{CurrentTokenPendingClaimPK, TableHandleToOwner, TableMetadataForToken}, + }, + token_v2_models::{ + v2_collections::{CollectionV2, CurrentCollectionV2, CurrentCollectionV2PK}, + v2_token_activities::{PostgresTokenActivityV2, TokenActivityV2}, + v2_token_datas::{ + CurrentTokenDataV2, CurrentTokenDataV2PK, PostgresCurrentTokenDataV2, + TokenDataV2, }, - raw_v2_token_ownerships::{ - CurrentTokenOwnershipV2Convertible, CurrentTokenOwnershipV2PK, NFTOwnershipV2, - RawCurrentTokenOwnershipV2, RawTokenOwnershipV2, TokenOwnershipV2Convertible, + v2_token_metadata::{CurrentTokenV2Metadata, CurrentTokenV2MetadataPK}, + v2_token_ownerships::{ + CurrentTokenOwnershipV2, CurrentTokenOwnershipV2PK, NFTOwnershipV2, + PostgresCurrentTokenOwnershipV2, TokenOwnershipV2, }, v2_token_utils::{ Burn, BurnEvent, Mint, MintEvent, TokenV2Burned, TokenV2Minted, TransferEvent, @@ -36,18 +35,6 @@ use crate::{ postgres::models::{ fungible_asset_models::v2_fungible_asset_utils::FungibleAssetMetadata, resources::{FromWriteResource, V2TokenResource}, - token_models::{ - token_claims::CurrentTokenPendingClaim, - tokens::{CurrentTokenPendingClaimPK, TableHandleToOwner, TableMetadataForToken}, - }, - token_v2_models::{ - v1_token_royalty::CurrentTokenRoyaltyV1, - v2_collections::{CollectionV2, CurrentCollectionV2, CurrentCollectionV2PK}, - v2_token_activities::TokenActivityV2, - v2_token_datas::{CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2}, - v2_token_metadata::{CurrentTokenV2Metadata, CurrentTokenV2MetadataPK}, - v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2}, - }, }, }, gap_detectors::ProcessingResult, @@ -122,22 +109,18 @@ async fn insert_to_db( name: &'static str, start_version: u64, end_version: u64, - collections_v2: 
&[CollectionV2], - token_datas_v2: &[TokenDataV2], - token_ownerships_v2: &[TokenOwnershipV2], current_collections_v2: &[CurrentCollectionV2], (current_token_datas_v2, current_deleted_token_datas_v2): ( - &[CurrentTokenDataV2], - &[CurrentTokenDataV2], + &[PostgresCurrentTokenDataV2], + &[PostgresCurrentTokenDataV2], ), (current_token_ownerships_v2, current_deleted_token_ownerships_v2): ( - &[CurrentTokenOwnershipV2], - &[CurrentTokenOwnershipV2], + &[PostgresCurrentTokenOwnershipV2], + &[PostgresCurrentTokenOwnershipV2], ), - token_activities_v2: &[TokenActivityV2], - current_token_v2_metadata: &[CurrentTokenV2Metadata], - current_token_royalties_v1: &[CurrentTokenRoyaltyV1], - current_token_claims: &[CurrentTokenPendingClaim], + token_activities_v2: &[PostgresTokenActivityV2], + current_token_royalties_v1: &[PostgresCurrentTokenRoyaltyV1], + current_token_claims: &[PostgresCurrentTokenPendingClaim], per_table_chunk_sizes: &AHashMap, ) -> Result<(), diesel::result::Error> { tracing::trace!( @@ -147,27 +130,6 @@ async fn insert_to_db( "Inserting to db", ); - let coll_v2 = execute_in_chunks( - conn.clone(), - insert_collections_v2_query, - collections_v2, - get_config_table_chunk_size::("collections_v2", per_table_chunk_sizes), - ); - let td_v2 = execute_in_chunks( - conn.clone(), - insert_token_datas_v2_query, - token_datas_v2, - get_config_table_chunk_size::("token_datas_v2", per_table_chunk_sizes), - ); - let to_v2 = execute_in_chunks( - conn.clone(), - insert_token_ownerships_v2_query, - token_ownerships_v2, - get_config_table_chunk_size::( - "token_ownerships_v2", - per_table_chunk_sizes, - ), - ); let cc_v2 = execute_in_chunks( conn.clone(), insert_current_collections_v2_query, @@ -181,7 +143,7 @@ async fn insert_to_db( conn.clone(), insert_current_token_datas_v2_query, current_token_datas_v2, - get_config_table_chunk_size::( + get_config_table_chunk_size::( "current_token_datas_v2", per_table_chunk_sizes, ), @@ -190,7 +152,7 @@ async fn insert_to_db( conn.clone(), insert_current_deleted_token_datas_v2_query, current_deleted_token_datas_v2, - get_config_table_chunk_size::( + get_config_table_chunk_size::( "current_token_datas_v2", per_table_chunk_sizes, ), @@ -199,7 +161,7 @@ async fn insert_to_db( conn.clone(), insert_current_token_ownerships_v2_query, current_token_ownerships_v2, - get_config_table_chunk_size::( + get_config_table_chunk_size::( "current_token_ownerships_v2", per_table_chunk_sizes, ), @@ -208,7 +170,7 @@ async fn insert_to_db( conn.clone(), insert_current_deleted_token_ownerships_v2_query, current_deleted_token_ownerships_v2, - get_config_table_chunk_size::( + get_config_table_chunk_size::( "current_token_ownerships_v2", per_table_chunk_sizes, ), @@ -217,25 +179,16 @@ async fn insert_to_db( conn.clone(), insert_token_activities_v2_query, token_activities_v2, - get_config_table_chunk_size::( + get_config_table_chunk_size::( "token_activities_v2", per_table_chunk_sizes, ), ); - let ct_v2 = execute_in_chunks( - conn.clone(), - insert_current_token_v2_metadatas_query, - current_token_v2_metadata, - get_config_table_chunk_size::( - "current_token_v2_metadata", - per_table_chunk_sizes, - ), - ); let ctr_v1 = execute_in_chunks( conn.clone(), insert_current_token_royalties_v1_query, current_token_royalties_v1, - get_config_table_chunk_size::( + get_config_table_chunk_size::( "current_token_royalty_v1", per_table_chunk_sizes, ), @@ -244,41 +197,30 @@ async fn insert_to_db( conn, insert_current_token_claims_query, current_token_claims, - get_config_table_chunk_size::( + 
get_config_table_chunk_size::( "current_token_pending_claims", per_table_chunk_sizes, ), ); let ( - coll_v2_res, - td_v2_res, - to_v2_res, cc_v2_res, ctd_v2_res, cdtd_v2_res, cto_v2_res, cdto_v2_res, ta_v2_res, - ct_v2_res, ctr_v1_res, ctc_v1_res, - ) = tokio::join!( - coll_v2, td_v2, to_v2, cc_v2, ctd_v2, cdtd_v2, cto_v2, cdto_v2, ta_v2, ct_v2, ctr_v1, - ctc_v1 - ); + ) = tokio::join!(cc_v2, ctd_v2, cdtd_v2, cto_v2, cdto_v2, ta_v2, ctr_v1, ctc_v1); for res in [ - coll_v2_res, - td_v2_res, - to_v2_res, cc_v2_res, ctd_v2_res, cdtd_v2_res, cto_v2_res, cdto_v2_res, ta_v2_res, - ct_v2_res, ctr_v1_res, ctc_v1_res, ] { @@ -288,71 +230,6 @@ async fn insert_to_db( Ok(()) } -pub fn insert_collections_v2_query( - items_to_insert: Vec, -) -> ( - impl QueryFragment + diesel::query_builder::QueryId + Send, - Option<&'static str>, -) { - use schema::collections_v2::dsl::*; - ( - diesel::insert_into(schema::collections_v2::table) - .values(items_to_insert) - .on_conflict((transaction_version, write_set_change_index)) - .do_update() - .set(( - collection_properties.eq(excluded(collection_properties)), - inserted_at.eq(excluded(inserted_at)), - )), - None, - ) -} - -pub fn insert_token_datas_v2_query( - items_to_insert: Vec, -) -> ( - impl QueryFragment + diesel::query_builder::QueryId + Send, - Option<&'static str>, -) { - use schema::token_datas_v2::dsl::*; - - ( - diesel::insert_into(schema::token_datas_v2::table) - .values(items_to_insert) - .on_conflict((transaction_version, write_set_change_index)) - .do_update() - .set(( - maximum.eq(excluded(maximum)), - supply.eq(excluded(supply)), - is_fungible_v2.eq(excluded(is_fungible_v2)), - inserted_at.eq(excluded(inserted_at)), - decimals.eq(excluded(decimals)), - )), - None, - ) -} - -pub fn insert_token_ownerships_v2_query( - items_to_insert: Vec, -) -> ( - impl QueryFragment + diesel::query_builder::QueryId + Send, - Option<&'static str>, -) { - use schema::token_ownerships_v2::dsl::*; - - ( - diesel::insert_into(schema::token_ownerships_v2::table) - .values(items_to_insert) - .on_conflict((transaction_version, write_set_change_index)) - .do_update() - .set(( - is_fungible_v2.eq(excluded(is_fungible_v2)), - inserted_at.eq(excluded(inserted_at)), - )), - None, - ) -} - pub fn insert_current_collections_v2_query( items_to_insert: Vec, ) -> ( @@ -388,7 +265,7 @@ pub fn insert_current_collections_v2_query( } pub fn insert_current_token_datas_v2_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -423,7 +300,7 @@ pub fn insert_current_token_datas_v2_query( } pub fn insert_current_deleted_token_datas_v2_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -446,7 +323,7 @@ pub fn insert_current_deleted_token_datas_v2_query( } pub fn insert_current_token_ownerships_v2_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -475,7 +352,7 @@ pub fn insert_current_token_ownerships_v2_query( } pub fn insert_current_deleted_token_ownerships_v2_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -499,7 +376,7 @@ pub fn insert_current_deleted_token_ownerships_v2_query( } pub fn insert_token_activities_v2_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + 
diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -519,31 +396,8 @@ pub fn insert_token_activities_v2_query( ) } -pub fn insert_current_token_v2_metadatas_query( - items_to_insert: Vec, -) -> ( - impl QueryFragment + diesel::query_builder::QueryId + Send, - Option<&'static str>, -) { - use schema::current_token_v2_metadata::dsl::*; - - ( - diesel::insert_into(schema::current_token_v2_metadata::table) - .values(items_to_insert) - .on_conflict((object_address, resource_type)) - .do_update() - .set(( - data.eq(excluded(data)), - state_key_hash.eq(excluded(state_key_hash)), - last_transaction_version.eq(excluded(last_transaction_version)), - inserted_at.eq(excluded(inserted_at)), - )), - Some(" WHERE current_token_v2_metadata.last_transaction_version <= excluded.last_transaction_version "), - ) -} - pub fn insert_current_token_royalties_v1_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -567,7 +421,7 @@ pub fn insert_current_token_royalties_v1_query( } pub fn insert_current_token_claims_query( - items_to_insert: Vec, + items_to_insert: Vec, ) -> ( impl QueryFragment + diesel::query_builder::QueryId + Send, Option<&'static str>, @@ -627,16 +481,16 @@ impl ProcessorTrait for TokenV2Processor { }; // Token V2 processing which includes token v1 let ( - mut collections_v2, - raw_token_datas_v2, - raw_token_ownerships_v2, + _, + _, + _, current_collections_v2, raw_current_token_datas_v2, raw_current_deleted_token_datas_v2, raw_current_token_ownerships_v2, raw_current_deleted_token_ownerships_v2, raw_token_activities_v2, - raw_current_token_v2_metadata, + _, raw_current_token_royalties_v1, raw_current_token_claims, ) = parse_v2_token( @@ -646,91 +500,55 @@ impl ProcessorTrait for TokenV2Processor { ) .await; - let postgres_current_token_claims: Vec = raw_current_token_claims - .into_iter() - .map(CurrentTokenPendingClaim::from_raw) - .collect(); - - let postgres_current_token_royalties_v1: Vec = - raw_current_token_royalties_v1 + let postgres_current_token_claims: Vec = + raw_current_token_claims .into_iter() - .map(CurrentTokenRoyaltyV1::from_raw) + .map(PostgresCurrentTokenPendingClaim::from) .collect(); - let mut postgres_current_token_v2_metadata: Vec = - raw_current_token_v2_metadata + let postgres_current_token_royalties_v1: Vec = + raw_current_token_royalties_v1 .into_iter() - .map(CurrentTokenV2Metadata::from_raw) + .map(PostgresCurrentTokenRoyaltyV1::from) .collect(); - let postgres_token_activities_v2: Vec = raw_token_activities_v2 - .into_iter() - .map(TokenActivityV2::from_raw) - .collect(); - - let mut postgres_token_datas_v2: Vec = raw_token_datas_v2 + let postgres_token_activities_v2: Vec = raw_token_activities_v2 .into_iter() - .map(TokenDataV2::from_raw) + .map(PostgresTokenActivityV2::from) .collect(); - let postgres_current_token_datas_v2: Vec = raw_current_token_datas_v2 - .into_iter() - .map(CurrentTokenDataV2::from_raw) - .collect(); + let postgres_current_token_datas_v2: Vec = + raw_current_token_datas_v2 + .into_iter() + .map(PostgresCurrentTokenDataV2::from) + .collect(); - let postgres_current_deleted_token_datas_v2: Vec = + let postgres_current_deleted_token_datas_v2: Vec = raw_current_deleted_token_datas_v2 .into_iter() - .map(CurrentTokenDataV2::from_raw) + .map(PostgresCurrentTokenDataV2::from) .collect(); - let mut postgres_token_ownerships_v2: Vec = raw_token_ownerships_v2 - .into_iter() - .map(TokenOwnershipV2::from_raw) - .collect(); - - let 
postgres_current_token_ownerships_v2: Vec = + let postgres_current_token_ownerships_v2: Vec = raw_current_token_ownerships_v2 .into_iter() - .map(CurrentTokenOwnershipV2::from_raw) + .map(PostgresCurrentTokenOwnershipV2::from) .collect(); - let postgres_current_deleted_token_ownerships_v2: Vec = + let postgres_current_deleted_token_ownerships_v2: Vec = raw_current_deleted_token_ownerships_v2 .into_iter() - .map(CurrentTokenOwnershipV2::from_raw) + .map(PostgresCurrentTokenOwnershipV2::from) .collect(); let processing_duration_in_secs = processing_start.elapsed().as_secs_f64(); let db_insertion_start = std::time::Instant::now(); - if self - .deprecated_tables - .contains(TableFlags::TOKEN_OWNERSHIPS_V2) - { - postgres_token_ownerships_v2.clear(); - } - if self.deprecated_tables.contains(TableFlags::TOKEN_DATAS_V2) { - postgres_token_datas_v2.clear(); - } - if self.deprecated_tables.contains(TableFlags::COLLECTIONS_V2) { - collections_v2.clear(); - } - if self - .deprecated_tables - .contains(TableFlags::CURRENT_TOKEN_V2_METADATA) - { - postgres_current_token_v2_metadata.clear(); - } - let tx_result = insert_to_db( self.get_pool(), self.name(), start_version, end_version, - &collections_v2, - &postgres_token_datas_v2, - &postgres_token_ownerships_v2, ¤t_collections_v2, ( &postgres_current_token_datas_v2, @@ -741,7 +559,6 @@ impl ProcessorTrait for TokenV2Processor { &postgres_current_deleted_token_ownerships_v2, ), &postgres_token_activities_v2, - &postgres_current_token_v2_metadata, &postgres_current_token_royalties_v1, &postgres_current_token_claims, &self.per_table_chunk_sizes, @@ -777,43 +594,23 @@ impl ProcessorTrait for TokenV2Processor { } } -pub async fn parse_v2_token_for_parquet( - transactions: &[Transaction], - table_handle_to_owner: &TableHandleToOwner, -) -> ( - Vec, - Vec, - Vec, - Vec, - Vec, - Vec, - Vec, - Vec, // deleted token ownerships - Vec, - Vec, - Vec, - Vec, -) { - parse_v2_token(transactions, table_handle_to_owner, &mut None).await -} - pub async fn parse_v2_token( transactions: &[Transaction], table_handle_to_owner: &TableHandleToOwner, db_context: &mut Option>, ) -> ( Vec, - Vec, - Vec, + Vec, + Vec, Vec, - Vec, - Vec, - Vec, - Vec, // deleted token ownerships - Vec, - Vec, - Vec, - Vec, + Vec, + Vec, + Vec, + Vec, // deleted token ownerships + Vec, + Vec, + Vec, + Vec, ) { // Token V2 and V1 combined let mut collections_v2 = vec![]; @@ -823,13 +620,13 @@ pub async fn parse_v2_token( let mut current_collections_v2: AHashMap = AHashMap::new(); - let mut current_token_datas_v2: AHashMap = + let mut current_token_datas_v2: AHashMap = AHashMap::new(); - let mut current_deleted_token_datas_v2: AHashMap = + let mut current_deleted_token_datas_v2: AHashMap = AHashMap::new(); let mut current_token_ownerships_v2: AHashMap< CurrentTokenOwnershipV2PK, - RawCurrentTokenOwnershipV2, + CurrentTokenOwnershipV2, > = AHashMap::new(); let mut current_deleted_token_ownerships_v2 = AHashMap::new(); // Optimization to track prior ownership in case a token gets burned so we can lookup the ownership @@ -839,16 +636,14 @@ pub async fn parse_v2_token( // we can still get the object core metadata for it let mut token_v2_metadata_helper: ObjectAggregatedDataMapping = AHashMap::new(); // Basically token properties - let mut current_token_v2_metadata: AHashMap< - CurrentTokenV2MetadataPK, - RawCurrentTokenV2Metadata, - > = AHashMap::new(); - let mut current_token_royalties_v1: AHashMap = + let mut current_token_v2_metadata: AHashMap = + AHashMap::new(); + let mut current_token_royalties_v1: 
AHashMap = AHashMap::new(); // migrating this from v1 token model as we don't have any replacement table for this let mut all_current_token_claims: AHashMap< CurrentTokenPendingClaimPK, - RawCurrentTokenPendingClaim, + CurrentTokenPendingClaim, > = AHashMap::new(); // Code above is inefficient (multiple passthroughs) so I'm approaching TokenV2 with a cleaner code structure @@ -989,7 +784,7 @@ pub async fn parse_v2_token( } } // handling all the token v1 events - if let Some(event) = RawTokenActivityV2::get_v1_from_parsed_event( + if let Some(event) = TokenActivityV2::get_v1_from_parsed_event( event, txn_version, txn_timestamp, @@ -1002,7 +797,7 @@ pub async fn parse_v2_token( token_activities_v2.push(event); } // handling all the token v2 events - if let Some(event) = RawTokenActivityV2::get_nft_v2_from_parsed_event( + if let Some(event) = TokenActivityV2::get_nft_v2_from_parsed_event( event, txn_version, txn_timestamp, @@ -1048,7 +843,7 @@ pub async fn parse_v2_token( } if let Some((token_data, current_token_data)) = - RawTokenDataV2::get_v1_from_write_table_item( + TokenDataV2::get_v1_from_write_table_item( table_item, txn_version, wsc_index, @@ -1063,7 +858,7 @@ pub async fn parse_v2_token( ); } if let Some(current_token_royalty) = - RawCurrentTokenRoyaltyV1::get_v1_from_write_table_item( + CurrentTokenRoyaltyV1::get_v1_from_write_table_item( table_item, txn_version, txn_timestamp, @@ -1076,7 +871,7 @@ pub async fn parse_v2_token( ); } if let Some((token_ownership, current_token_ownership)) = - RawTokenOwnershipV2::get_v1_from_write_table_item( + TokenOwnershipV2::get_v1_from_write_table_item( table_item, txn_version, wsc_index, @@ -1107,7 +902,7 @@ pub async fn parse_v2_token( } } if let Some(current_token_token_claim) = - RawCurrentTokenPendingClaim::from_write_table_item( + CurrentTokenPendingClaim::from_write_table_item( table_item, txn_version, txn_timestamp, @@ -1128,7 +923,7 @@ pub async fn parse_v2_token( }, Change::DeleteTableItem(table_item) => { if let Some((token_ownership, current_token_ownership)) = - RawTokenOwnershipV2::get_v1_from_delete_table_item( + TokenOwnershipV2::get_v1_from_delete_table_item( table_item, txn_version, wsc_index, @@ -1159,7 +954,7 @@ pub async fn parse_v2_token( } } if let Some(current_token_token_claim) = - RawCurrentTokenPendingClaim::from_delete_table_item( + CurrentTokenPendingClaim::from_delete_table_item( table_item, txn_version, txn_timestamp, @@ -1197,7 +992,7 @@ pub async fn parse_v2_token( ); } if let Some((raw_token_data, current_token_data)) = - RawTokenDataV2::get_v2_from_write_resource( + TokenDataV2::get_v2_from_write_resource( resource, txn_version, wsc_index, @@ -1208,7 +1003,7 @@ pub async fn parse_v2_token( { // Add NFT ownership let (mut ownerships, current_ownerships) = - RawTokenOwnershipV2::get_nft_v2_from_token_data( + TokenOwnershipV2::get_nft_v2_from_token_data( &raw_token_data, &token_v2_metadata_helper, ) @@ -1242,7 +1037,7 @@ pub async fn parse_v2_token( // Add burned NFT handling for token datas (can probably be merged with below) // This handles the case where token is burned but objectCore is still there if let Some(deleted_token_data) = - RawTokenDataV2::get_burned_nft_v2_from_write_resource( + TokenDataV2::get_burned_nft_v2_from_write_resource( resource, txn_version, txn_timestamp, @@ -1259,7 +1054,7 @@ pub async fn parse_v2_token( // Add burned NFT handling // This handles the case where token is burned but objectCore is still there if let Some((nft_ownership, current_nft_ownership)) = - 
RawTokenOwnershipV2::get_burned_nft_v2_from_write_resource( + TokenOwnershipV2::get_burned_nft_v2_from_write_resource( resource, txn_version, wsc_index, @@ -1293,14 +1088,13 @@ pub async fn parse_v2_token( } // Track token properties - if let Some(token_metadata) = - RawCurrentTokenV2Metadata::from_write_resource( - resource, - txn_version, - &token_v2_metadata_helper, - txn_timestamp, - ) - .unwrap() + if let Some(token_metadata) = CurrentTokenV2Metadata::from_write_resource( + resource, + txn_version, + &token_v2_metadata_helper, + txn_timestamp, + ) + .unwrap() { current_token_v2_metadata.insert( ( @@ -1314,7 +1108,7 @@ pub async fn parse_v2_token( Change::DeleteResource(resource) => { // Add burned NFT handling for token datas (can probably be merged with below) if let Some(deleted_token_data) = - RawTokenDataV2::get_burned_nft_v2_from_delete_resource( + TokenDataV2::get_burned_nft_v2_from_delete_resource( resource, txn_version, txn_timestamp, @@ -1329,7 +1123,7 @@ pub async fn parse_v2_token( ); } if let Some((nft_ownership, current_nft_ownership)) = - RawTokenOwnershipV2::get_burned_nft_v2_from_delete_resource( + TokenOwnershipV2::get_burned_nft_v2_from_delete_resource( resource, txn_version, wsc_index, @@ -1373,25 +1167,25 @@ pub async fn parse_v2_token( .collect::>(); let mut current_token_datas_v2 = current_token_datas_v2 .into_values() - .collect::>(); + .collect::>(); let mut current_deleted_token_datas_v2 = current_deleted_token_datas_v2 .into_values() - .collect::>(); + .collect::>(); let mut current_token_ownerships_v2 = current_token_ownerships_v2 .into_values() - .collect::>(); + .collect::>(); let mut current_token_v2_metadata = current_token_v2_metadata .into_values() - .collect::>(); + .collect::>(); let mut current_deleted_token_ownerships_v2 = current_deleted_token_ownerships_v2 .into_values() - .collect::>(); + .collect::>(); let mut current_token_royalties_v1 = current_token_royalties_v1 .into_values() - .collect::>(); + .collect::>(); let mut all_current_token_claims = all_current_token_claims .into_values() - .collect::>(); + .collect::>(); // Sort by PK current_collections_v2.sort_by(|a, b| a.collection_id.cmp(&b.collection_id)); current_deleted_token_datas_v2.sort_by(|a, b| a.token_data_id.cmp(&b.token_data_id)); diff --git a/rust/sdk-processor/src/config/processor_config.rs b/rust/sdk-processor/src/config/processor_config.rs index b692e8540..908aefe96 100644 --- a/rust/sdk-processor/src/config/processor_config.rs +++ b/rust/sdk-processor/src/config/processor_config.rs @@ -9,45 +9,51 @@ use crate::{ use ahash::AHashMap; use processor::{ bq_analytics::generic_parquet_processor::NamedTable, - db::parquet::models::{ - account_transaction_models::parquet_account_transactions::AccountTransaction, - ans_models::{ - ans_lookup_v2::{AnsLookupV2, CurrentAnsLookupV2}, - ans_primary_name_v2::{AnsPrimaryNameV2, CurrentAnsPrimaryNameV2}, - }, - default_models::{ - parquet_block_metadata_transactions::BlockMetadataTransaction, - parquet_move_modules::MoveModule, - parquet_move_resources::MoveResource, - parquet_move_tables::{CurrentTableItem, TableItem, TableMetadata}, - parquet_transactions::Transaction, - parquet_write_set_changes::WriteSetChangeModel, - }, - event_models::parquet_events::EventPQ, - fungible_asset_models::{ - parquet_v2_fungible_asset_activities::FungibleAssetActivity, - parquet_v2_fungible_asset_balances::{ - CurrentFungibleAssetBalance, CurrentUnifiedFungibleAssetBalance, - FungibleAssetBalance, + db::{ + common::models::{ + token_models::{ + 
token_claims::ParquetCurrentTokenPendingClaim, + token_royalty::ParquetCurrentTokenRoyaltyV1, + }, + token_v2_models::{ + v2_token_activities::ParquetTokenActivityV2, + v2_token_datas::{ParquetCurrentTokenDataV2, ParquetTokenDataV2}, + v2_token_metadata::ParquetCurrentTokenV2Metadata, + v2_token_ownerships::{ParquetCurrentTokenOwnershipV2, ParquetTokenOwnershipV2}, }, - parquet_v2_fungible_metadata::FungibleAssetMetadataModel, - }, - object_models::v2_objects::{CurrentObject, Object}, - stake_models::{ - parquet_delegator_activities::DelegatedStakingActivity, - parquet_delegator_balances::{CurrentDelegatorBalance, DelegatorBalance}, - parquet_proposal_voters::ProposalVote, }, - token_v2_models::{ - token_claims::CurrentTokenPendingClaim, - v1_token_royalty::CurrentTokenRoyaltyV1, - v2_token_activities::TokenActivityV2, - v2_token_datas::{CurrentTokenDataV2, TokenDataV2}, - v2_token_metadata::CurrentTokenV2Metadata, - v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2}, + parquet::models::{ + account_transaction_models::parquet_account_transactions::AccountTransaction, + ans_models::{ + ans_lookup_v2::{AnsLookupV2, CurrentAnsLookupV2}, + ans_primary_name_v2::{AnsPrimaryNameV2, CurrentAnsPrimaryNameV2}, + }, + default_models::{ + parquet_block_metadata_transactions::BlockMetadataTransaction, + parquet_move_modules::MoveModule, + parquet_move_resources::MoveResource, + parquet_move_tables::{CurrentTableItem, TableItem, TableMetadata}, + parquet_transactions::Transaction, + parquet_write_set_changes::WriteSetChangeModel, + }, + event_models::parquet_events::EventPQ, + fungible_asset_models::{ + parquet_v2_fungible_asset_activities::FungibleAssetActivity, + parquet_v2_fungible_asset_balances::{ + CurrentFungibleAssetBalance, CurrentUnifiedFungibleAssetBalance, + FungibleAssetBalance, + }, + parquet_v2_fungible_metadata::FungibleAssetMetadataModel, + }, + object_models::v2_objects::{CurrentObject, Object}, + stake_models::{ + parquet_delegator_activities::DelegatedStakingActivity, + parquet_delegator_balances::{CurrentDelegatorBalance, DelegatorBalance}, + parquet_proposal_voters::ProposalVote, + }, + transaction_metadata_model::parquet_write_set_size_info::WriteSetSize, + user_transaction_models::parquet_user_transactions::UserTransaction, }, - transaction_metadata_model::parquet_write_set_size_info::WriteSetSize, - user_transaction_models::parquet_user_transactions::UserTransaction, }, }; use serde::{Deserialize, Serialize}; @@ -200,14 +206,14 @@ impl ProcessorConfig { HashSet::from([AccountTransaction::TABLE_NAME.to_string()]) }, ProcessorName::ParquetTokenV2Processor => HashSet::from([ - CurrentTokenPendingClaim::TABLE_NAME.to_string(), - CurrentTokenRoyaltyV1::TABLE_NAME.to_string(), - CurrentTokenV2Metadata::TABLE_NAME.to_string(), - TokenActivityV2::TABLE_NAME.to_string(), - TokenDataV2::TABLE_NAME.to_string(), - CurrentTokenDataV2::TABLE_NAME.to_string(), - TokenOwnershipV2::TABLE_NAME.to_string(), - CurrentTokenOwnershipV2::TABLE_NAME.to_string(), + ParquetCurrentTokenPendingClaim::TABLE_NAME.to_string(), + ParquetCurrentTokenRoyaltyV1::TABLE_NAME.to_string(), + ParquetCurrentTokenV2Metadata::TABLE_NAME.to_string(), + ParquetTokenActivityV2::TABLE_NAME.to_string(), + ParquetTokenDataV2::TABLE_NAME.to_string(), + ParquetCurrentTokenDataV2::TABLE_NAME.to_string(), + ParquetTokenOwnershipV2::TABLE_NAME.to_string(), + ParquetCurrentTokenOwnershipV2::TABLE_NAME.to_string(), ]), ProcessorName::ParquetObjectsProcessor => HashSet::from([ Object::TABLE_NAME.to_string(), diff --git 
a/rust/sdk-processor/src/parquet_processors/mod.rs b/rust/sdk-processor/src/parquet_processors/mod.rs index eec923a1d..86fefa5ab 100644 --- a/rust/sdk-processor/src/parquet_processors/mod.rs +++ b/rust/sdk-processor/src/parquet_processors/mod.rs @@ -12,46 +12,52 @@ use enum_dispatch::enum_dispatch; use google_cloud_storage::client::{Client as GCSClient, ClientConfig as GcsClientConfig}; use parquet::schema::types::Type; use processor::{ - db::parquet::models::{ - account_transaction_models::parquet_account_transactions::AccountTransaction, - ans_models::{ - ans_lookup_v2::{AnsLookupV2, CurrentAnsLookupV2}, - ans_primary_name_v2::{AnsPrimaryNameV2, CurrentAnsPrimaryNameV2}, - }, - default_models::{ - parquet_block_metadata_transactions::BlockMetadataTransaction, - parquet_move_modules::MoveModule, - parquet_move_resources::MoveResource, - parquet_move_tables::{CurrentTableItem, TableItem}, - parquet_table_metadata::TableMetadata, - parquet_transactions::Transaction as ParquetTransaction, - parquet_write_set_changes::WriteSetChangeModel, - }, - event_models::parquet_events::EventPQ, - fungible_asset_models::{ - parquet_v2_fungible_asset_activities::FungibleAssetActivity, - parquet_v2_fungible_asset_balances::{ - CurrentFungibleAssetBalance, CurrentUnifiedFungibleAssetBalance, - FungibleAssetBalance, - }, - parquet_v2_fungible_metadata::FungibleAssetMetadataModel, - }, - object_models::v2_objects::{CurrentObject, Object}, - stake_models::{ - parquet_delegator_activities::DelegatedStakingActivity, - parquet_delegator_balances::{CurrentDelegatorBalance, DelegatorBalance}, - parquet_proposal_voters::ProposalVote, + db::{ + common::models::{ + token_models::{ + token_claims::ParquetCurrentTokenPendingClaim, + token_royalty::ParquetCurrentTokenRoyaltyV1, + }, + token_v2_models::{ + v2_token_activities::ParquetTokenActivityV2, + v2_token_datas::{ParquetCurrentTokenDataV2, ParquetTokenDataV2}, + v2_token_metadata::ParquetCurrentTokenV2Metadata, + v2_token_ownerships::{ParquetCurrentTokenOwnershipV2, ParquetTokenOwnershipV2}, + }, }, - token_v2_models::{ - token_claims::CurrentTokenPendingClaim, - v1_token_royalty::CurrentTokenRoyaltyV1, - v2_token_activities::TokenActivityV2, - v2_token_datas::{CurrentTokenDataV2, TokenDataV2}, - v2_token_metadata::CurrentTokenV2Metadata, - v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2}, + parquet::models::{ + account_transaction_models::parquet_account_transactions::AccountTransaction, + ans_models::{ + ans_lookup_v2::{AnsLookupV2, CurrentAnsLookupV2}, + ans_primary_name_v2::{AnsPrimaryNameV2, CurrentAnsPrimaryNameV2}, + }, + default_models::{ + parquet_block_metadata_transactions::BlockMetadataTransaction, + parquet_move_modules::MoveModule, + parquet_move_resources::MoveResource, + parquet_move_tables::{CurrentTableItem, TableItem}, + parquet_table_metadata::TableMetadata, + parquet_transactions::Transaction as ParquetTransaction, + parquet_write_set_changes::WriteSetChangeModel, + }, + event_models::parquet_events::EventPQ, + fungible_asset_models::{ + parquet_v2_fungible_asset_activities::FungibleAssetActivity, + parquet_v2_fungible_asset_balances::{ + CurrentFungibleAssetBalance, CurrentUnifiedFungibleAssetBalance, + FungibleAssetBalance, + }, + parquet_v2_fungible_metadata::FungibleAssetMetadataModel, + }, + object_models::v2_objects::{CurrentObject, Object}, + stake_models::{ + parquet_delegator_activities::DelegatedStakingActivity, + parquet_delegator_balances::{CurrentDelegatorBalance, DelegatorBalance}, + 
parquet_proposal_voters::ProposalVote, + }, + transaction_metadata_model::parquet_write_set_size_info::WriteSetSize, + user_transaction_models::parquet_user_transactions::UserTransaction, }, - transaction_metadata_model::parquet_write_set_size_info::WriteSetSize, - user_transaction_models::parquet_user_transactions::UserTransaction, }, utils::table_flags::TableFlags, }; @@ -224,29 +230,32 @@ impl_parquet_trait!( impl_parquet_trait!(WriteSetSize, ParquetTypeEnum::WriteSetSize); impl_parquet_trait!(AccountTransaction, ParquetTypeEnum::AccountTransactions); impl_parquet_trait!( - CurrentTokenPendingClaim, + ParquetCurrentTokenPendingClaim, ParquetTypeEnum::CurrentTokenPendingClaims ); impl_parquet_trait!( - CurrentTokenRoyaltyV1, + ParquetCurrentTokenRoyaltyV1, ParquetTypeEnum::CurrentTokenRoyaltiesV1 ); impl_parquet_trait!( - CurrentTokenV2Metadata, + ParquetCurrentTokenV2Metadata, ParquetTypeEnum::CurrentTokenV2Metadata ); -impl_parquet_trait!(TokenActivityV2, ParquetTypeEnum::TokenActivitiesV2); +impl_parquet_trait!(ParquetTokenActivityV2, ParquetTypeEnum::TokenActivitiesV2); impl_parquet_trait!( CurrentAnsPrimaryNameV2, ParquetTypeEnum::CurrentAnsPrimaryNameV2 ); impl_parquet_trait!(AnsLookupV2, ParquetTypeEnum::AnsLookupV2); impl_parquet_trait!(CurrentAnsLookupV2, ParquetTypeEnum::CurrentAnsLookupV2); -impl_parquet_trait!(TokenDataV2, ParquetTypeEnum::TokenDatasV2); -impl_parquet_trait!(CurrentTokenDataV2, ParquetTypeEnum::CurrentTokenDatasV2); -impl_parquet_trait!(TokenOwnershipV2, ParquetTypeEnum::TokenOwnershipsV2); +impl_parquet_trait!(ParquetTokenDataV2, ParquetTypeEnum::TokenDatasV2); +impl_parquet_trait!( + ParquetCurrentTokenDataV2, + ParquetTypeEnum::CurrentTokenDatasV2 +); +impl_parquet_trait!(ParquetTokenOwnershipV2, ParquetTypeEnum::TokenOwnershipsV2); impl_parquet_trait!( - CurrentTokenOwnershipV2, + ParquetCurrentTokenOwnershipV2, ParquetTypeEnum::CurrentTokenOwnershipsV2 ); impl_parquet_trait!( @@ -293,14 +302,14 @@ pub enum ParquetTypeStructs { // account txn AccountTransaction(Vec), // Token V2 - CurrentTokenPendingClaim(Vec), - CurrentTokenRoyaltyV1(Vec), - CurrentTokenV2Metadata(Vec), - TokenActivityV2(Vec), - TokenDataV2(Vec), - CurrentTokenDataV2(Vec), - TokenOwnershipV2(Vec), - CurrentTokenOwnershipV2(Vec), + CurrentTokenPendingClaim(Vec), + CurrentTokenRoyaltyV1(Vec), + CurrentTokenV2Metadata(Vec), + TokenActivityV2(Vec), + TokenDataV2(Vec), + CurrentTokenDataV2(Vec), + TokenOwnershipV2(Vec), + CurrentTokenOwnershipV2(Vec), // Stake DelegatedStakingActivity(Vec), CurrentDelegatorBalance(Vec), diff --git a/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs b/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs index 2403d3970..9b3ab7051 100644 --- a/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs +++ b/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs @@ -30,13 +30,17 @@ use aptos_indexer_processor_sdk::{ use parquet::schema::types::Type; use processor::{ bq_analytics::generic_parquet_processor::HasParquetSchema, - db::parquet::models::token_v2_models::{ - token_claims::CurrentTokenPendingClaim, - v1_token_royalty::CurrentTokenRoyaltyV1, - v2_token_activities::TokenActivityV2, - v2_token_datas::{CurrentTokenDataV2, TokenDataV2}, - v2_token_metadata::CurrentTokenV2Metadata, - v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2}, + db::common::models::{ + token_models::{ + token_claims::ParquetCurrentTokenPendingClaim, + token_royalty::ParquetCurrentTokenRoyaltyV1, + }, + 
token_v2_models::{ + v2_token_activities::ParquetTokenActivityV2, + v2_token_datas::{ParquetCurrentTokenDataV2, ParquetTokenDataV2}, + v2_token_metadata::ParquetCurrentTokenV2Metadata, + v2_token_ownerships::{ParquetCurrentTokenOwnershipV2, ParquetTokenOwnershipV2}, + }, }, }; use std::{collections::HashMap, sync::Arc}; @@ -132,32 +136,32 @@ impl ProcessorTrait for ParquetTokenV2Processor { let parquet_type_to_schemas: HashMap> = [ ( ParquetTypeEnum::CurrentTokenPendingClaims, - CurrentTokenPendingClaim::schema(), + ParquetCurrentTokenPendingClaim::schema(), ), ( ParquetTypeEnum::CurrentTokenRoyaltiesV1, - CurrentTokenRoyaltyV1::schema(), + ParquetCurrentTokenRoyaltyV1::schema(), ), ( ParquetTypeEnum::CurrentTokenV2Metadata, - CurrentTokenV2Metadata::schema(), + ParquetCurrentTokenV2Metadata::schema(), ), ( ParquetTypeEnum::TokenActivitiesV2, - TokenActivityV2::schema(), + ParquetTokenActivityV2::schema(), ), - (ParquetTypeEnum::TokenDatasV2, TokenDataV2::schema()), + (ParquetTypeEnum::TokenDatasV2, ParquetTokenDataV2::schema()), ( ParquetTypeEnum::CurrentTokenDatasV2, - CurrentTokenDataV2::schema(), + ParquetCurrentTokenDataV2::schema(), ), ( ParquetTypeEnum::TokenOwnershipsV2, - TokenOwnershipV2::schema(), + ParquetTokenOwnershipV2::schema(), ), ( ParquetTypeEnum::CurrentTokenOwnershipsV2, - CurrentTokenOwnershipV2::schema(), + ParquetCurrentTokenOwnershipV2::schema(), ), ] .into_iter() diff --git a/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs b/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs index cf59f0f59..1d4816a74 100644 --- a/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs +++ b/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs @@ -10,26 +10,17 @@ use aptos_indexer_processor_sdk::{ }; use async_trait::async_trait; use processor::{ - db::{ - common::models::token_v2_models::{ - raw_token_claims::CurrentTokenPendingClaimConvertible, - raw_v1_token_royalty::CurrentTokenRoyaltyV1Convertible, - raw_v2_token_activities::TokenActivityV2Convertible, - raw_v2_token_datas::{CurrentTokenDataV2Convertible, TokenDataV2Convertible}, - raw_v2_token_metadata::CurrentTokenV2MetadataConvertible, - raw_v2_token_ownerships::{ - CurrentTokenOwnershipV2Convertible, TokenOwnershipV2Convertible, - }, + db::common::models::{ + token_models::{ + token_claims::ParquetCurrentTokenPendingClaim, + token_royalty::ParquetCurrentTokenRoyaltyV1, tokens::TableMetadataForToken, }, - parquet::models::token_v2_models::{ - token_claims::CurrentTokenPendingClaim, - v1_token_royalty::CurrentTokenRoyaltyV1, - v2_token_activities::TokenActivityV2, - v2_token_datas::{CurrentTokenDataV2, TokenDataV2}, - v2_token_metadata::CurrentTokenV2Metadata, - v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2}, + token_v2_models::{ + v2_token_activities::ParquetTokenActivityV2, + v2_token_datas::{ParquetCurrentTokenDataV2, ParquetTokenDataV2}, + v2_token_metadata::ParquetCurrentTokenV2Metadata, + v2_token_ownerships::{ParquetCurrentTokenOwnershipV2, ParquetTokenOwnershipV2}, }, - postgres::models::token_models::tokens::TableMetadataForToken, }, processors::token_v2_processor::parse_v2_token, utils::table_flags::TableFlags, @@ -77,59 +68,61 @@ impl Processable for ParquetTokenV2Extractor { raw_current_token_claims, ) = parse_v2_token(&transactions.data, &table_handle_to_owner, &mut None).await; - let parquet_current_token_claims: Vec = raw_current_token_claims - .into_iter() 
- .map(CurrentTokenPendingClaim::from_raw) - .collect(); + let parquet_current_token_claims: Vec = + raw_current_token_claims + .into_iter() + .map(ParquetCurrentTokenPendingClaim::from) + .collect(); - let parquet_current_token_royalties_v1: Vec = + let parquet_current_token_royalties_v1: Vec = raw_current_token_royalties_v1 .into_iter() - .map(CurrentTokenRoyaltyV1::from_raw) + .map(ParquetCurrentTokenRoyaltyV1::from) .collect(); - let parquet_current_token_v2_metadata: Vec = + let parquet_current_token_v2_metadata: Vec = raw_current_token_v2_metadata .into_iter() - .map(CurrentTokenV2Metadata::from_raw) + .map(ParquetCurrentTokenV2Metadata::from) .collect(); - let parquet_token_activities_v2: Vec = raw_token_activities_v2 + let parquet_token_activities_v2: Vec = raw_token_activities_v2 .into_iter() - .map(TokenActivityV2::from_raw) + .map(ParquetTokenActivityV2::from) .collect(); - let parquet_token_datas_v2: Vec = raw_token_datas_v2 + let parquet_token_datas_v2: Vec = raw_token_datas_v2 .into_iter() - .map(TokenDataV2::from_raw) + .map(ParquetTokenDataV2::from) .collect(); - let parquet_current_token_datas_v2: Vec = raw_current_token_datas_v2 - .into_iter() - .map(CurrentTokenDataV2::from_raw) - .collect(); + let parquet_current_token_datas_v2: Vec = + raw_current_token_datas_v2 + .into_iter() + .map(ParquetCurrentTokenDataV2::from) + .collect(); - let parquet_deleted_current_token_datss_v2: Vec = + let parquet_deleted_current_token_datss_v2: Vec = raw_current_deleted_token_datas_v2 .into_iter() - .map(CurrentTokenDataV2::from_raw) + .map(ParquetCurrentTokenDataV2::from) .collect(); - let parquet_token_ownerships_v2: Vec = raw_token_ownerships_v2 + let parquet_token_ownerships_v2: Vec = raw_token_ownerships_v2 .into_iter() - .map(TokenOwnershipV2::from_raw) + .map(ParquetTokenOwnershipV2::from) .collect(); - let parquet_current_token_ownerships_v2: Vec = + let parquet_current_token_ownerships_v2: Vec = raw_current_token_ownerships_v2 .into_iter() - .map(CurrentTokenOwnershipV2::from_raw) + .map(ParquetCurrentTokenOwnershipV2::from) .collect(); - let parquet_deleted_current_token_ownerships_v2: Vec = + let parquet_deleted_current_token_ownerships_v2: Vec = raw_current_deleted_token_ownerships_v2 .into_iter() - .map(CurrentTokenOwnershipV2::from_raw) + .map(ParquetCurrentTokenOwnershipV2::from) .collect(); // Print the size of each extracted data type @@ -167,7 +160,7 @@ impl Processable for ParquetTokenV2Extractor { ); // We are merging these two tables, b/c they are essentially the same table - let mut combined_current_token_datas_v2: Vec = Vec::new(); + let mut combined_current_token_datas_v2: Vec = Vec::new(); parquet_current_token_datas_v2 .iter() .for_each(|x| combined_current_token_datas_v2.push(x.clone())); @@ -175,7 +168,8 @@ impl Processable for ParquetTokenV2Extractor { .iter() .for_each(|x| combined_current_token_datas_v2.push(x.clone())); - let mut merged_current_token_ownerships_v2: Vec = Vec::new(); + let mut merged_current_token_ownerships_v2: Vec = + Vec::new(); parquet_current_token_ownerships_v2 .iter() .for_each(|x| merged_current_token_ownerships_v2.push(x.clone())); diff --git a/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs b/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs index c67018ae6..3f60cbe2e 100644 --- a/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs +++ b/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs @@ -7,27 +7,16 @@ use aptos_indexer_processor_sdk::{ }; use 
async_trait::async_trait;
 use processor::{
-    db::{
-        common::models::token_v2_models::{
-            raw_token_claims::CurrentTokenPendingClaimConvertible,
-            raw_v1_token_royalty::CurrentTokenRoyaltyV1Convertible,
-            raw_v2_token_activities::TokenActivityV2Convertible,
-            raw_v2_token_datas::{CurrentTokenDataV2Convertible, TokenDataV2Convertible},
-            raw_v2_token_metadata::CurrentTokenV2MetadataConvertible,
-            raw_v2_token_ownerships::{
-                CurrentTokenOwnershipV2Convertible, TokenOwnershipV2Convertible,
-            },
+    db,
+    db::common::models::{
+        token_models::{
+            token_claims::PostgresCurrentTokenPendingClaim,
+            token_royalty::PostgresCurrentTokenRoyaltyV1, tokens::TableMetadataForToken,
         },
-        postgres::models::{
-            token_models::{token_claims::CurrentTokenPendingClaim, tokens::TableMetadataForToken},
-            token_v2_models::{
-                v1_token_royalty::CurrentTokenRoyaltyV1,
-                v2_collections::{CollectionV2, CurrentCollectionV2},
-                v2_token_activities::TokenActivityV2,
-                v2_token_datas::{CurrentTokenDataV2, TokenDataV2},
-                v2_token_metadata::CurrentTokenV2Metadata,
-                v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2},
-            },
+        token_v2_models::{
+            v2_collections::CurrentCollectionV2, v2_token_activities::PostgresTokenActivityV2,
+            v2_token_datas::PostgresCurrentTokenDataV2,
+            v2_token_ownerships::PostgresCurrentTokenOwnershipV2,
         },
     },
     processors::token_v2_processor::parse_v2_token,
@@ -58,18 +47,14 @@ impl TokenV2Extractor {
 impl Processable for TokenV2Extractor {
     type Input = Vec<Transaction>;
     type Output = (
-        Vec<CollectionV2>,
-        Vec<TokenDataV2>,
-        Vec<TokenOwnershipV2>,
         Vec<CurrentCollectionV2>,
-        Vec<CurrentTokenDataV2>,
-        Vec<CurrentTokenDataV2>,
-        Vec<CurrentTokenOwnershipV2>,
-        Vec<CurrentTokenOwnershipV2>,
-        Vec<TokenActivityV2>,
-        Vec<CurrentTokenV2Metadata>,
-        Vec<CurrentTokenRoyaltyV1>,
-        Vec<CurrentTokenPendingClaim>,
+        Vec<PostgresCurrentTokenDataV2>,
+        Vec<PostgresCurrentTokenDataV2>,
+        Vec<PostgresCurrentTokenOwnershipV2>,
+        Vec<PostgresCurrentTokenOwnershipV2>,
+        Vec<PostgresTokenActivityV2>,
+        Vec<PostgresCurrentTokenRoyaltyV1>,
+        Vec<PostgresCurrentTokenPendingClaim>,
     );
     type RunType = AsyncRunType;
@@ -79,18 +64,14 @@ impl Processable for TokenV2Extractor {
     ) -> Result<
         Option<
             TransactionContext<(
-                Vec<CollectionV2>,     // TODO: Deprecate this
-                Vec<TokenDataV2>,      // TODO: Deprecate this
-                Vec<TokenOwnershipV2>, // TODO: Deprecate this
                 Vec<CurrentCollectionV2>,
-                Vec<CurrentTokenDataV2>,
-                Vec<CurrentTokenDataV2>,
-                Vec<CurrentTokenOwnershipV2>,
-                Vec<CurrentTokenOwnershipV2>,
-                Vec<TokenActivityV2>,
-                Vec<CurrentTokenV2Metadata>,
-                Vec<CurrentTokenRoyaltyV1>,
-                Vec<CurrentTokenPendingClaim>,
+                Vec<PostgresCurrentTokenDataV2>,
+                Vec<PostgresCurrentTokenDataV2>,
+                Vec<PostgresCurrentTokenOwnershipV2>,
+                Vec<PostgresCurrentTokenOwnershipV2>,
+                Vec<PostgresTokenActivityV2>,
+                Vec<PostgresCurrentTokenRoyaltyV1>,
+                Vec<PostgresCurrentTokenPendingClaim>,
             )>,
         >,
         ProcessorError,
@@ -115,16 +96,16 @@ impl Processable for TokenV2Extractor {
         };

         let (
-            collections_v2,
-            raw_token_datas_v2,
-            raw_token_ownerships_v2,
+            _,
+            _,
+            _,
             current_collections_v2,
             raw_current_token_datas_v2,
             raw_current_deleted_token_datas_v2,
             raw_current_token_ownerships_v2,
             raw_current_deleted_token_ownerships_v2,
             raw_token_activities_v2,
-            raw_current_token_v2_metadata,
+            _,
             raw_current_token_royalties_v1,
             raw_current_token_claims,
         ) = parse_v2_token(
@@ -134,73 +115,55 @@ impl Processable for TokenV2Extractor {
         )
         .await;

-        let postgres_current_token_claims: Vec<CurrentTokenPendingClaim> = raw_current_token_claims
-            .into_iter()
-            .map(CurrentTokenPendingClaim::from_raw)
-            .collect();
-
-        let postgres_current_token_royalties_v1: Vec<CurrentTokenRoyaltyV1> =
-            raw_current_token_royalties_v1
+        let postgres_current_token_claims: Vec<PostgresCurrentTokenPendingClaim> =
+            raw_current_token_claims
                 .into_iter()
-                .map(CurrentTokenRoyaltyV1::from_raw)
+                .map(PostgresCurrentTokenPendingClaim::from)
                 .collect();

-        let postgres_current_token_v2_metadata: Vec<CurrentTokenV2Metadata> =
-            raw_current_token_v2_metadata
+        let postgres_current_token_royalties_v1: Vec<PostgresCurrentTokenRoyaltyV1> =
+            raw_current_token_royalties_v1
                 .into_iter()
-                .map(CurrentTokenV2Metadata::from_raw)
+                .map(PostgresCurrentTokenRoyaltyV1::from)
                 .collect();

-        let postgres_token_activities_v2: Vec<TokenActivityV2> = raw_token_activities_v2
-            .into_iter()
-            .map(TokenActivityV2::from_raw)
-            .collect();
-
-        let postgres_token_datas_v2: Vec<TokenDataV2> = raw_token_datas_v2
+        let postgres_token_activities_v2: Vec<PostgresTokenActivityV2> = raw_token_activities_v2
             .into_iter()
-            .map(TokenDataV2::from_raw)
+            .map(PostgresTokenActivityV2::from)
             .collect();

-        let postgres_current_token_datas_v2: Vec<CurrentTokenDataV2> = raw_current_token_datas_v2
-            .into_iter()
-            .map(CurrentTokenDataV2::from_raw)
-            .collect();
+        let postgres_current_token_datas_v2: Vec<PostgresCurrentTokenDataV2> =
+            raw_current_token_datas_v2
+                .into_iter()
+                .map(PostgresCurrentTokenDataV2::from)
+                .collect();

-        let postgress_current_deleted_token_datas_v2: Vec<CurrentTokenDataV2> =
+        let postgress_current_deleted_token_datas_v2: Vec<PostgresCurrentTokenDataV2> =
             raw_current_deleted_token_datas_v2
                 .into_iter()
-                .map(CurrentTokenDataV2::from_raw)
+                .map(PostgresCurrentTokenDataV2::from)
                 .collect();

-        let postgres_token_ownerships_v2: Vec<TokenOwnershipV2> = raw_token_ownerships_v2
-            .into_iter()
-            .map(TokenOwnershipV2::from_raw)
-            .collect();
-
-        let postgres_current_token_ownerships_v2: Vec<CurrentTokenOwnershipV2> =
+        let postgres_current_token_ownerships_v2: Vec<PostgresCurrentTokenOwnershipV2> =
             raw_current_token_ownerships_v2
                 .into_iter()
-                .map(CurrentTokenOwnershipV2::from_raw)
+                .map(PostgresCurrentTokenOwnershipV2::from)
                 .collect();

-        let postgres_current_deleted_token_ownerships_v2: Vec<CurrentTokenOwnershipV2> =
+        let postgres_current_deleted_token_ownerships_v2: Vec<PostgresCurrentTokenOwnershipV2> =
             raw_current_deleted_token_ownerships_v2
                 .into_iter()
-                .map(CurrentTokenOwnershipV2::from_raw)
+                .map(PostgresCurrentTokenOwnershipV2::from)
                 .collect();

         Ok(Some(TransactionContext {
             data: (
-                collections_v2,
-                postgres_token_datas_v2,
-                postgres_token_ownerships_v2,
                 current_collections_v2,
                 postgres_current_token_datas_v2,
                 postgress_current_deleted_token_datas_v2,
                 postgres_current_token_ownerships_v2,
                 postgres_current_deleted_token_ownerships_v2,
                 postgres_token_activities_v2,
-                postgres_current_token_v2_metadata,
                 postgres_current_token_royalties_v1,
                 postgres_current_token_claims,
             ),
diff --git a/rust/sdk-processor/src/steps/token_v2_processor/token_v2_storer.rs b/rust/sdk-processor/src/steps/token_v2_processor/token_v2_storer.rs
index 105e19c06..47b0692a4 100644
--- a/rust/sdk-processor/src/steps/token_v2_processor/token_v2_storer.rs
+++ b/rust/sdk-processor/src/steps/token_v2_processor/token_v2_storer.rs
@@ -12,25 +12,23 @@ use aptos_indexer_processor_sdk::{
 use async_trait::async_trait;
 use processor::{
     self,
-    db::postgres::models::{
-        token_models::token_claims::CurrentTokenPendingClaim,
+    db::common::models::{
+        token_models::{
+            token_claims::PostgresCurrentTokenPendingClaim,
+            token_royalty::PostgresCurrentTokenRoyaltyV1,
+        },
         token_v2_models::{
-            v1_token_royalty::CurrentTokenRoyaltyV1,
             v2_collections::{CollectionV2, CurrentCollectionV2},
-            v2_token_activities::TokenActivityV2,
-            v2_token_datas::{CurrentTokenDataV2, TokenDataV2},
-            v2_token_metadata::CurrentTokenV2Metadata,
-            v2_token_ownerships::{CurrentTokenOwnershipV2, TokenOwnershipV2},
+            v2_token_activities::PostgresTokenActivityV2,
+            v2_token_datas::PostgresCurrentTokenDataV2,
+            v2_token_ownerships::PostgresCurrentTokenOwnershipV2,
         },
     },
     processors::token_v2_processor::{
-        insert_collections_v2_query, insert_current_collections_v2_query,
-        insert_current_deleted_token_datas_v2_query,
+        insert_current_collections_v2_query, insert_current_deleted_token_datas_v2_query,
         insert_current_deleted_token_ownerships_v2_query, insert_current_token_claims_query,
         insert_current_token_datas_v2_query, insert_current_token_ownerships_v2_query,
-        insert_current_token_royalties_v1_query, insert_current_token_v2_metadatas_query,
-        insert_token_activities_v2_query, insert_token_datas_v2_query,
-        insert_token_ownerships_v2_query,
+        insert_current_token_royalties_v1_query, insert_token_activities_v2_query,
     },
 };
@@ -54,18 +52,14 @@ impl TokenV2Storer {
 #[async_trait]
 impl Processable for TokenV2Storer {
     type Input = (
-        Vec<CollectionV2>,
-        Vec<TokenDataV2>,
-        Vec<TokenOwnershipV2>,
         Vec<CurrentCollectionV2>,
-        Vec<CurrentTokenDataV2>,
-        Vec<CurrentTokenDataV2>,
-        Vec<CurrentTokenOwnershipV2>,
-        Vec<CurrentTokenOwnershipV2>,
-        Vec<TokenActivityV2>,
-        Vec<CurrentTokenV2Metadata>,
-        Vec<CurrentTokenRoyaltyV1>,
-        Vec<CurrentTokenPendingClaim>,
+        Vec<PostgresCurrentTokenDataV2>,
+        Vec<PostgresCurrentTokenDataV2>,
+        Vec<PostgresCurrentTokenOwnershipV2>,
+        Vec<PostgresCurrentTokenOwnershipV2>,
+        Vec<PostgresTokenActivityV2>,
+        Vec<PostgresCurrentTokenRoyaltyV1>,
+        Vec<PostgresCurrentTokenPendingClaim>,
     );
     type Output = ();
     type RunType = AsyncRunType;
@@ -73,31 +67,23 @@ impl Processable for TokenV2Storer {
     async fn process(
         &mut self,
         input: TransactionContext<(
-            Vec<CollectionV2>,
-            Vec<TokenDataV2>,
-            Vec<TokenOwnershipV2>,
             Vec<CurrentCollectionV2>,
-            Vec<CurrentTokenDataV2>,
-            Vec<CurrentTokenDataV2>,
-            Vec<CurrentTokenOwnershipV2>,
-            Vec<CurrentTokenOwnershipV2>,
-            Vec<TokenActivityV2>,
-            Vec<CurrentTokenV2Metadata>,
-            Vec<CurrentTokenRoyaltyV1>,
-            Vec<CurrentTokenPendingClaim>,
+            Vec<PostgresCurrentTokenDataV2>,
+            Vec<PostgresCurrentTokenDataV2>,
+            Vec<PostgresCurrentTokenOwnershipV2>,
+            Vec<PostgresCurrentTokenOwnershipV2>,
+            Vec<PostgresTokenActivityV2>,
+            Vec<PostgresCurrentTokenRoyaltyV1>,
+            Vec<PostgresCurrentTokenPendingClaim>,
         )>,
     ) -> Result<Option<TransactionContext<()>>, ProcessorError> {
         let (
-            collections_v2,
-            token_datas_v2,
-            token_ownerships_v2,
             current_collections_v2,
             current_token_datas_v2,
             current_deleted_token_datas_v2,
             current_token_ownerships_v2,
             current_deleted_token_ownerships_v2,
             token_activities_v2,
-            current_token_v2_metadata,
             current_token_royalties_v1,
             current_token_claims,
         ) = input.data;
@@ -108,27 +94,6 @@ impl Processable for TokenV2Storer {
             .per_table_chunk_sizes
             .clone();

-        let coll_v2 = execute_in_chunks(
-            self.conn_pool.clone(),
-            insert_collections_v2_query,
-            &collections_v2,
-            get_config_table_chunk_size::<CollectionV2>("collections_v2", &per_table_chunk_sizes),
-        );
-        let td_v2 = execute_in_chunks(
-            self.conn_pool.clone(),
-            insert_token_datas_v2_query,
-            &token_datas_v2,
-            get_config_table_chunk_size::<TokenDataV2>("token_datas_v2", &per_table_chunk_sizes),
-        );
-        let to_v2 = execute_in_chunks(
-            self.conn_pool.clone(),
-            insert_token_ownerships_v2_query,
-            &token_ownerships_v2,
-            get_config_table_chunk_size::<TokenOwnershipV2>(
-                "token_ownerships_v2",
-                &per_table_chunk_sizes,
-            ),
-        );
         let cc_v2 = execute_in_chunks(
             self.conn_pool.clone(),
             insert_current_collections_v2_query,
@@ -142,7 +107,7 @@ impl Processable for TokenV2Storer {
             self.conn_pool.clone(),
             insert_current_token_datas_v2_query,
             &current_token_datas_v2,
-            get_config_table_chunk_size::<CurrentTokenDataV2>(
+            get_config_table_chunk_size::<PostgresCurrentTokenDataV2>(
                 "current_token_datas_v2",
                 &per_table_chunk_sizes,
             ),
@@ -151,7 +116,7 @@ impl Processable for TokenV2Storer {
             self.conn_pool.clone(),
             insert_current_deleted_token_datas_v2_query,
             &current_deleted_token_datas_v2,
-            get_config_table_chunk_size::<CurrentTokenDataV2>(
+            get_config_table_chunk_size::<PostgresCurrentTokenDataV2>(
                 "current_token_datas_v2",
                 &per_table_chunk_sizes,
             ),
@@ -160,7 +125,7 @@ impl Processable for TokenV2Storer {
             self.conn_pool.clone(),
             insert_current_token_ownerships_v2_query,
             &current_token_ownerships_v2,
-            get_config_table_chunk_size::<CurrentTokenOwnershipV2>(
+            get_config_table_chunk_size::<PostgresCurrentTokenOwnershipV2>(
                 "current_token_ownerships_v2",
                 &per_table_chunk_sizes,
             ),
@@ -169,7 +134,7 @@ impl Processable for TokenV2Storer {
             self.conn_pool.clone(),
             insert_current_deleted_token_ownerships_v2_query,
             &current_deleted_token_ownerships_v2,
-            get_config_table_chunk_size::<CurrentTokenOwnershipV2>(
+            get_config_table_chunk_size::<PostgresCurrentTokenOwnershipV2>(
                 "current_token_ownerships_v2",
                 &per_table_chunk_sizes,
             ),
@@ -178,25 +143,16 @@ impl Processable for TokenV2Storer {
             self.conn_pool.clone(),
             insert_token_activities_v2_query,
             &token_activities_v2,
-            get_config_table_chunk_size::<TokenActivityV2>(
+            get_config_table_chunk_size::<PostgresTokenActivityV2>(
                 "token_activities_v2",
                 &per_table_chunk_sizes,
             ),
         );
-        let ct_v2 = execute_in_chunks(
-            self.conn_pool.clone(),
-            insert_current_token_v2_metadatas_query,
-            &current_token_v2_metadata,
-            get_config_table_chunk_size::<CurrentTokenV2Metadata>(
-                "current_token_v2_metadata",
-                &per_table_chunk_sizes,
-            ),
-        );
         let ctr_v1 = execute_in_chunks(
             self.conn_pool.clone(),
             insert_current_token_royalties_v1_query,
             &current_token_royalties_v1,
-            get_config_table_chunk_size::<CurrentTokenRoyaltyV1>(
+            get_config_table_chunk_size::<PostgresCurrentTokenRoyaltyV1>(
                 "current_token_royalty_v1",
                 &per_table_chunk_sizes,
             ),
@@ -205,41 +161,30 @@ impl Processable for TokenV2Storer {
             self.conn_pool.clone(),
             insert_current_token_claims_query,
             &current_token_claims,
-            get_config_table_chunk_size::<CurrentTokenPendingClaim>(
+            get_config_table_chunk_size::<PostgresCurrentTokenPendingClaim>(
                 "current_token_pending_claims",
                 &per_table_chunk_sizes,
             ),
         );

         let (
-            coll_v2_res,
-            td_v2_res,
-            to_v2_res,
             cc_v2_res,
             ctd_v2_res,
             cdtd_v2_res,
             cto_v2_res,
             cdto_v2_res,
             ta_v2_res,
-            ct_v2_res,
             ctr_v1_res,
             ctc_v1_res,
-        ) = tokio::join!(
-            coll_v2, td_v2, to_v2, cc_v2, ctd_v2, cdtd_v2, cto_v2, cdto_v2, ta_v2, ct_v2, ctr_v1,
-            ctc_v1
-        );
+        ) = tokio::join!(cc_v2, ctd_v2, cdtd_v2, cto_v2, cdto_v2, ta_v2, ctr_v1, ctc_v1);

         for res in [
-            coll_v2_res,
-            td_v2_res,
-            to_v2_res,
             cc_v2_res,
             ctd_v2_res,
             cdtd_v2_res,
             cto_v2_res,
             cdto_v2_res,
             ta_v2_res,
-            ct_v2_res,
             ctr_v1_res,
             ctc_v1_res,
         ] {