From 58fc72e8be849cd6c112b1597283ca5e98f7b57f Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 24 Sep 2021 15:07:01 +0300 Subject: [PATCH 01/10] Remove old V4 Channel: - primitives - remove V4 Channel - Config - remove unused values & add update with analytics - sentry - remove old V4 Channel routes --- docs/config/dev.toml | 3 +- docs/config/prod.toml | 3 +- primitives/src/channel.rs | 255 +-------------------------- primitives/src/config.rs | 16 +- primitives/src/sentry.rs | 10 +- sentry/src/db/event_aggregate.rs | 46 ----- sentry/src/lib.rs | 120 +++++-------- sentry/src/routes/channel.rs | 82 +-------- sentry/src/routes/event_aggregate.rs | 14 +- 9 files changed, 76 insertions(+), 473 deletions(-) diff --git a/docs/config/dev.toml b/docs/config/dev.toml index 6ad28e737..ccd2cf7e8 100644 --- a/docs/config/dev.toml +++ b/docs/config/dev.toml @@ -4,9 +4,10 @@ max_channels = 512 channels_find_limit = 200 wait_time = 500 -aggr_throttle = 0 events_find_limit = 100 msgs_find_limit = 10 +analytics_find_limit_v5 = 5000 +analytics_maxtime_v5 = 20000 heartbeat_time = 30000 health_threshold_promilles = 950 diff --git a/docs/config/prod.toml b/docs/config/prod.toml index 97371f420..ee9ca734b 100644 --- a/docs/config/prod.toml +++ b/docs/config/prod.toml @@ -4,9 +4,10 @@ max_channels = 512 channels_find_limit = 512 wait_time = 40000 -aggr_throttle = 40000 events_find_limit = 100 msgs_find_limit = 10 +analytics_find_limit_v5 = 5000 +analytics_maxtime_v5 = 15000 heartbeat_time = 60000 health_threshold_promilles = 970 diff --git a/primitives/src/channel.rs b/primitives/src/channel.rs index acaf6c89e..6a5d6dd46 100644 --- a/primitives/src/channel.rs +++ b/primitives/src/channel.rs @@ -92,229 +92,6 @@ impl FromStr for ChannelId { } } -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -#[deprecated = "This is the old V4 Channel"] -pub struct Channel { - pub id: ChannelId, - pub creator: ValidatorId, - pub deposit_asset: String, - pub deposit_amount: BigNum, - #[serde(with = "ts_seconds")] - pub valid_until: DateTime, - #[serde(default)] - pub targeting_rules: Rules, - pub spec: ChannelSpec, - #[serde(default)] - pub exhausted: Vec, -} - -impl Channel { - pub fn is_exhausted(&self) -> bool { - self.exhausted.len() == 2 && self.exhausted.iter().all(|&x| x) - } -} -// -// -// -// TODO REMOVE -// -// -// -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Pricing { - pub max: BigNum, - pub min: BigNum, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -#[serde(rename_all = "UPPERCASE")] -pub struct PricingBounds { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub impression: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub click: Option, -} - -impl PricingBounds { - pub fn to_vec(&self) -> Vec<(&str, Pricing)> { - let mut vec = Vec::new(); - - if let Some(pricing) = self.impression.as_ref() { - vec.push(("IMPRESSION", pricing.clone())); - } - - if let Some(pricing) = self.click.as_ref() { - vec.push(("CLICK", pricing.clone())) - } - - vec - } - - pub fn get(&self, event_type: &str) -> Option<&Pricing> { - match event_type { - "IMPRESSION" => self.impression.as_ref(), - "CLICK" => self.click.as_ref(), - _ => None, - } - } -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -#[deprecated = "This is the old V4 Channel Spec"] -pub struct ChannelSpec { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub title: 
Option, - pub validators: SpecValidators, - /// Maximum payment per impression - /// **OBSOLETE**, only used if `pricingBounds` is missing an `IMPRESSION` entry - pub max_per_impression: BigNum, - /// Minimum payment offered per impression - /// **OBSOLETE**, only used if `pricingBounds` is missing an `IMPRESSION` entry - pub min_per_impression: BigNum, - /// Event pricing bounds - #[serde(default, skip_serializing_if = "Option::is_none")] - pub pricing_bounds: Option, - /// EventSubmission object, applies to event submission (POST /channel/:id/events) - #[serde(default, skip_serializing_if = "Option::is_none")] - pub event_submission: Option, - /// A millisecond timestamp of when the campaign was created - #[serde(with = "ts_milliseconds")] - pub created: DateTime, - /// A millisecond timestamp representing the time you want this campaign to become active (optional) - /// Used by the AdViewManager & Targeting AIP#31 - #[serde( - default, - skip_serializing_if = "Option::is_none", - with = "ts_milliseconds_option" - )] - pub active_from: Option>, - /// A random number to ensure the campaignSpec hash is unique - #[serde(default, skip_serializing_if = "Option::is_none")] - pub nonce: Option, - /// A millisecond timestamp of when the campaign should enter a withdraw period - /// (no longer accept any events other than CHANNEL_CLOSE) - /// A sane value should be lower than channel.validUntil * 1000 and higher than created - /// It's recommended to set this at least one month prior to channel.validUntil * 1000 - #[serde(with = "ts_milliseconds")] - pub withdraw_period_start: DateTime, - /// An array of AdUnit (optional) - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub ad_units: Vec, - #[serde(default)] - pub targeting_rules: Rules, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -/// A (leader, follower) tuple -#[deprecated = "This is the old V4 Channel ValidatorSpecs"] -pub struct SpecValidators(ValidatorDesc, ValidatorDesc); - -#[derive(Debug)] -#[deprecated = "This is the old V4 Channel ValidatorSpecs"] -pub enum SpecValidator<'a> { - Leader(&'a ValidatorDesc), - Follower(&'a ValidatorDesc), -} - -impl<'a> SpecValidator<'a> { - pub fn validator(&self) -> &'a ValidatorDesc { - match self { - SpecValidator::Leader(validator) => validator, - SpecValidator::Follower(validator) => validator, - } - } -} - -impl SpecValidators { - pub fn new(leader: ValidatorDesc, follower: ValidatorDesc) -> Self { - Self(leader, follower) - } - - pub fn leader(&self) -> &ValidatorDesc { - &self.0 - } - - pub fn follower(&self) -> &ValidatorDesc { - &self.1 - } - - pub fn find(&self, validator_id: &ValidatorId) -> Option> { - if &self.leader().id == validator_id { - Some(SpecValidator::Leader(self.leader())) - } else if &self.follower().id == validator_id { - Some(SpecValidator::Follower(self.follower())) - } else { - None - } - } - - pub fn find_index(&self, validator_id: &ValidatorId) -> Option { - if &self.leader().id == validator_id { - Some(0) - } else if &self.follower().id == validator_id { - Some(1) - } else { - None - } - } - - pub fn iter(&self) -> Iter<'_> { - Iter::new(self) - } -} - -impl From<(ValidatorDesc, ValidatorDesc)> for SpecValidators { - fn from((leader, follower): (ValidatorDesc, ValidatorDesc)) -> Self { - Self(leader, follower) - } -} - -/// Fixed size iterator of 2, as we need an iterator in couple of occasions -impl<'a> IntoIterator for &'a SpecValidators { - type Item = &'a ValidatorDesc; - type IntoIter = Iter<'a>; - - fn into_iter(self) -> 
Self::IntoIter { - self.iter() - } -} - -pub struct Iter<'a> { - validators: &'a SpecValidators, - index: u8, -} - -impl<'a> Iter<'a> { - fn new(validators: &'a SpecValidators) -> Self { - Self { - validators, - index: 0, - } - } -} - -impl<'a> Iterator for Iter<'a> { - type Item = &'a ValidatorDesc; - - fn next(&mut self) -> Option { - match self.index { - 0 => { - self.index += 1; - - Some(self.validators.leader()) - } - 1 => { - self.index += 1; - - Some(self.validators.follower()) - } - _ => None, - } - } -} - #[derive(Debug, PartialEq, Eq)] pub enum ChannelError { InvalidArgument(String), @@ -403,29 +180,12 @@ mod test { #[cfg(feature = "postgres")] pub mod postgres { use super::ChannelId; - use super::{Channel, ChannelSpec}; - use crate::targeting::Rules; use bytes::BytesMut; use hex::FromHex; - use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, Json, ToSql, Type}; + use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; use std::error::Error; use tokio_postgres::Row; - impl From<&Row> for Channel { - fn from(row: &Row) -> Self { - Self { - id: row.get("id"), - creator: row.get("creator"), - deposit_asset: row.get("deposit_asset"), - deposit_amount: row.get("deposit_amount"), - valid_until: row.get("valid_until"), - targeting_rules: row.get::<_, Json>("targeting_rules").0, - spec: row.get::<_, Json>("spec").0, - exhausted: row.get("exhausted"), - } - } - } - impl<'a> FromSql<'a> for ChannelId { fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { let str_slice = <&str as FromSql>::from_sql(ty, raw)?; @@ -459,17 +219,4 @@ pub mod postgres { to_sql_checked!(); } - - impl ToSql for ChannelSpec { - fn to_sql( - &self, - ty: &Type, - w: &mut BytesMut, - ) -> Result> { - Json(self).to_sql(ty, w) - } - - accepts!(JSONB); - to_sql_checked!(); - } } diff --git a/primitives/src/config.rs b/primitives/src/config.rs index 784362005..d6dc9bebb 100644 --- a/primitives/src/config.rs +++ b/primitives/src/config.rs @@ -2,9 +2,7 @@ use crate::{event_submission::RateLimit, Address, BigNum, ValidatorId}; use lazy_static::lazy_static; use serde::{Deserialize, Deserializer, Serialize}; use serde_hex::{SerHex, StrictPfx}; -use std::collections::HashMap; -use std::fs; -use std::num::NonZeroU8; +use std::{collections::HashMap, fs, num::NonZeroU8}; lazy_static! { static ref DEVELOPMENT_CONFIG: Config = @@ -29,13 +27,21 @@ pub struct Config { pub channels_find_limit: u32, pub wait_time: u32, pub aggr_throttle: u32, + #[deprecated = "For V5 this should probably be part of the Analytics"] pub events_find_limit: u32, pub msgs_find_limit: u32, - pub heartbeat_time: u32, // in milliseconds + pub analytics_find_limit_v5: u32, + // in milliseconds + pub analytics_maxtime: u32, + // in milliseconds + pub heartbeat_time: u32, pub health_threshold_promilles: u32, pub health_unsignable_promilles: u32, - pub propagation_timeout: u32, + /// in milliseconds + /// set's the Client timeout for [`SentryApi`] + /// This includes requests made for propagating new messages pub fetch_timeout: u32, + /// in milliseconds pub validator_tick_timeout: u32, pub ip_rate_limit: RateLimit, // HashMap?? pub sid_rate_limit: RateLimit, // HashMap ?? 
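(For reference only — not part of the patch.) A minimal sketch of how the new analytics limits added above could be read from the TOML config. This is an illustration assuming the `toml` and `serde` crates and a small standalone struct whose fields mirror the keys added to docs/config/dev.toml; it is not the actual `Config` type in primitives/src/config.rs:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct AnalyticsLimits {
    /// maximum number of analytics entries returned per query
    analytics_find_limit_v5: u32,
    /// maximum time (in milliseconds) a single analytics query may take
    analytics_maxtime_v5: u32,
}

fn main() -> Result<(), toml::de::Error> {
    // the same keys added to dev.toml above
    let raw = "analytics_find_limit_v5 = 5000\nanalytics_maxtime_v5 = 20000\n";

    let limits: AnalyticsLimits = toml::from_str(raw)?;
    assert_eq!(limits.analytics_find_limit_v5, 5_000);
    assert_eq!(limits.analytics_maxtime_v5, 20_000);

    Ok(())
}
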
diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 011159949..5c79295ca 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -1,10 +1,4 @@ -use crate::{ - balances::BalancesState, - channel::Channel as ChannelOld, - spender::Spender, - validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}, - Address, Balances, BigNum, ChannelId, ValidatorId, IPFS, -}; +use crate::{Address, Balances, BigNum, Channel, ChannelId, IPFS, ValidatorId, balances::BalancesState, spender::Spender, validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::{collections::HashMap, fmt, hash::Hash}; @@ -241,7 +235,7 @@ pub struct ValidatorMessageResponse { #[derive(Serialize, Deserialize, Debug)] pub struct EventAggregateResponse { - pub channel: ChannelOld, + pub channel: Channel, pub events: Vec, } diff --git a/sentry/src/db/event_aggregate.rs b/sentry/src/db/event_aggregate.rs index 89b9404e7..d3a89acfb 100644 --- a/sentry/src/db/event_aggregate.rs +++ b/sentry/src/db/event_aggregate.rs @@ -2,7 +2,6 @@ use chrono::{DateTime, Utc}; use futures::pin_mut; use primitives::{ balances::UncheckedState, - channel::Channel as ChannelOld, channel_v5::Channel as ChannelV5, sentry::{EventAggregate, MessageResponse}, validator::{ApproveState, Heartbeat, NewState}, @@ -16,26 +15,6 @@ use tokio_postgres::{ use super::{DbPool, PoolError}; -pub async fn latest_approve_state( - pool: &DbPool, - channel: &ChannelOld, -) -> Result>, PoolError> { - let client = pool.get().await?; - - let select = client.prepare("SELECT \"from\", msg, received FROM validator_messages WHERE channel_id = $1 AND \"from\" = $2 AND msg ->> 'type' = 'ApproveState' ORDER BY received DESC LIMIT 1").await?; - let rows = client - .query( - &select, - &[&channel.id, &channel.spec.validators.follower().id], - ) - .await?; - - rows.get(0) - .map(MessageResponse::::try_from) - .transpose() - .map_err(PoolError::Backend) -} - pub async fn latest_approve_state_v5( pool: &DbPool, channel: &ChannelV5, @@ -53,31 +32,6 @@ pub async fn latest_approve_state_v5( .map_err(PoolError::Backend) } -pub async fn latest_new_state( - pool: &DbPool, - channel: &ChannelOld, - state_root: &str, -) -> Result>>, PoolError> { - let client = pool.get().await?; - - let select = client.prepare("SELECT \"from\", msg, received FROM validator_messages WHERE channel_id = $1 AND \"from\" = $2 AND msg ->> 'type' = 'NewState' AND msg->> 'stateRoot' = $3 ORDER BY received DESC LIMIT 1").await?; - let rows = client - .query( - &select, - &[ - &channel.id, - &channel.spec.validators.leader().id, - &state_root, - ], - ) - .await?; - - rows.get(0) - .map(MessageResponse::>::try_from) - .transpose() - .map_err(PoolError::Backend) -} - pub async fn latest_new_state_v5( pool: &DbPool, channel: &ChannelV5, diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index fe514aa92..eb71b8608 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -2,36 +2,36 @@ #![deny(rust_2018_idioms)] #![allow(deprecated)] -use crate::db::DbPool; -use crate::routes::campaign; -use crate::routes::channel::channel_status; -use crate::routes::event_aggregate::list_channel_event_aggregates; -use crate::routes::validator_message::{extract_params, list_validator_messages}; use chrono::Utc; -use db::CampaignRemaining; use hyper::{Body, Method, Request, Response, StatusCode}; use lazy_static::lazy_static; use middleware::{ auth::{AuthRequired, Authenticate}, + 
campaign::CampaignLoad, channel::{ChannelLoad, GetChannelId}, cors::{cors, Cors}, + Chain, Middleware, }; -use middleware::{campaign::CampaignLoad, Chain, Middleware}; use once_cell::sync::Lazy; -use primitives::adapter::Adapter; -use primitives::sentry::ValidationErrorResponse; -use primitives::{Config, ValidatorId}; +use primitives::{adapter::Adapter, sentry::ValidationErrorResponse, Config, ValidatorId}; use redis::aio::MultiplexedConnection; use regex::Regex; -use routes::analytics::{advanced_analytics, advertiser_analytics, analytics, publisher_analytics}; -use routes::campaign::{create_campaign, update_campaign}; -use routes::cfg::config; -use routes::channel::{ - channel_list, channel_validate, create_channel, create_validator_messages, - get_all_spender_limits, get_spender_limits, last_approved, -}; use slog::Logger; use std::collections::HashMap; +use { + db::{CampaignRemaining, DbPool}, + routes::{ + campaign, + campaign::{create_campaign, update_campaign}, + cfg::config, + channel::{ + channel_list, create_validator_messages, get_all_spender_limits, get_spender_limits, + last_approved, + }, + event_aggregate::list_channel_event_aggregates, + validator_message::{extract_params, list_validator_messages}, + }, +}; pub mod middleware; pub mod routes { @@ -139,40 +139,7 @@ impl Application { let mut response = match (req.uri().path(), req.method()) { ("/cfg", &Method::GET) => config(req, self).await, - ("/channel", &Method::POST) => create_channel(req, self).await, ("/channel/list", &Method::GET) => channel_list(req, self).await, - ("/channel/validate", &Method::POST) => channel_validate(req, self).await, - - ("/analytics", &Method::GET) => analytics(req, self).await, - ("/analytics/advanced", &Method::GET) => { - let req = match AuthRequired.call(req, self).await { - Ok(req) => req, - Err(error) => { - return map_response_error(error); - } - }; - - advanced_analytics(req, self).await - } - ("/analytics/for-advertiser", &Method::GET) => { - let req = match AuthRequired.call(req, self).await { - Ok(req) => req, - Err(error) => { - return map_response_error(error); - } - }; - advertiser_analytics(req, self).await - } - ("/analytics/for-publisher", &Method::GET) => { - let req = match AuthRequired.call(req, self).await { - Ok(req) => req, - Err(error) => { - return map_response_error(error); - } - }; - - publisher_analytics(req, self).await - } // For creating campaigns ("/v5/campaign", &Method::POST) => { let req = match AuthRequired.call(req, self).await { @@ -246,19 +213,27 @@ async fn analytics_router( mut req: Request, app: &Application, ) -> Result, ResponseError> { + use routes::analytics::{advanced_analytics, advertiser_analytics, analytics, publisher_analytics}; + let (route, method) = (req.uri().path(), req.method()); - // TODO AIP#61: Add routes for: - // - POST /channel/:id/pay - // #[serde(rename_all = "camelCase")] - // Pay { payout: BalancesMap }, - // - // - GET /channel/:id/spender/:addr - // - GET /channel/:id/spender/all - // - POST /channel/:id/spender/:addr - // - GET /channel/:id/get-leaf - match *method { - Method::GET => { + match (route, method) { + ("/analytics", &Method::GET) => analytics(req, app).await, + ("/analytics/advanced", &Method::GET) => { + let req = AuthRequired.call(req, app).await?; + + advanced_analytics(req, app).await + } + ("/analytics/for-advertiser", &Method::GET) => { + let req = AuthRequired.call(req, app).await?; + advertiser_analytics(req, app).await + } + ("/analytics/for-publisher", &Method::GET) => { + let req = 
AuthRequired.call(req, app).await?; + + publisher_analytics(req, app).await + } + (route, &Method::GET) => { if let Some(caps) = ANALYTICS_BY_CHANNEL_ID.captures(route) { let param = RouteParams(vec![caps .get(1) @@ -304,8 +279,8 @@ async fn analytics_router( } else { Err(ResponseError::NotFound) } - } - _ => Err(ResponseError::NotFound), + }, + _ => Err(ResponseError::NotFound) } } @@ -315,6 +290,15 @@ async fn channels_router( ) -> Result, ResponseError> { let (path, method) = (req.uri().path().to_owned(), req.method()); + // TODO AIP#61: Add routes for: + // - POST /channel/:id/pay + // #[serde(rename_all = "camelCase")] + // Pay { payout: BalancesMap }, + // + // - GET /channel/:id/spender/:addr + // - GET /channel/:id/spender/all + // - POST /channel/:id/spender/:addr + // - GET /channel/:id/get-leaf if let (Some(caps), &Method::GET) = (LAST_APPROVED_BY_CHANNEL_ID.captures(&path), method) { let param = RouteParams(vec![caps .get(1) @@ -324,16 +308,6 @@ async fn channels_router( req = ChannelLoad.call(req, app).await?; last_approved(req, app).await - } else if let (Some(caps), &Method::GET) = - (CHANNEL_STATUS_BY_CHANNEL_ID.captures(&path), method) - { - let param = RouteParams(vec![caps - .get(1) - .map_or("".to_string(), |m| m.as_str().to_string())]); - req.extensions_mut().insert(param); - - req = ChannelLoad.call(req, app).await?; - channel_status(req, app).await } else if let (Some(caps), &Method::GET) = (CHANNEL_VALIDATOR_MESSAGES.captures(&path), method) { let param = RouteParams(vec![caps diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index a423cd184..0a3e5227e 100644 --- a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -10,8 +10,7 @@ use hyper::{Body, Request, Response}; use primitives::{ adapter::Adapter, balances::{CheckedState, UncheckedState}, - channel::Channel as ChannelOld, - channel_v5::Channel as ChannelV5, + Channel, config::TokenInfo, sentry::{ channel_list::ChannelListQuery, AllSpendersResponse, LastApproved, LastApprovedQuery, @@ -25,64 +24,6 @@ use slog::{error, Logger}; use std::{collections::HashMap, str::FromStr}; use tokio_postgres::error::SqlState; -pub async fn channel_status( - req: Request, - _: &Application, -) -> Result, ResponseError> { - use serde::Serialize; - #[derive(Serialize)] - struct ChannelStatusResponse<'a> { - channel: &'a ChannelOld, - } - - let channel = req - .extensions() - .get::() - .expect("Request should have Channel"); - - let response = ChannelStatusResponse { channel }; - - Ok(success_response(serde_json::to_string(&response)?)) -} - -#[deprecated = "V5 Channel no longer needs creation of channel route"] -pub async fn create_channel( - req: Request, - app: &Application, -) -> Result, ResponseError> { - let body = hyper::body::to_bytes(req.into_body()).await?; - - let channel = serde_json::from_slice::(&body) - .map_err(|e| ResponseError::FailedValidation(e.to_string()))?; - - // TODO AIP#61: No longer needed, remove! 
- // if let Err(e) = app.adapter.validate_channel(&channel).await { - // return Err(ResponseError::BadRequest(e.to_string())); - // } - - let error_response = ResponseError::BadRequest("err occurred; please try again later".into()); - - match insert_channel(&app.pool, channel).await { - Err(error) => { - error!(&app.logger, "{}", &error; "module" => "create_channel"); - - match error { - PoolError::Backend(error) if error.code() == Some(&SqlState::UNIQUE_VIOLATION) => { - Err(ResponseError::Conflict( - "channel already exists".to_string(), - )) - } - _ => Err(error_response), - } - } - _ => Ok(()), - }?; - - let create_response = SuccessResponse { success: true }; - - Ok(success_response(serde_json::to_string(&create_response)?)) -} - pub async fn channel_list( req: Request, app: &Application, @@ -104,17 +45,6 @@ pub async fn channel_list( Ok(success_response(serde_json::to_string(&list_response)?)) } -pub async fn channel_validate( - req: Request, - _: &Application, -) -> Result, ResponseError> { - let body = hyper::body::to_bytes(req.into_body()).await?; - let _channel = serde_json::from_slice::(&body) - .map_err(|e| ResponseError::FailedValidation(e.to_string()))?; - let create_response = SuccessResponse { success: true }; - Ok(success_response(serde_json::to_string(&create_response)?)) -} - pub async fn last_approved( req: Request, app: &Application, @@ -190,7 +120,7 @@ pub async fn create_validator_messages( let channel = req .extensions() - .get::() + .get::() .expect("Request should have Channel") .to_owned(); @@ -222,7 +152,7 @@ async fn create_or_update_spendable_document( adapter: &impl Adapter, token_info: &TokenInfo, pool: DbPool, - channel: &ChannelV5, + channel: &Channel, spender: Address, ) -> Result { insert_channel(&pool, *channel).await?; @@ -278,7 +208,7 @@ pub async fn get_spender_limits( let channel = req .extensions() - .get::() + .get::() .expect("Request should have Channel") .to_owned(); @@ -334,7 +264,7 @@ pub async fn get_all_spender_limits( ) -> Result, ResponseError> { let channel = req .extensions() - .get::() + .get::() .expect("Request should have Channel") .to_owned(); @@ -385,7 +315,7 @@ pub async fn get_all_spender_limits( async fn get_corresponding_new_state( pool: &DbPool, logger: &Logger, - channel: &ChannelV5, + channel: &Channel, ) -> Result>, ResponseError> { let approve_state = match latest_approve_state_v5(pool, channel).await? 
{ Some(approve_state) => approve_state, diff --git a/sentry/src/routes/event_aggregate.rs b/sentry/src/routes/event_aggregate.rs index 312697259..ac0dcb314 100644 --- a/sentry/src/routes/event_aggregate.rs +++ b/sentry/src/routes/event_aggregate.rs @@ -3,7 +3,7 @@ use hyper::{Body, Request, Response}; use serde::Deserialize; use primitives::{ - adapter::Adapter, channel::Channel as ChannelOld, sentry::EventAggregateResponse, + adapter::Adapter, channel_v5::Channel, sentry::EventAggregateResponse, }; use crate::{success_response, Application, Auth, ResponseError}; @@ -14,14 +14,14 @@ pub struct EventAggregatesQuery { #[allow(dead_code)] after: Option>, } - +#[deprecated = "V5 - Double check what is need from the event aggregates from V4"] pub async fn list_channel_event_aggregates( req: Request, _app: &Application, ) -> Result, ResponseError> { let channel = req .extensions() - .get::() + .get::() .expect("Request should have Channel"); let auth = req @@ -32,11 +32,7 @@ pub async fn list_channel_event_aggregates( let _query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - let _from = if channel.spec.validators.find(&auth.uid).is_some() { - None - } else { - Some(auth.uid) - }; + let _from = channel.find_validator(auth.uid); let event_aggregates = vec![]; // let event_aggregates = list_event_aggregates( @@ -49,7 +45,7 @@ pub async fn list_channel_event_aggregates( // .await?; let response = EventAggregateResponse { - channel: channel.clone(), + channel: channel, events: event_aggregates, }; From 195d119141857f3af1e40fe7a14ecd907dfd5619 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 24 Sep 2021 15:07:34 +0300 Subject: [PATCH 02/10] primitives - adapter - remove unused Error variants --- primitives/src/adapter.rs | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/primitives/src/adapter.rs b/primitives/src/adapter.rs index 5879c3be8..ae71b90ff 100644 --- a/primitives/src/adapter.rs +++ b/primitives/src/adapter.rs @@ -1,5 +1,5 @@ use crate::{ - channel::ChannelError, channel_v5::Channel, Address, BigNum, DomainError, ValidatorId, + Channel, Address, BigNum, ValidatorId, }; use async_trait::async_trait; use serde::{Deserialize, Serialize}; @@ -14,11 +14,9 @@ pub type Deposit = crate::Deposit; pub enum Error { Authentication(String), Authorization(String), - InvalidChannel(ChannelError), /// Adapter specific errors // Since we don't know the size of the Adapter Error we use a Box to limit the size of this enum Adapter(Box), - Domain(DomainError), /// You need to `.unlock()` the wallet first LockedWallet, } @@ -36,20 +34,12 @@ impl fmt::Display for Error { match self { Error::Authentication(error) => write!(f, "Authentication: {}", error), Error::Authorization(error) => write!(f, "Authorization: {}", error), - Error::InvalidChannel(error) => write!(f, "{}", error), Error::Adapter(error) => write!(f, "Adapter: {}", *error), - Error::Domain(error) => write!(f, "Domain: {}", error), Error::LockedWallet => write!(f, "You must `.unlock()` the wallet first"), } } } -impl From for Error { - fn from(err: DomainError) -> Error { - Error::Domain(err) - } -} - pub struct DummyAdapterOptions { pub dummy_identity: ValidatorId, pub dummy_auth: HashMap, From f30af9d46b1fc1b1f9ce73deb7df535ef7a0a91f Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 24 Sep 2021 16:29:58 +0300 Subject: [PATCH 03/10] =?UTF-8?q?Remove=20old=20v4=20Channel=20?= =?UTF-8?q?=F0=9F=99=8C=20-=20Adapter=20&=20error=20imporvements=20in=20ad?= =?UTF-8?q?apters?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- adapter/src/dummy.rs | 2 +- adapter/src/ethereum.rs | 32 ++-- adapter/src/ethereum/error.rs | 19 ++- adapter/src/ethereum/test_utils.rs | 2 +- primitives/src/adapter.rs | 4 +- primitives/src/balances_map.rs | 12 +- primitives/src/big_num.rs | 13 +- primitives/src/campaign.rs | 4 +- primitives/src/channel.rs | 208 +++++++++++++++++++++-- primitives/src/channel_v5.rs | 205 ---------------------- primitives/src/eth_checksum.rs | 2 +- primitives/src/lib.rs | 5 +- primitives/src/merkle_tree.rs | 12 +- primitives/src/sentry.rs | 15 +- primitives/src/spender.rs | 2 +- primitives/src/targeting.rs | 3 +- primitives/src/targeting/eval_test.rs | 10 +- primitives/src/targeting/input.rs | 6 +- primitives/src/util/tests/prep_db.rs | 49 ++---- sentry/src/db/channel.rs | 2 +- sentry/src/db/event_aggregate.rs | 2 +- sentry/src/routes/campaign.rs | 2 +- sentry/src/routes/event_aggregate.rs | 2 +- validator_worker/src/channel.rs | 2 +- validator_worker/src/follower.rs | 2 +- validator_worker/src/leader.rs | 2 +- validator_worker/src/main.rs | 2 +- validator_worker/src/sentry_interface.rs | 4 +- 28 files changed, 298 insertions(+), 327 deletions(-) delete mode 100644 primitives/src/channel_v5.rs diff --git a/adapter/src/dummy.rs b/adapter/src/dummy.rs index b80f4f1fa..1026109a6 100644 --- a/adapter/src/dummy.rs +++ b/adapter/src/dummy.rs @@ -5,7 +5,7 @@ use primitives::{ Adapter, AdapterErrorKind, AdapterResult, Deposit, DummyAdapterOptions, Error as AdapterError, Session, }, - channel_v5::Channel, + channel::Channel, config::Config, Address, ChannelId, ToETHChecksum, ValidatorId, }; diff --git a/adapter/src/ethereum.rs b/adapter/src/ethereum.rs index 0e652403b..fb72ac74d 100644 --- a/adapter/src/ethereum.rs +++ b/adapter/src/ethereum.rs @@ -6,11 +6,10 @@ use ethstore::{ ethkey::{public_to_address, recover, verify_address, Message, Password, Signature}, SafeAccount, }; -use futures::TryFutureExt; use lazy_static::lazy_static; use primitives::{ adapter::{Adapter, AdapterResult, Deposit, Error as AdapterError, KeystoreOptions, Session}, - channel_v5::Channel, + channel::Channel, config::Config, Address, BigNum, ToETHChecksum, ValidatorId, }; @@ -18,6 +17,7 @@ use reqwest::Client; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::{convert::TryFrom, fs, str::FromStr}; +use thiserror::Error; use tiny_keccak::Keccak; use web3::{ contract::{Contract, Options}, @@ -116,8 +116,7 @@ impl EthereumAdapter { let transport = web3::transports::Http::new(&config.ethereum_network).map_err(Error::Web3)?; let web3 = web3::Web3::new(transport); - let relayer = - RelayerClient::new(&config.ethereum_adapter_relayer).map_err(Error::RelayerClient)?; + let relayer = RelayerClient::new(&config.ethereum_adapter_relayer)?; Ok(Self { address, @@ -307,7 +306,7 @@ impl Adapter for EthereumAdapter { .await .map_err(Error::ContractQuerying)?; - let on_outpace = BigNum::from_str(&on_outpace.to_string())?; + let on_outpace = BigNum::from_str(&on_outpace.to_string()).map_err(Error::BigNumParsing)?; let counterfactual_address = get_counterfactual_address( sweeper_address, @@ -326,7 +325,10 @@ impl Adapter for EthereumAdapter { .await .map_err(Error::ContractQuerying)?; - let still_on_create2: BigNum = still_on_create2.to_string().parse()?; + let still_on_create2: BigNum = still_on_create2 + .to_string() + .parse() + .map_err(Error::BigNumParsing)?; let token_info = self .config @@ -357,8 +359,12 @@ struct RelayerClient { relayer_url: String, } 
+#[derive(Debug, Error)] +#[error(transparent)] +pub struct RelayerError(#[from] pub reqwest::Error); + impl RelayerClient { - pub fn new(relayer_url: &str) -> Result { + pub fn new(relayer_url: &str) -> Result { let client = Client::builder().build()?; Ok(Self { @@ -372,8 +378,7 @@ impl RelayerClient { &self, from: &ValidatorId, identity: &ValidatorId, - ) -> Result> { - use reqwest::Response; + ) -> Result { use std::collections::HashMap; let relay_url = format!( "{}/identity/by-owner/{}", @@ -381,13 +386,8 @@ impl RelayerClient { from.to_checksum() ); - let identities_owned: HashMap = self - .client - .get(&relay_url) - .send() - .and_then(|res: Response| res.json()) - .await - .map_err(Error::RelayerClient)?; + let identities_owned: HashMap = + self.client.get(&relay_url).send().await?.json().await?; let has_privileges = identities_owned .get(identity) diff --git a/adapter/src/ethereum/error.rs b/adapter/src/ethereum/error.rs index 5826115ea..5edf416ca 100644 --- a/adapter/src/ethereum/error.rs +++ b/adapter/src/ethereum/error.rs @@ -1,16 +1,19 @@ use primitives::{ adapter::{AdapterErrorKind, Error as AdapterError}, address::Error as AddressError, + big_num::ParseBigIntError, Address, ChannelId, }; use std::fmt; +use super::RelayerError; + #[derive(Debug)] pub enum Error { Keystore(KeystoreError), WalletUnlock(ethstore::Error), Web3(web3::Error), - RelayerClient(reqwest::Error), + RelayerClient(RelayerError), /// When the ChannelId that we get from hashing the EthereumChannel with the contract address /// does not align with the provided Channel InvalidChannelId { @@ -27,6 +30,7 @@ pub enum Error { VerifyAddress(VerifyError), TokenNotWhitelisted(Address), InvalidDepositAsset(AddressError), + BigNumParsing(ParseBigIntError), } impl std::error::Error for Error {} @@ -51,10 +55,23 @@ impl fmt::Display for Error { VerifyAddress(err) => write!(f, "Verifying address: {}", err), TokenNotWhitelisted(deposit_asset) => write!(f, "Token not whitelisted: {}", deposit_asset), InvalidDepositAsset(err) => write!(f, "Deposit asset {} is invalid", err), + BigNumParsing(err) => write!(f, "Parsing BigNum: {}", err), } } } +impl From for Error { + fn from(relayer_error: RelayerError) -> Self { + Error::RelayerClient(relayer_error) + } +} + +impl From for AdapterError { + fn from(relayer_error: RelayerError) -> Self { + AdapterError::Adapter(Box::new(Error::RelayerClient(relayer_error))) + } +} + #[derive(Debug)] /// Error returned on `eth_adapter.verify()` when the combination of /// (signer, state_root, signature) **doesn't align**. 
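(For reference only — not part of the patch.) A small self-contained sketch of the thiserror pattern introduced for `RelayerError` above, using a hypothetical stand-in `Inner` error instead of `reqwest::Error`: `#[from]` generates the `From` impl that lets `?` convert the wrapped error, and `#[error(transparent)]` forwards `Display` and `source` to it:

use thiserror::Error;

// Stand-in for reqwest::Error in this sketch.
#[derive(Debug, Error)]
#[error("inner failure: {0}")]
struct Inner(String);

// Mirrors the new RelayerError: a transparent newtype over the underlying error.
#[derive(Debug, Error)]
#[error(transparent)]
struct RelayerError(#[from] Inner);

fn fetch() -> Result<(), Inner> {
    Err(Inner("connection refused".into()))
}

fn check_authority() -> Result<(), RelayerError> {
    // `?` uses the From<Inner> impl generated by #[from]
    fetch()?;
    Ok(())
}

fn main() {
    // Display is forwarded to the wrapped error because of #[error(transparent)]
    let err = check_authority().unwrap_err();
    assert_eq!(err.to_string(), "inner failure: connection refused");
}
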
diff --git a/adapter/src/ethereum/test_utils.rs b/adapter/src/ethereum/test_utils.rs index da2e40c57..840597d89 100644 --- a/adapter/src/ethereum/test_utils.rs +++ b/adapter/src/ethereum/test_utils.rs @@ -10,7 +10,7 @@ use web3::{ use primitives::{ adapter::KeystoreOptions, - channel_v5::{Channel, Nonce}, + channel::{Channel, Nonce}, config::{configuration, TokenInfo}, Address, BigNum, ValidatorId, }; diff --git a/primitives/src/adapter.rs b/primitives/src/adapter.rs index ae71b90ff..3abcaba9b 100644 --- a/primitives/src/adapter.rs +++ b/primitives/src/adapter.rs @@ -1,6 +1,4 @@ -use crate::{ - Channel, Address, BigNum, ValidatorId, -}; +use crate::{Address, BigNum, Channel, ValidatorId}; use async_trait::async_trait; use serde::{Deserialize, Serialize}; use std::{collections::HashMap, convert::From, fmt}; diff --git a/primitives/src/balances_map.rs b/primitives/src/balances_map.rs index 9836fd596..34a76eca0 100644 --- a/primitives/src/balances_map.rs +++ b/primitives/src/balances_map.rs @@ -101,8 +101,8 @@ mod test { #[test] fn test_unified_map_de_serialization() { let unified_map: UnifiedMap = vec![ - (ADDRESSES["leader"].clone(), UnifiedNum::from(50_u64)), - (ADDRESSES["follower"].clone(), UnifiedNum::from(100_u64)), + (ADDRESSES["leader"], UnifiedNum::from(50_u64)), + (ADDRESSES["follower"], UnifiedNum::from(100_u64)), ] .into_iter() .collect(); @@ -124,8 +124,8 @@ mod test { #[test] fn test_balances_map_de_serialization() { let balances_map: BalancesMap = vec![ - (ADDRESSES["leader"].clone(), BigNum::from(50_u64)), - (ADDRESSES["follower"].clone(), BigNum::from(100_u64)), + (ADDRESSES["leader"], BigNum::from(50_u64)), + (ADDRESSES["follower"], BigNum::from(100_u64)), ] .into_iter() .collect(); @@ -157,9 +157,9 @@ mod test { serde_json::from_value(json).expect("Should deserialize it"); let expected_deserialized: BalancesMap = vec![ - (ADDRESSES["leader"].clone(), BigNum::from(50_u64)), + (ADDRESSES["leader"], BigNum::from(50_u64)), // only the second should be accepted, as it appears second in the string and it's the latest one - (ADDRESSES["follower"].clone(), BigNum::from(20_u64)), + (ADDRESSES["follower"], BigNum::from(20_u64)), ] .into_iter() .collect(); diff --git a/primitives/src/big_num.rs b/primitives/src/big_num.rs index 0633baea9..1c11306ee 100644 --- a/primitives/src/big_num.rs +++ b/primitives/src/big_num.rs @@ -12,6 +12,8 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer}; use crate::UnifiedNum; +/// Re-export of the [`num::bigint::ParseBigIntError`] when using [`BigNum`] +pub use num::bigint::ParseBigIntError; #[derive( Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, NumOps, One, Zero, Num, Default, )] @@ -24,7 +26,7 @@ pub struct BigNum( ); impl BigNum { - pub fn new(num: BigUint) -> Result { + pub fn new(num: BigUint) -> Result { Ok(Self(num)) } @@ -240,18 +242,15 @@ impl Mul<&Ratio> for BigNum { } impl TryFrom<&str> for BigNum { - type Error = super::DomainError; + type Error = ParseBigIntError; fn try_from(num: &str) -> Result { - let big_uint = BigUint::from_str(num) - .map_err(|err| super::DomainError::InvalidArgument(err.to_string()))?; - - Ok(Self(big_uint)) + BigUint::from_str(num).map(Self) } } impl FromStr for BigNum { - type Err = super::DomainError; + type Err = ParseBigIntError; fn from_str(s: &str) -> Result { BigNum::try_from(s) diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index a51c6dfc9..75684da9b 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -1,5 +1,5 @@ use crate::{ - 
channel_v5::Channel, targeting::Rules, AdUnit, Address, EventSubmission, UnifiedNum, Validator, + channel::Channel, targeting::Rules, AdUnit, Address, EventSubmission, UnifiedNum, Validator, ValidatorDesc, ValidatorId, }; @@ -358,7 +358,7 @@ pub mod validators { #[cfg(feature = "postgres")] mod postgres { - use crate::channel_v5::Channel; + use crate::channel::Channel; use super::{Active, Campaign, CampaignId, PricingBounds, Validators}; use bytes::BytesMut; diff --git a/primitives/src/channel.rs b/primitives/src/channel.rs index 6a5d6dd46..4504add81 100644 --- a/primitives/src/channel.rs +++ b/primitives/src/channel.rs @@ -1,16 +1,14 @@ -use std::error::Error; -use std::fmt; -use std::ops::Deref; -use std::str::FromStr; +use std::{error::Error, fmt, ops::Deref, str::FromStr}; + +use ethereum_types::U256; -use chrono::serde::{ts_milliseconds, ts_milliseconds_option, ts_seconds}; -use chrono::{DateTime, Utc}; use serde::{Deserialize, Deserializer, Serialize}; use serde_hex::{SerHex, StrictPfx}; -use crate::{targeting::Rules, AdUnit, BigNum, EventSubmission, ValidatorDesc, ValidatorId}; use hex::{FromHex, FromHexError}; +use crate::{Address, Validator, ValidatorId}; + #[derive(Serialize, Deserialize, PartialEq, Eq, Copy, Clone, Hash)] #[serde(transparent)] pub struct ChannelId( @@ -92,6 +90,108 @@ impl FromStr for ChannelId { } } +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[serde(rename_all = "camelCase")] +pub struct Channel { + pub leader: ValidatorId, + pub follower: ValidatorId, + pub guardian: Address, + pub token: Address, + pub nonce: Nonce, +} + +impl Channel { + pub fn id(&self) -> ChannelId { + use ethabi::{encode, Token}; + use tiny_keccak::{Hasher, Keccak}; + + let tokens = [ + Token::Address(self.leader.as_bytes().into()), + Token::Address(self.follower.as_bytes().into()), + Token::Address(self.guardian.as_bytes().into()), + Token::Address(self.token.as_bytes().into()), + Token::FixedBytes(self.nonce.to_bytes().to_vec()), + ]; + + let mut channel_id = [0_u8; 32]; + let mut hasher = Keccak::v256(); + hasher.update(&encode(&tokens)); + hasher.finalize(&mut channel_id); + + ChannelId::from(channel_id) + } + + pub fn find_validator(&self, validator: ValidatorId) -> Option> { + match (self.leader, self.follower) { + (leader, _) if leader == validator => Some(Validator::Leader(leader)), + (_, follower) if follower == validator => Some(Validator::Follower(follower)), + _ => None, + } + } +} + +/// The nonce is an Unsigned 256 number +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct Nonce(pub U256); + +impl Nonce { + /// In Big-Endian + pub fn to_bytes(self) -> [u8; 32] { + // the impl of From uses BigEndian + self.0.into() + } +} + +impl fmt::Display for Nonce { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0.to_string()) + } +} + +impl fmt::Debug for Nonce { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Nonce({})", self.0.to_string()) + } +} + +impl From for Nonce { + fn from(value: u64) -> Self { + Self(U256::from(value)) + } +} + +impl From for Nonce { + fn from(value: u32) -> Self { + Self(U256::from(value)) + } +} + +// The U256 implementation deserializes the value from a hex String value with a prefix `0x...` +// This is why we we need to impl it our selves +impl<'de> Deserialize<'de> for Nonce { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let string = String::deserialize(deserializer)?; + + U256::from_dec_str(&string) + 
.map_err(serde::de::Error::custom) + .map(Nonce) + } +} + +// The U256 implementation serializes the value as a hex String value with a prefix `0x...` +// This is why we we need to impl it our selves +impl Serialize for Nonce { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.0.to_string().serialize(serializer) + } +} + #[derive(Debug, PartialEq, Eq)] pub enum ChannelError { InvalidArgument(String), @@ -140,6 +240,8 @@ impl Error for ChannelError { mod test { use super::*; + use serde_json::{from_value, to_value, Value}; + #[test] fn test_channel_id_() { let hex_string = "061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088"; @@ -160,9 +262,9 @@ mod test { // Deserialization from JSON let de_hex_json = - serde_json::from_value::(hex_value.clone()).expect("Should deserialize"); - let de_prefixed_json = serde_json::from_value::(prefixed_value.clone()) - .expect("Should deserialize"); + serde_json::from_value::(hex_value).expect("Should deserialize"); + let de_prefixed_json = + serde_json::from_value::(prefixed_value).expect("Should deserialize"); assert_eq!(de_hex_json, expected_id); assert_eq!(de_prefixed_json, expected_id); @@ -175,11 +277,25 @@ mod test { serde_json::Value::String(prefixed_string) ) } + + #[test] + fn de_serializes_nonce() { + let nonce_str = "12345"; + let json = Value::String(nonce_str.into()); + + let nonce: Nonce = from_value(json.clone()).expect("Should deserialize a Nonce"); + let expected_nonce = Nonce::from(12345_u64); + + assert_eq!(&expected_nonce, &nonce); + assert_eq!(json, to_value(nonce).expect("Should serialize a Nonce")); + assert_eq!(nonce_str, &nonce.to_string()); + assert_eq!("Nonce(12345)", &format!("{:?}", nonce)); + } } #[cfg(feature = "postgres")] pub mod postgres { - use super::ChannelId; + use super::{Channel, ChannelId, Nonce}; use bytes::BytesMut; use hex::FromHex; use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; @@ -219,4 +335,74 @@ pub mod postgres { to_sql_checked!(); } + + impl<'a> FromSql<'a> for Nonce { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let nonce_string = String::from_sql(ty, raw)?; + + Ok(serde_json::from_value(serde_json::Value::String( + nonce_string, + ))?) 
+ } + + accepts!(VARCHAR); + } + + impl ToSql for Nonce { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + self.0.to_string().to_sql(ty, w) + } + + accepts!(VARCHAR); + to_sql_checked!(); + } + + impl From<&Row> for Channel { + fn from(row: &Row) -> Self { + Self { + leader: row.get("leader"), + follower: row.get("follower"), + guardian: row.get("guardian"), + token: row.get("token"), + nonce: row.get("nonce"), + } + } + } + + #[cfg(test)] + mod test { + use crate::{channel::Nonce, util::tests::prep_db::postgres::POSTGRES_POOL}; + #[tokio::test] + async fn nonce_to_from_sql() { + let client = POSTGRES_POOL.get().await.unwrap(); + + let nonce = Nonce::from(123_456_789_u64); + let sql_type = "VARCHAR"; + + // from SQL + { + let row_nonce = client + .query_one(&*format!("SELECT '{}'::{}", nonce, sql_type), &[]) + .await + .unwrap() + .get(0); + + assert_eq!(&nonce, &row_nonce); + } + + // to SQL + { + let row_nonce = client + .query_one(&*format!("SELECT $1::{}", sql_type), &[&nonce]) + .await + .unwrap() + .get(0); + assert_eq!(&nonce, &row_nonce); + } + } + } } diff --git a/primitives/src/channel_v5.rs b/primitives/src/channel_v5.rs deleted file mode 100644 index 6334b31ca..000000000 --- a/primitives/src/channel_v5.rs +++ /dev/null @@ -1,205 +0,0 @@ -use ethereum_types::U256; -use serde::{Deserialize, Serialize}; -use std::fmt; - -use crate::{Address, ChannelId, Validator, ValidatorId}; - -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] -#[serde(rename_all = "camelCase")] -pub struct Channel { - pub leader: ValidatorId, - pub follower: ValidatorId, - pub guardian: Address, - pub token: Address, - pub nonce: Nonce, -} - -impl Channel { - pub fn id(&self) -> ChannelId { - use ethabi::{encode, Token}; - use tiny_keccak::{Hasher, Keccak}; - - let tokens = [ - Token::Address(self.leader.as_bytes().into()), - Token::Address(self.follower.as_bytes().into()), - Token::Address(self.guardian.as_bytes().into()), - Token::Address(self.token.as_bytes().into()), - Token::FixedBytes(self.nonce.to_bytes().to_vec()), - ]; - - let mut channel_id = [0_u8; 32]; - let mut hasher = Keccak::v256(); - hasher.update(&encode(&tokens)); - hasher.finalize(&mut channel_id); - - ChannelId::from(channel_id) - } - - pub fn find_validator(&self, validator: ValidatorId) -> Option> { - match (self.leader, self.follower) { - (leader, _) if leader == validator => Some(Validator::Leader(leader)), - (_, follower) if follower == validator => Some(Validator::Follower(follower)), - _ => None, - } - } -} - -/// The nonce is an Unsigned 256 number -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct Nonce(pub U256); - -impl Nonce { - /// In Big-Endian - pub fn to_bytes(self) -> [u8; 32] { - // the impl of From uses BigEndian - self.0.into() - } -} - -impl fmt::Display for Nonce { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.0.to_string()) - } -} - -impl fmt::Debug for Nonce { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Nonce({})", self.0.to_string()) - } -} - -impl From for Nonce { - fn from(value: u64) -> Self { - Self(U256::from(value)) - } -} - -impl From for Nonce { - fn from(value: u32) -> Self { - Self(U256::from(value)) - } -} - -// The U256 implementation deserializes the value from a hex String value with a prefix `0x...` -// This is why we we need to impl it our selves -impl<'de> Deserialize<'de> for Nonce { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let 
string = String::deserialize(deserializer)?; - - U256::from_dec_str(&string) - .map_err(serde::de::Error::custom) - .map(Nonce) - } -} - -// The U256 implementation serializes the value as a hex String value with a prefix `0x...` -// This is why we we need to impl it our selves -impl Serialize for Nonce { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - self.0.to_string().serialize(serializer) - } -} -#[cfg(test)] -mod test { - use super::*; - use serde_json::{from_value, to_value, Value}; - - #[test] - fn de_serializes_nonce() { - let nonce_str = "12345"; - let json = Value::String(nonce_str.into()); - - let nonce: Nonce = from_value(json.clone()).expect("Should deserialize a Nonce"); - let expected_nonce = Nonce::from(12345_u64); - - assert_eq!(&expected_nonce, &nonce); - assert_eq!(json, to_value(nonce).expect("Should serialize a Nonce")); - assert_eq!(nonce_str, &nonce.to_string()); - assert_eq!("Nonce(12345)", &format!("{:?}", nonce)); - } -} - -#[cfg(feature = "postgres")] -mod postgres { - use super::{Channel, Nonce}; - use bytes::BytesMut; - use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; - use std::error::Error; - use tokio_postgres::Row; - - impl<'a> FromSql<'a> for Nonce { - fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { - let nonce_string = String::from_sql(ty, raw)?; - - Ok(serde_json::from_value(serde_json::Value::String( - nonce_string, - ))?) - } - - accepts!(VARCHAR); - } - - impl ToSql for Nonce { - fn to_sql( - &self, - ty: &Type, - w: &mut BytesMut, - ) -> Result> { - self.0.to_string().to_sql(ty, w) - } - - accepts!(VARCHAR); - to_sql_checked!(); - } - - impl From<&Row> for Channel { - fn from(row: &Row) -> Self { - Self { - leader: row.get("leader"), - follower: row.get("follower"), - guardian: row.get("guardian"), - token: row.get("token"), - nonce: row.get("nonce"), - } - } - } - - #[cfg(test)] - mod test { - use crate::{channel_v5::Nonce, util::tests::prep_db::postgres::POSTGRES_POOL}; - #[tokio::test] - async fn nonce_to_from_sql() { - let client = POSTGRES_POOL.get().await.unwrap(); - - let nonce = Nonce::from(123_456_789_u64); - let sql_type = "VARCHAR"; - - // from SQL - { - let row_nonce = client - .query_one(&*format!("SELECT '{}'::{}", nonce, sql_type), &[]) - .await - .unwrap() - .get(0); - - assert_eq!(&nonce, &row_nonce); - } - - // to SQL - { - let row_nonce = client - .query_one(&*format!("SELECT $1::{}", sql_type), &[&nonce]) - .await - .unwrap() - .get(0); - assert_eq!(&nonce, &row_nonce); - } - } - } -} diff --git a/primitives/src/eth_checksum.rs b/primitives/src/eth_checksum.rs index 20c204e73..07de96b0f 100644 --- a/primitives/src/eth_checksum.rs +++ b/primitives/src/eth_checksum.rs @@ -47,6 +47,6 @@ mod test { .strip_prefix("0x") .expect("should have prefix"); - assert_eq!(expected_checksum, checksum(&non_prefixed)) + assert_eq!(expected_checksum, checksum(non_prefixed)) } } diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 9ec8d9339..ec6740618 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -11,8 +11,8 @@ pub use self::{ balances_map::{BalancesMap, UnifiedMap}, big_num::BigNum, campaign::{Campaign, CampaignId}, - channel::{ChannelId, ChannelSpec, SpecValidator, SpecValidators}, - channel_v5::Channel, + channel::Channel, + channel::ChannelId, config::Config, deposit::Deposit, event_submission::EventSubmission, @@ -32,7 +32,6 @@ pub mod big_num; pub mod campaign; pub mod campaign_validator; pub mod channel; -pub mod channel_v5; pub mod config; mod 
eth_checksum; pub mod event_submission; diff --git a/primitives/src/merkle_tree.rs b/primitives/src/merkle_tree.rs index 2bcac4a74..dd57cab06 100644 --- a/primitives/src/merkle_tree.rs +++ b/primitives/src/merkle_tree.rs @@ -177,8 +177,8 @@ mod test { let proof = top.proof(0); - let verify = top.verify(proof); - assert_eq!(verify, true, "should verify proof successfully"); + let verified = top.verify(proof); + assert!(verified, "should verify proof successfully"); } #[test] @@ -203,9 +203,9 @@ mod test { ); let proof = top.proof(0); - let verify = top.verify(proof); + let verified = top.verify(proof); - assert_eq!(verify, true, "should verify proof successfully"); + assert!(verified, "should verify proof successfully"); } #[test] @@ -235,8 +235,8 @@ mod test { ); let proof = top.proof(0); - let verify = top.verify(proof); + let verified = top.verify(proof); - assert_eq!(verify, true, "should verify proof successfully"); + assert!(verified, "should verify proof successfully"); } } diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 5c79295ca..87fdcd6df 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -1,4 +1,9 @@ -use crate::{Address, Balances, BigNum, Channel, ChannelId, IPFS, ValidatorId, balances::BalancesState, spender::Spender, validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}}; +use crate::{ + balances::BalancesState, + spender::Spender, + validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}, + Address, Balances, BigNum, Channel, ChannelId, ValidatorId, IPFS, +}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::{collections::HashMap, fmt, hash::Hash}; @@ -295,7 +300,7 @@ impl fmt::Display for ChannelReport { } pub mod channel_list { - use crate::{channel_v5::Channel, ValidatorId}; + use crate::{channel::Channel, ValidatorId}; use serde::{Deserialize, Serialize}; use super::Pagination; @@ -355,7 +360,7 @@ pub mod campaign_create { use crate::{ campaign::{prefix_active, Active, PricingBounds, Validators}, - channel_v5::Channel, + channel::Channel, targeting::Rules, AdUnit, Address, Campaign, CampaignId, EventSubmission, UnifiedNum, }; @@ -564,8 +569,8 @@ mod test { pub fn de_serialize_events() { let click = Event::Click { publisher: ADDRESSES["publisher"], - ad_unit: Some(DUMMY_IPFS[0].clone()), - ad_slot: Some(DUMMY_IPFS[1].clone()), + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), referrer: Some("some_referrer".to_string()), }; diff --git a/primitives/src/spender.rs b/primitives/src/spender.rs index e220f9ef9..bc7f84c2b 100644 --- a/primitives/src/spender.rs +++ b/primitives/src/spender.rs @@ -1,4 +1,4 @@ -use crate::{channel_v5::Channel, Address, Deposit, UnifiedNum}; +use crate::{channel::Channel, Address, Deposit, UnifiedNum}; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] diff --git a/primitives/src/targeting.rs b/primitives/src/targeting.rs index 283f649ed..2f70fc82e 100644 --- a/primitives/src/targeting.rs +++ b/primitives/src/targeting.rs @@ -104,6 +104,7 @@ mod test { } #[test] + #[allow(clippy::float_cmp)] fn test_output_from_channel() { use crate::campaign::{Pricing, PricingBounds}; use crate::util::tests::prep_db::DUMMY_CAMPAIGN; @@ -122,7 +123,7 @@ mod test { let output = Output::from(&campaign); - assert_eq!(true, output.show); + assert!(output.show); assert_eq!(1.0, output.boost); assert_eq!( Some(&UnifiedNum::from(1_000)), diff --git a/primitives/src/targeting/eval_test.rs 
b/primitives/src/targeting/eval_test.rs index 021941dc1..c3988c78c 100644 --- a/primitives/src/targeting/eval_test.rs +++ b/primitives/src/targeting/eval_test.rs @@ -22,13 +22,13 @@ fn get_default_input() -> Input { publisher_id: ADDRESSES["leader"], country: Some("bg".to_string()), event_type: "IMPRESSION".to_string(), - seconds_since_epoch: Utc.ymd(2020, 11, 06).and_hms(12, 0, 0), + seconds_since_epoch: Utc.ymd(2020, 11, 6).and_hms(12, 0, 0), user_agent_os: Some("os".to_string()), user_agent_browser_family: Some("family".to_string()), }, campaign: None, balances: None, - ad_unit_id: Some(DUMMY_IPFS[0].clone()), + ad_unit_id: Some(DUMMY_IPFS[0]), ad_slot: None, }; @@ -915,11 +915,11 @@ mod control_flow_and_logic { let rule = Rule::Function(Function::new_if_else( Value::Bool(false), - if_true.clone(), + if_true, if_false.clone(), )); - assert_eq!(Ok(Some(if_false.clone())), rule.eval(&input, &mut output)); + assert_eq!(Ok(Some(if_false)), rule.eval(&input, &mut output)); } #[test] fn test_or_eval() { @@ -1270,7 +1270,7 @@ mod string_and_array { let rule = Rule::Function(Function::new_at(arr.clone(), index)); assert_eq!(Ok(Some(at_index)), rule.eval(&input, &mut output)); - let broken_rule = Rule::Function(Function::new_at(arr.clone(), out_of_range)); + let broken_rule = Rule::Function(Function::new_at(arr, out_of_range)); assert_eq!(Err(Error::TypeError), broken_rule.eval(&input, &mut output)); } #[test] diff --git a/primitives/src/targeting/input.rs b/primitives/src/targeting/input.rs index e1ad4dbef..6c0ccdf82 100644 --- a/primitives/src/targeting/input.rs +++ b/primitives/src/targeting/input.rs @@ -446,13 +446,13 @@ mod test { event_min_price: Some( CAMPAIGN .pricing("IMPRESSION") - .map(|price| price.min.clone()) + .map(|price| price.min) .expect("should have price"), ), event_max_price: Some( CAMPAIGN .pricing("IMPRESSION") - .map(|price| price.max.clone()) + .map(|price| price.max) .expect("Should have price"), ), })), @@ -460,7 +460,7 @@ mod test { balances, publisher_id: ADDRESSES["publisher"], })), - ad_unit_id: Some(IPFS[1].clone()), + ad_unit_id: Some(IPFS[1]), ad_slot: Some(AdSlot { categories: vec!["IAB3".into(), "IAB13-7".into(), "IAB5".into()], hostname: "adex.network".into(), diff --git a/primitives/src/util/tests/prep_db.rs b/primitives/src/util/tests/prep_db.rs index 9deb29339..381a70dfe 100644 --- a/primitives/src/util/tests/prep_db.rs +++ b/primitives/src/util/tests/prep_db.rs @@ -1,24 +1,21 @@ use crate::{ campaign::{self, Active, Validators}, - channel::{Pricing, PricingBounds}, - channel_v5::Nonce, + channel::Nonce, targeting::Rules, - AdUnit, Address, BigNum, Campaign, Channel, ChannelId, ChannelSpec, EventSubmission, - SpecValidators, UnifiedNum, ValidatorDesc, ValidatorId, IPFS, + AdUnit, Address, Campaign, Channel, ChannelId, EventSubmission, UnifiedNum, ValidatorDesc, + ValidatorId, IPFS, }; use chrono::{TimeZone, Utc}; -use fake::faker::{Faker, Number}; use hex::FromHex; use lazy_static::lazy_static; use std::{collections::HashMap, convert::TryFrom}; lazy_static! 
{ - // dummy auth - // session_tokens + /// Dummy [`crate::ValidatorId`]s pub static ref IDS: HashMap = { let mut ids = HashMap::new(); - ids.insert("leader".into(), ValidatorId::try_from("0xce07CbB7e054514D590a0262C93070D838bFBA2e").expect("failed to parse id")); + ids.insert("leader".into(), ValidatorId::try_from("0xce07CbB7e054514D590a0262C93070D838bFBA2e").expect("failed to parse id")); ids.insert("follower".into(), ValidatorId::try_from("0xc91763d7f14ac5c5ddfbcd012e0d2a61ab9bded3").expect("failed to parse id")); ids.insert("user".into(), ValidatorId::try_from("0x20754168c00a6e58116ccfd0a5f7d1bb66c5de9d").expect("failed to parse id")); ids.insert("publisher".into(), ValidatorId::try_from("0xb7d3f81e857692d13e9d63b232a90f4a1793189e").expect("failed to parse id")); @@ -29,6 +26,7 @@ lazy_static! { ids }; + /// Dummy [`crate::Address`]es pub static ref ADDRESSES: HashMap = { let mut addresses = HashMap::new(); @@ -43,7 +41,7 @@ lazy_static! { addresses }; - // These are the Goerli testnet Addresses of these stablecoins + // These are the Goerli testnet [`Addresses`] of the following stablecoins: pub static ref TOKENS: HashMap = { let mut tokens = HashMap::new(); @@ -53,8 +51,9 @@ lazy_static! { tokens }; - // dummy auth tokens + // Dummy adapter auth tokens // authorization tokens + /// pub static ref AUTH: HashMap = { let mut auth = HashMap::new(); @@ -110,35 +109,7 @@ lazy_static! { } }; - pub static ref DUMMY_CHANNEL: crate::channel::Channel = { - let nonce = BigNum::from(::between(100_000_000, 999_999_999)); - - crate::channel::Channel { - id: ChannelId::from_hex("061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088").expect("prep_db: failed to deserialize channel id"), - creator: ValidatorId::try_from("033ed90e0fec3f3ea1c9b005c724d704501e0196").expect("Should be valid ValidatorId"), - deposit_asset: "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359".to_string(), - deposit_amount: 1_000.into(), - targeting_rules: Rules::new(), - // UNIX timestamp for 2100-01-01 - valid_until: Utc.timestamp(4_102_444_800, 0), - spec: ChannelSpec { - title: None, - validators: SpecValidators::new(DUMMY_VALIDATOR_LEADER.clone(), DUMMY_VALIDATOR_FOLLOWER.clone()), - max_per_impression: 10.into(), - min_per_impression: 1.into(), - targeting_rules: Rules::new(), - event_submission: Some(EventSubmission { allow: vec![] }), - // July 29, 2019 7:00:00 AM - created: Utc.timestamp(1_564_383_600, 0), - active_from: None, - nonce: Some(nonce), - withdraw_period_start: Utc.timestamp_millis(4_073_414_400_000), - ad_units: vec![], - pricing_bounds: Some(PricingBounds {impression: None, click: Some(Pricing { max: 0.into(), min: 0.into()})}), - }, - exhausted: Default::default(), - } - }; + pub static ref DUMMY_CHANNEL_ID: ChannelId = ChannelId::from_hex("061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088").expect("prep_db: failed to deserialize channel id"); pub static ref DUMMY_AD_UNITS: [AdUnit; 4] = [ AdUnit { diff --git a/sentry/src/db/channel.rs b/sentry/src/db/channel.rs index 0a6db0cd6..e1d01355a 100644 --- a/sentry/src/db/channel.rs +++ b/sentry/src/db/channel.rs @@ -95,7 +95,7 @@ pub async fn insert_validator_messages( mod list_channels { use primitives::{ - channel_v5::Channel, + channel::Channel, sentry::{channel_list::ChannelListResponse, Pagination}, ValidatorId, }; diff --git a/sentry/src/db/event_aggregate.rs b/sentry/src/db/event_aggregate.rs index d3a89acfb..6ee8d14c9 100644 --- a/sentry/src/db/event_aggregate.rs +++ b/sentry/src/db/event_aggregate.rs @@ -2,7 +2,7 @@ use 
chrono::{DateTime, Utc}; use futures::pin_mut; use primitives::{ balances::UncheckedState, - channel_v5::Channel as ChannelV5, + channel::Channel as ChannelV5, sentry::{EventAggregate, MessageResponse}, validator::{ApproveState, Heartbeat, NewState}, Address, BigNum, ChannelId, ValidatorId, diff --git a/sentry/src/routes/campaign.rs b/sentry/src/routes/campaign.rs index f96264a59..54838077a 100644 --- a/sentry/src/routes/campaign.rs +++ b/sentry/src/routes/campaign.rs @@ -13,7 +13,7 @@ use hyper::{Body, Request, Response}; use primitives::{ adapter::{Adapter, AdapterErrorKind, Error as AdapterError}, campaign_validator::Validator, - channel_v5::Channel, + channel::Channel, config::TokenInfo, sentry::campaign_create::{CreateCampaign, ModifyCampaign}, spender::Spendable, diff --git a/sentry/src/routes/event_aggregate.rs b/sentry/src/routes/event_aggregate.rs index ac0dcb314..62c5083c1 100644 --- a/sentry/src/routes/event_aggregate.rs +++ b/sentry/src/routes/event_aggregate.rs @@ -3,7 +3,7 @@ use hyper::{Body, Request, Response}; use serde::Deserialize; use primitives::{ - adapter::Adapter, channel_v5::Channel, sentry::EventAggregateResponse, + adapter::Adapter, channel::Channel, sentry::EventAggregateResponse, }; use crate::{success_response, Application, Auth, ResponseError}; diff --git a/validator_worker/src/channel.rs b/validator_worker/src/channel.rs index 70541b4cd..357df8303 100644 --- a/validator_worker/src/channel.rs +++ b/validator_worker/src/channel.rs @@ -4,7 +4,7 @@ use crate::{ sentry_interface::{campaigns::all_campaigns, Validator, Validators}, SentryApi, }; -use primitives::{adapter::Adapter, channel_v5::Channel, config::Config, util::ApiUrl, ChannelId}; +use primitives::{adapter::Adapter, channel::Channel, config::Config, util::ApiUrl, ChannelId}; use slog::Logger; use std::collections::{hash_map::Entry, HashSet}; diff --git a/validator_worker/src/follower.rs b/validator_worker/src/follower.rs index 4bc26cbd6..a20c6f2dc 100644 --- a/validator_worker/src/follower.rs +++ b/validator_worker/src/follower.rs @@ -4,7 +4,7 @@ use primitives::{ adapter::{Adapter, AdapterErrorKind, Error as AdapterError}, balances, balances::{Balances, CheckedState, UncheckedState}, - channel_v5::Channel, + channel::Channel, config::TokenInfo, spender::Spender, validator::{ApproveState, MessageTypes, NewState, RejectState}, diff --git a/validator_worker/src/leader.rs b/validator_worker/src/leader.rs index 510617d18..0e5e18bbf 100644 --- a/validator_worker/src/leader.rs +++ b/validator_worker/src/leader.rs @@ -4,7 +4,7 @@ use thiserror::Error; use primitives::{ adapter::{Adapter, AdapterErrorKind, Error as AdapterError}, balances::CheckedState, - channel_v5::Channel, + channel::Channel, config::TokenInfo, validator::{MessageError, MessageTypes, NewState}, Balances, ChannelId, diff --git a/validator_worker/src/main.rs b/validator_worker/src/main.rs index 82f560e43..4501649f0 100644 --- a/validator_worker/src/main.rs +++ b/validator_worker/src/main.rs @@ -14,7 +14,7 @@ use tokio::{ use adapter::{AdapterTypes, DummyAdapter, EthereumAdapter}; use primitives::{ adapter::{Adapter, DummyAdapterOptions, KeystoreOptions}, - // channel_v5::Channel as ChannelV5, + // channel::Channel as ChannelV5, config::{configuration, Config}, util::tests::prep_db::{AUTH, IDS}, util::ApiUrl, diff --git a/validator_worker/src/sentry_interface.rs b/validator_worker/src/sentry_interface.rs index 687084f22..db299c77e 100644 --- a/validator_worker/src/sentry_interface.rs +++ b/validator_worker/src/sentry_interface.rs @@ -8,7 +8,7 
@@ use slog::Logger; use primitives::{ adapter::Adapter, balances::{CheckedState, UncheckedState}, - channel_v5::Channel, + channel::Channel, sentry::{ AccountingResponse, EventAggregateResponse, LastApprovedResponse, SuccessResponse, ValidatorMessageResponse, @@ -281,7 +281,7 @@ async fn propagate_to( mod channels { use futures::{future::try_join_all, TryFutureExt}; use primitives::{ - channel_v5::Channel, + channel::Channel, sentry::channel_list::{ChannelListQuery, ChannelListResponse}, util::ApiUrl, ValidatorId, From 1f88b46ce951b81d706ee5cf0a0cc25d86518d30 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 24 Sep 2021 16:34:26 +0300 Subject: [PATCH 04/10] run rustfmt & clippy --- sentry/src/lib.rs | 8 +++++--- sentry/src/routes/campaign.rs | 2 +- sentry/src/routes/channel.rs | 4 +--- sentry/src/routes/event_aggregate.rs | 8 +++----- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index eb71b8608..5defdfbb8 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -213,7 +213,9 @@ async fn analytics_router( mut req: Request, app: &Application, ) -> Result, ResponseError> { - use routes::analytics::{advanced_analytics, advertiser_analytics, analytics, publisher_analytics}; + use routes::analytics::{ + advanced_analytics, advertiser_analytics, analytics, publisher_analytics, + }; let (route, method) = (req.uri().path(), req.method()); @@ -279,8 +281,8 @@ async fn analytics_router( } else { Err(ResponseError::NotFound) } - }, - _ => Err(ResponseError::NotFound) + } + _ => Err(ResponseError::NotFound), } } diff --git a/sentry/src/routes/campaign.rs b/sentry/src/routes/campaign.rs index 54838077a..b12e8bed3 100644 --- a/sentry/src/routes/campaign.rs +++ b/sentry/src/routes/campaign.rs @@ -294,7 +294,7 @@ pub mod update_campaign { .ok_or(Error::ChannelTokenNotWhitelisted)?; let latest_spendable = - update_latest_spendable(&adapter, &pool, campaign.channel, token, campaign.creator) + update_latest_spendable(&adapter, pool, campaign.channel, token, campaign.creator) .await?; // Gets the latest Spendable for this (spender, channelId) pair diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index 0a3e5227e..48c789d36 100644 --- a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -2,7 +2,7 @@ use crate::db::{ event_aggregate::{latest_approve_state_v5, latest_heartbeats, latest_new_state_v5}, insert_channel, insert_validator_messages, list_channels, spendable::{fetch_spendable, get_all_spendables_for_channel, update_spendable}, - DbPool, PoolError, + DbPool, }; use crate::{success_response, Application, Auth, ResponseError, RouteParams}; use futures::future::try_join_all; @@ -10,7 +10,6 @@ use hyper::{Body, Request, Response}; use primitives::{ adapter::Adapter, balances::{CheckedState, UncheckedState}, - Channel, config::TokenInfo, sentry::{ channel_list::ChannelListQuery, AllSpendersResponse, LastApproved, LastApprovedQuery, @@ -22,7 +21,6 @@ use primitives::{ }; use slog::{error, Logger}; use std::{collections::HashMap, str::FromStr}; -use tokio_postgres::error::SqlState; pub async fn channel_list( req: Request, diff --git a/sentry/src/routes/event_aggregate.rs b/sentry/src/routes/event_aggregate.rs index 62c5083c1..d217f0d9a 100644 --- a/sentry/src/routes/event_aggregate.rs +++ b/sentry/src/routes/event_aggregate.rs @@ -2,9 +2,7 @@ use chrono::{serde::ts_milliseconds_option, DateTime, Utc}; use hyper::{Body, Request, Response}; use serde::Deserialize; -use primitives::{ - adapter::Adapter, 
channel::Channel, sentry::EventAggregateResponse, -}; +use primitives::{adapter::Adapter, channel::Channel, sentry::EventAggregateResponse}; use crate::{success_response, Application, Auth, ResponseError}; @@ -19,7 +17,7 @@ pub async fn list_channel_event_aggregates( req: Request, _app: &Application, ) -> Result, ResponseError> { - let channel = req + let channel = *req .extensions() .get::() .expect("Request should have Channel"); @@ -45,7 +43,7 @@ pub async fn list_channel_event_aggregates( // .await?; let response = EventAggregateResponse { - channel: channel, + channel, events: event_aggregates, }; From 43e22f0338ceb75bb069e25c35b56760b1b2a0de Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 27 Sep 2021 10:55:18 +0300 Subject: [PATCH 05/10] adapter - ethereum - remove tokio_compat calls --- adapter/src/ethereum/test_utils.rs | 131 ++++++++++++++--------------- 1 file changed, 61 insertions(+), 70 deletions(-) diff --git a/adapter/src/ethereum/test_utils.rs b/adapter/src/ethereum/test_utils.rs index 840597d89..4814e9b61 100644 --- a/adapter/src/ethereum/test_utils.rs +++ b/adapter/src/ethereum/test_utils.rs @@ -113,13 +113,14 @@ pub async fn mock_set_balance( address: [u8; 20], amount: u64, ) -> web3::contract::Result { - tokio_compat_02::FutureExt::compat(token_contract.call( - "setBalanceTo", - (H160(address), U256::from(amount)), - H160(from), - Options::default(), - )) - .await + token_contract + .call( + "setBalanceTo", + (H160(address), U256::from(amount)), + H160(from), + Options::default(), + ) + .await } pub async fn outpace_deposit( @@ -128,16 +129,17 @@ pub async fn outpace_deposit( to: [u8; 20], amount: u64, ) -> web3::contract::Result { - tokio_compat_02::FutureExt::compat(outpace_contract.call( - "deposit", - (channel.tokenize(), H160(to), U256::from(amount)), - H160(to), - Options::with(|opt| { - opt.gas_price = Some(1.into()); - opt.gas = Some(6_721_975.into()); - }), - )) - .await + outpace_contract + .call( + "deposit", + (channel.tokenize(), H160(to), U256::from(amount)), + H160(to), + Options::with(|opt| { + opt.gas_price = Some(1.into()); + opt.gas = Some(6_721_975.into()); + }), + ) + .await } pub async fn sweeper_sweep( @@ -148,20 +150,21 @@ pub async fn sweeper_sweep( ) -> web3::contract::Result { let from_leader_account = H160(*GANACHE_ADDRESSES["leader"].as_bytes()); - tokio_compat_02::FutureExt::compat(sweeper_contract.call( - "sweep", - ( - Token::Address(H160(outpace_address)), - channel.tokenize(), - Token::Array(vec![Token::Address(H160(depositor))]), - ), - from_leader_account, - Options::with(|opt| { - opt.gas_price = Some(1.into()); - opt.gas = Some(6_721_975.into()); - }), - )) - .await + sweeper_contract + .call( + "sweep", + ( + Token::Address(H160(outpace_address)), + channel.tokenize(), + Token::Array(vec![Token::Address(H160(depositor))]), + ), + from_leader_account, + Options::with(|opt| { + opt.gas_price = Some(1.into()); + opt.gas = Some(6_721_975.into()); + }), + ) + .await } /// Deploys the Sweeper contract from `GANACHE_ADDRESS['leader']` @@ -170,19 +173,15 @@ pub async fn deploy_sweeper_contract( ) -> web3::contract::Result<(H160, Contract)> { let from_leader_account = H160(*GANACHE_ADDRESSES["leader"].as_bytes()); - let feature = tokio_compat_02::FutureExt::compat(async { - Contract::deploy(web3.eth(), &SWEEPER_ABI) - .expect("Invalid ABI of Sweeper contract") - .confirmations(0) - .options(Options::with(|opt| { - opt.gas_price = Some(1.into()); - opt.gas = Some(6_721_975.into()); - })) - .execute(*SWEEPER_BYTECODE, (), 
from_leader_account) - }) - .await; - - let sweeper_contract = tokio_compat_02::FutureExt::compat(feature).await?; + let sweeper_contract = Contract::deploy(web3.eth(), &SWEEPER_ABI) + .expect("Invalid ABI of Sweeper contract") + .confirmations(0) + .options(Options::with(|opt| { + opt.gas_price = Some(1.into()); + opt.gas = Some(6_721_975.into()); + })) + .execute(*SWEEPER_BYTECODE, (), from_leader_account) + .await?; Ok((sweeper_contract.address(), sweeper_contract)) } @@ -193,19 +192,15 @@ pub async fn deploy_outpace_contract( ) -> web3::contract::Result<(H160, Contract)> { let from_leader_account = H160(*GANACHE_ADDRESSES["leader"].as_bytes()); - let feature = tokio_compat_02::FutureExt::compat(async { - Contract::deploy(web3.eth(), &OUTPACE_ABI) - .expect("Invalid ABI of Sweeper contract") - .confirmations(0) - .options(Options::with(|opt| { - opt.gas_price = Some(1.into()); - opt.gas = Some(6_721_975.into()); - })) - .execute(*OUTPACE_BYTECODE, (), from_leader_account) - }) - .await; - - let outpace_contract = tokio_compat_02::FutureExt::compat(feature).await?; + let outpace_contract = Contract::deploy(web3.eth(), &OUTPACE_ABI) + .expect("Invalid ABI of Sweeper contract") + .confirmations(0) + .options(Options::with(|opt| { + opt.gas_price = Some(1.into()); + opt.gas = Some(6_721_975.into()); + })) + .execute(*OUTPACE_BYTECODE, (), from_leader_account) + .await?; Ok((outpace_contract.address(), outpace_contract)) } @@ -217,19 +212,15 @@ pub async fn deploy_token_contract( ) -> web3::contract::Result<(TokenInfo, H160, Contract)> { let from_leader_account = H160(*GANACHE_ADDRESSES["leader"].as_bytes()); - let feature = tokio_compat_02::FutureExt::compat(async { - Contract::deploy(web3.eth(), &MOCK_TOKEN_ABI) - .expect("Invalid ABI of Mock Token contract") - .confirmations(0) - .options(Options::with(|opt| { - opt.gas_price = Some(1.into()); - opt.gas = Some(6_721_975.into()); - })) - .execute(*MOCK_TOKEN_BYTECODE, (), from_leader_account) - }) - .await; - - let token_contract = tokio_compat_02::FutureExt::compat(feature).await?; + let token_contract = Contract::deploy(web3.eth(), &MOCK_TOKEN_ABI) + .expect("Invalid ABI of Mock Token contract") + .confirmations(0) + .options(Options::with(|opt| { + opt.gas_price = Some(1.into()); + opt.gas = Some(6_721_975.into()); + })) + .execute(*MOCK_TOKEN_BYTECODE, (), from_leader_account) + .await?; let token_info = TokenInfo { min_token_units_for_deposit: BigNum::from(min_token_units), From 9a404ff2f9798d0c51ff216b25dfc55185b971ef Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 27 Sep 2021 10:56:50 +0300 Subject: [PATCH 06/10] worker - fix test for state root hash --- validator_worker/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/validator_worker/src/lib.rs b/validator_worker/src/lib.rs index e9c4eb88c..24baaca18 100644 --- a/validator_worker/src/lib.rs +++ b/validator_worker/src/lib.rs @@ -56,7 +56,7 @@ pub(crate) fn get_state_root_hash( mod test { use super::*; - use primitives::util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_CHANNEL}; + use primitives::util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_CHANNEL_ID}; #[test] // TODO: Double check this test - encoded value! 
after introducing `spenders` ("spender", address, amount) @@ -84,7 +84,7 @@ mod test { /// we re-use it in order to double check if we haven't change anything with the `get_state_root_hash()` changes /// when we introduced `spenders` `("spender", address, amount)` & `UnifiedNum` fn get_state_root_hash_returns_correct_hash_for_added_address_to_spenders() { - let channel = DUMMY_CHANNEL.id; + let channel = *DUMMY_CHANNEL_ID; let mut balances = Balances::::default(); balances.add_earner(ADDRESSES["publisher"]); From 442b589a4577a8134aee9edfc501d5bc8acfb682 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 27 Sep 2021 10:58:31 +0300 Subject: [PATCH 07/10] adapter & primitives - update Cargo dependencies - adapter - remove tokio-compat-02 - primitives - move once_cell from dev deps to deps --- Cargo.lock | 109 +++++++++++++++--------------------------- adapter/Cargo.toml | 1 - primitives/Cargo.toml | 2 +- 3 files changed, 39 insertions(+), 73 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dcb55fee2..474590182 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -25,8 +25,7 @@ dependencies = [ "sha2 0.9.3", "thiserror", "tiny-keccak 1.5.0", - "tokio 1.8.1", - "tokio-compat-02", + "tokio", "web3", "wiremock", ] @@ -278,7 +277,7 @@ dependencies = [ "memchr", "num_cpus", "once_cell", - "pin-project-lite 0.2.6", + "pin-project-lite", "pin-utils", "slab", "wasm-bindgen-futures", @@ -627,8 +626,8 @@ dependencies = [ "bytes 1.0.1", "futures-util", "memchr", - "pin-project-lite 0.2.6", - "tokio 1.8.1", + "pin-project-lite", + "tokio", ] [[package]] @@ -899,7 +898,7 @@ dependencies = [ "crossbeam-queue", "num_cpus", "serde", - "tokio 1.8.1", + "tokio", ] [[package]] @@ -912,7 +911,7 @@ dependencies = [ "config 0.11.0", "num_cpus", "serde", - "tokio 1.8.1", + "tokio", ] [[package]] @@ -927,7 +926,7 @@ dependencies = [ "futures", "log", "serde", - "tokio 1.8.1", + "tokio", "tokio-postgres", ] @@ -1339,7 +1338,7 @@ dependencies = [ "futures-io", "memchr", "parking", - "pin-project-lite 0.2.6", + "pin-project-lite", "waker-fn", ] @@ -1386,7 +1385,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.6", + "pin-project-lite", "pin-utils", "proc-macro-hack", "proc-macro-nested", @@ -1482,7 +1481,7 @@ dependencies = [ "http", "indexmap", "slab", - "tokio 1.8.1", + "tokio", "tokio-util", "tracing", ] @@ -1595,7 +1594,7 @@ dependencies = [ "futures-lite", "http", "infer", - "pin-project-lite 0.2.6", + "pin-project-lite", "rand 0.7.3", "serde", "serde_json", @@ -1634,7 +1633,7 @@ dependencies = [ "itoa", "pin-project", "socket2 0.3.19", - "tokio 1.8.1", + "tokio", "tower-service", "tracing", "want", @@ -1649,7 +1648,7 @@ dependencies = [ "bytes 1.0.1", "hyper", "native-tls", - "tokio 1.8.1", + "tokio", "tokio-native-tls", ] @@ -2189,9 +2188,9 @@ checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4" [[package]] name = "once_cell" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3" +checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" [[package]] name = "opaque-debug" @@ -2445,12 +2444,6 @@ dependencies = [ "syn", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.6" @@ -2504,7 +2497,7 @@ 
dependencies = [ "fallible-iterator", "futures", "log", - "tokio 1.8.1", + "tokio", "tokio-postgres", ] @@ -2527,7 +2520,7 @@ checksum = "2d442770e2b1e244bb5eb03b31c79b65bb2568f413b899eaba850fa945a65954" dependencies = [ "futures", "native-tls", - "tokio 1.8.1", + "tokio", "tokio-native-tls", "tokio-postgres", ] @@ -2631,7 +2624,7 @@ dependencies = [ "thiserror", "time", "tiny-keccak 2.0.2", - "tokio 1.8.1", + "tokio", "tokio-postgres", "toml", "url", @@ -2971,9 +2964,9 @@ dependencies = [ "futures-util", "itoa", "percent-encoding", - "pin-project-lite 0.2.6", + "pin-project-lite", "sha1", - "tokio 1.8.1", + "tokio", "tokio-util", "url", ] @@ -3045,11 +3038,11 @@ dependencies = [ "mime", "native-tls", "percent-encoding", - "pin-project-lite 0.2.6", + "pin-project-lite", "serde", "serde_json", "serde_urlencoded", - "tokio 1.8.1", + "tokio", "tokio-native-tls", "url", "wasm-bindgen", @@ -3278,7 +3271,7 @@ dependencies = [ "serde_urlencoded", "slog", "thiserror", - "tokio 1.8.1", + "tokio", "tokio-postgres", ] @@ -3796,21 +3789,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" -dependencies = [ - "bytes 0.5.6", - "num_cpus", - "pin-project-lite 0.1.12", - "slab", -] - -[[package]] -name = "tokio" -version = "1.8.1" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98c8b05dc14c75ea83d63dd391100353789f5f24b8b3866542a5e85c8be8e985" +checksum = "c2c2416fdedca8443ae44b4527de1ea633af61d8f7169ffa6e72c5b53d24efcc" dependencies = [ "autocfg 1.0.1", "bytes 1.0.1", @@ -3820,26 +3801,12 @@ dependencies = [ "num_cpus", "once_cell", "parking_lot 0.11.1", - "pin-project-lite 0.2.6", + "pin-project-lite", "signal-hook-registry", "tokio-macros", "winapi 0.3.9", ] -[[package]] -name = "tokio-compat-02" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d4237822b7be8fff0a7a27927462fad435dcb6650f95cea9e946bf6bdc7e07" -dependencies = [ - "bytes 0.5.6", - "once_cell", - "pin-project-lite 0.2.6", - "tokio 0.2.25", - "tokio 1.8.1", - "tokio-stream", -] - [[package]] name = "tokio-macros" version = "1.1.0" @@ -3858,7 +3825,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" dependencies = [ "native-tls", - "tokio 1.8.1", + "tokio", ] [[package]] @@ -3876,11 +3843,11 @@ dependencies = [ "parking_lot 0.11.1", "percent-encoding", "phf", - "pin-project-lite 0.2.6", + "pin-project-lite", "postgres-protocol", "postgres-types", "socket2 0.4.0", - "tokio 1.8.1", + "tokio", "tokio-util", ] @@ -3891,8 +3858,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c535f53c0cfa1acace62995a8994fc9cc1f12d202420da96ff306ee24d576469" dependencies = [ "futures-core", - "pin-project-lite 0.2.6", - "tokio 1.8.1", + "pin-project-lite", + "tokio", ] [[package]] @@ -3906,8 +3873,8 @@ dependencies = [ "futures-io", "futures-sink", "log", - "pin-project-lite 0.2.6", - "tokio 1.8.1", + "pin-project-lite", + "tokio", ] [[package]] @@ -3932,7 +3899,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" dependencies = [ "cfg-if 1.0.0", - "pin-project-lite 0.2.6", + "pin-project-lite", "tracing-core", ] @@ -4065,7 
+4032,7 @@ dependencies = [ "serde_urlencoded", "slog", "thiserror", - "tokio 1.8.1", + "tokio", "toml", ] @@ -4246,7 +4213,7 @@ dependencies = [ "serde_json", "soketto", "tiny-keccak 2.0.2", - "tokio 1.8.1", + "tokio", "tokio-stream", "tokio-util", "url", @@ -4261,7 +4228,7 @@ checksum = "1f6d8d1636b2627fe63518d5a9b38a569405d9c9bc665c43c9c341de57227ebb" dependencies = [ "native-tls", "thiserror", - "tokio 1.8.1", + "tokio", "url", ] @@ -4343,7 +4310,7 @@ dependencies = [ "regex", "serde", "serde_json", - "tokio 1.8.1", + "tokio", ] [[package]] diff --git a/adapter/Cargo.toml b/adapter/Cargo.toml index ff730798e..61bb7856e 100644 --- a/adapter/Cargo.toml +++ b/adapter/Cargo.toml @@ -33,7 +33,6 @@ thiserror = "^1" # Futures futures = "0.3" async-trait = "0.1" -tokio-compat-02 = "0.2" # Dummy adapter dashmap = "4.0" diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 958ae7e9e..ef883fd7f 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -58,10 +58,10 @@ futures = "0.3" async-trait = "0.1" # Other lazy_static = "1.4.0" +once_cell = "^1.8" [dev-dependencies] pretty_assertions = "^0.7" # testing FromSql & ToSql implementation of structs deadpool-postgres = "0.9.0" tokio = { version = "1", features = ["rt-multi-thread", "macros"] } -once_cell = "1.5" From fc6a405af2cfc79a2abd421d088cfad3c49004d0 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 27 Sep 2021 10:59:34 +0300 Subject: [PATCH 08/10] primitives - Config - use once_cell::sync::Lazy - fix typo in config attribute --- primitives/src/config.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/primitives/src/config.rs b/primitives/src/config.rs index d6dc9bebb..d757ee99b 100644 --- a/primitives/src/config.rs +++ b/primitives/src/config.rs @@ -1,17 +1,17 @@ use crate::{event_submission::RateLimit, Address, BigNum, ValidatorId}; -use lazy_static::lazy_static; +use once_cell::sync::Lazy; use serde::{Deserialize, Deserializer, Serialize}; use serde_hex::{SerHex, StrictPfx}; use std::{collections::HashMap, fs, num::NonZeroU8}; -lazy_static! 
{ - static ref DEVELOPMENT_CONFIG: Config = - toml::from_str(include_str!("../../docs/config/dev.toml")) - .expect("Failed to parse dev.toml config file"); - static ref PRODUCTION_CONFIG: Config = - toml::from_str(include_str!("../../docs/config/prod.toml")) - .expect("Failed to parse prod.toml config file"); -} +static DEVELOPMENT_CONFIG: Lazy = Lazy::new(|| { + toml::from_str(include_str!("../../docs/config/dev.toml")) + .expect("Failed to parse dev.toml config file") +}); +static PRODUCTION_CONFIG: Lazy = Lazy::new(|| { + toml::from_str(include_str!("../../docs/config/prod.toml")) + .expect("Failed to parse prod.toml config file") +}); #[derive(Serialize, Deserialize, Debug, Clone)] pub struct TokenInfo { @@ -32,7 +32,7 @@ pub struct Config { pub msgs_find_limit: u32, pub analytics_find_limit_v5: u32, // in milliseconds - pub analytics_maxtime: u32, + pub analytics_maxtime_v5: u32, // in milliseconds pub heartbeat_time: u32, pub health_threshold_promilles: u32, From 9694de8e3cde4b0eedbe8e029a2edfd15ca549bc Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 27 Sep 2021 11:25:53 +0300 Subject: [PATCH 09/10] primitives - Config - deprecated aggr_throttle --- docs/config/dev.toml | 4 ++++ docs/config/prod.toml | 4 ++++ primitives/src/config.rs | 1 + 3 files changed, 9 insertions(+) diff --git a/docs/config/dev.toml b/docs/config/dev.toml index ccd2cf7e8..1fb582a64 100644 --- a/docs/config/dev.toml +++ b/docs/config/dev.toml @@ -4,7 +4,11 @@ max_channels = 512 channels_find_limit = 200 wait_time = 500 +# V4 Deprecated +aggr_throttle = 0 + events_find_limit = 100 + msgs_find_limit = 10 analytics_find_limit_v5 = 5000 analytics_maxtime_v5 = 20000 diff --git a/docs/config/prod.toml b/docs/config/prod.toml index ee9ca734b..b41656965 100644 --- a/docs/config/prod.toml +++ b/docs/config/prod.toml @@ -4,7 +4,11 @@ max_channels = 512 channels_find_limit = 512 wait_time = 40000 +# V4 Deprecated +aggr_throttle = 0 + events_find_limit = 100 + msgs_find_limit = 10 analytics_find_limit_v5 = 5000 analytics_maxtime_v5 = 15000 diff --git a/primitives/src/config.rs b/primitives/src/config.rs index d757ee99b..df7bb8b21 100644 --- a/primitives/src/config.rs +++ b/primitives/src/config.rs @@ -26,6 +26,7 @@ pub struct Config { pub max_channels: u32, pub channels_find_limit: u32, pub wait_time: u32, + #[deprecated = "redundant V4 value. 
No aggregates are needed for V5"] pub aggr_throttle: u32, #[deprecated = "For V5 this should probably be part of the Analytics"] pub events_find_limit: u32, From b32d2212c2c6092c978e22565674617e2906948b Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 28 Sep 2021 09:51:52 +0300 Subject: [PATCH 10/10] clean up use statements --- adapter/src/dummy.rs | 3 +-- adapter/src/ethereum.rs | 3 +-- primitives/src/campaign.rs | 4 ++-- primitives/src/lib.rs | 3 +-- primitives/src/sentry.rs | 5 ++--- primitives/src/spender.rs | 2 +- primitives/src/util/tests/prep_db.rs | 1 - sentry/src/db/channel.rs | 3 +-- sentry/src/db/event_aggregate.rs | 7 +++---- sentry/src/routes/campaign.rs | 3 +-- sentry/src/routes/event_aggregate.rs | 2 +- validator_worker/src/channel.rs | 2 +- validator_worker/src/follower.rs | 3 +-- validator_worker/src/leader.rs | 3 +-- validator_worker/src/main.rs | 14 +++++--------- validator_worker/src/sentry_interface.rs | 6 ++---- 16 files changed, 24 insertions(+), 40 deletions(-) diff --git a/adapter/src/dummy.rs b/adapter/src/dummy.rs index 1026109a6..bf958b390 100644 --- a/adapter/src/dummy.rs +++ b/adapter/src/dummy.rs @@ -5,9 +5,8 @@ use primitives::{ Adapter, AdapterErrorKind, AdapterResult, Deposit, DummyAdapterOptions, Error as AdapterError, Session, }, - channel::Channel, config::Config, - Address, ChannelId, ToETHChecksum, ValidatorId, + Address, Channel, ChannelId, ToETHChecksum, ValidatorId, }; use std::{collections::HashMap, fmt, sync::Arc}; diff --git a/adapter/src/ethereum.rs b/adapter/src/ethereum.rs index fb72ac74d..a7a53e7eb 100644 --- a/adapter/src/ethereum.rs +++ b/adapter/src/ethereum.rs @@ -9,9 +9,8 @@ use ethstore::{ use lazy_static::lazy_static; use primitives::{ adapter::{Adapter, AdapterResult, Deposit, Error as AdapterError, KeystoreOptions, Session}, - channel::Channel, config::Config, - Address, BigNum, ToETHChecksum, ValidatorId, + Address, BigNum, Channel, ToETHChecksum, ValidatorId, }; use reqwest::Client; use serde::{Deserialize, Serialize}; diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 75684da9b..d0ef2bcc5 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -1,5 +1,5 @@ use crate::{ - channel::Channel, targeting::Rules, AdUnit, Address, EventSubmission, UnifiedNum, Validator, + targeting::Rules, AdUnit, Address, Channel, EventSubmission, UnifiedNum, Validator, ValidatorDesc, ValidatorId, }; @@ -358,7 +358,7 @@ pub mod validators { #[cfg(feature = "postgres")] mod postgres { - use crate::channel::Channel; + use crate::Channel; use super::{Active, Campaign, CampaignId, PricingBounds, Validators}; use bytes::BytesMut; diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index ec6740618..f448f882c 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -11,8 +11,7 @@ pub use self::{ balances_map::{BalancesMap, UnifiedMap}, big_num::BigNum, campaign::{Campaign, CampaignId}, - channel::Channel, - channel::ChannelId, + channel::{Channel, ChannelId}, config::Config, deposit::Deposit, event_submission::EventSubmission, diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 87fdcd6df..62016c0c8 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -300,7 +300,7 @@ impl fmt::Display for ChannelReport { } pub mod channel_list { - use crate::{channel::Channel, ValidatorId}; + use crate::{Channel, ValidatorId}; use serde::{Deserialize, Serialize}; use super::Pagination; @@ -360,9 +360,8 @@ pub mod campaign_create { use crate::{ campaign::{prefix_active, 
Active, PricingBounds, Validators}, - channel::Channel, targeting::Rules, - AdUnit, Address, Campaign, CampaignId, EventSubmission, UnifiedNum, + AdUnit, Address, Campaign, CampaignId, Channel, EventSubmission, UnifiedNum, }; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] diff --git a/primitives/src/spender.rs b/primitives/src/spender.rs index bc7f84c2b..7aba539e3 100644 --- a/primitives/src/spender.rs +++ b/primitives/src/spender.rs @@ -1,4 +1,4 @@ -use crate::{channel::Channel, Address, Deposit, UnifiedNum}; +use crate::{Address, Channel, Deposit, UnifiedNum}; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] diff --git a/primitives/src/util/tests/prep_db.rs b/primitives/src/util/tests/prep_db.rs index 381a70dfe..a2a7279c2 100644 --- a/primitives/src/util/tests/prep_db.rs +++ b/primitives/src/util/tests/prep_db.rs @@ -53,7 +53,6 @@ lazy_static! { // Dummy adapter auth tokens // authorization tokens - /// pub static ref AUTH: HashMap = { let mut auth = HashMap::new(); diff --git a/sentry/src/db/channel.rs b/sentry/src/db/channel.rs index e1d01355a..b44787cce 100644 --- a/sentry/src/db/channel.rs +++ b/sentry/src/db/channel.rs @@ -95,9 +95,8 @@ pub async fn insert_validator_messages( mod list_channels { use primitives::{ - channel::Channel, sentry::{channel_list::ChannelListResponse, Pagination}, - ValidatorId, + Channel, ValidatorId, }; use std::str::FromStr; use tokio_postgres::types::{accepts, FromSql, Type}; diff --git a/sentry/src/db/event_aggregate.rs b/sentry/src/db/event_aggregate.rs index 6ee8d14c9..985b7fe61 100644 --- a/sentry/src/db/event_aggregate.rs +++ b/sentry/src/db/event_aggregate.rs @@ -2,10 +2,9 @@ use chrono::{DateTime, Utc}; use futures::pin_mut; use primitives::{ balances::UncheckedState, - channel::Channel as ChannelV5, sentry::{EventAggregate, MessageResponse}, validator::{ApproveState, Heartbeat, NewState}, - Address, BigNum, ChannelId, ValidatorId, + Address, BigNum, Channel, ChannelId, ValidatorId, }; use std::{convert::TryFrom, ops::Add}; use tokio_postgres::{ @@ -17,7 +16,7 @@ use super::{DbPool, PoolError}; pub async fn latest_approve_state_v5( pool: &DbPool, - channel: &ChannelV5, + channel: &Channel, ) -> Result>, PoolError> { let client = pool.get().await?; @@ -34,7 +33,7 @@ pub async fn latest_approve_state_v5( pub async fn latest_new_state_v5( pool: &DbPool, - channel: &ChannelV5, + channel: &Channel, state_root: &str, ) -> Result>>, PoolError> { let client = pool.get().await?; diff --git a/sentry/src/routes/campaign.rs b/sentry/src/routes/campaign.rs index b12e8bed3..bf5dc620f 100644 --- a/sentry/src/routes/campaign.rs +++ b/sentry/src/routes/campaign.rs @@ -13,11 +13,10 @@ use hyper::{Body, Request, Response}; use primitives::{ adapter::{Adapter, AdapterErrorKind, Error as AdapterError}, campaign_validator::Validator, - channel::Channel, config::TokenInfo, sentry::campaign_create::{CreateCampaign, ModifyCampaign}, spender::Spendable, - Address, Campaign, Deposit, UnifiedNum, + Address, Campaign, Channel, Deposit, UnifiedNum, }; use slog::error; use std::cmp::{max, Ordering}; diff --git a/sentry/src/routes/event_aggregate.rs b/sentry/src/routes/event_aggregate.rs index d217f0d9a..8bc6017a0 100644 --- a/sentry/src/routes/event_aggregate.rs +++ b/sentry/src/routes/event_aggregate.rs @@ -2,7 +2,7 @@ use chrono::{serde::ts_milliseconds_option, DateTime, Utc}; use hyper::{Body, Request, Response}; use serde::Deserialize; -use primitives::{adapter::Adapter, channel::Channel, sentry::EventAggregateResponse}; +use 
primitives::{adapter::Adapter, sentry::EventAggregateResponse, Channel}; use crate::{success_response, Application, Auth, ResponseError}; diff --git a/validator_worker/src/channel.rs b/validator_worker/src/channel.rs index 357df8303..256c1a14c 100644 --- a/validator_worker/src/channel.rs +++ b/validator_worker/src/channel.rs @@ -4,7 +4,7 @@ use crate::{ sentry_interface::{campaigns::all_campaigns, Validator, Validators}, SentryApi, }; -use primitives::{adapter::Adapter, channel::Channel, config::Config, util::ApiUrl, ChannelId}; +use primitives::{adapter::Adapter, config::Config, util::ApiUrl, Channel, ChannelId}; use slog::Logger; use std::collections::{hash_map::Entry, HashSet}; diff --git a/validator_worker/src/follower.rs b/validator_worker/src/follower.rs index a20c6f2dc..872195a0a 100644 --- a/validator_worker/src/follower.rs +++ b/validator_worker/src/follower.rs @@ -4,11 +4,10 @@ use primitives::{ adapter::{Adapter, AdapterErrorKind, Error as AdapterError}, balances, balances::{Balances, CheckedState, UncheckedState}, - channel::Channel, config::TokenInfo, spender::Spender, validator::{ApproveState, MessageTypes, NewState, RejectState}, - Address, ChannelId, UnifiedNum, + Address, Channel, ChannelId, UnifiedNum, }; use crate::core::follower_rules::{get_health, is_valid_transition}; diff --git a/validator_worker/src/leader.rs b/validator_worker/src/leader.rs index 0e5e18bbf..a65b664a4 100644 --- a/validator_worker/src/leader.rs +++ b/validator_worker/src/leader.rs @@ -4,10 +4,9 @@ use thiserror::Error; use primitives::{ adapter::{Adapter, AdapterErrorKind, Error as AdapterError}, balances::CheckedState, - channel::Channel, config::TokenInfo, validator::{MessageError, MessageTypes, NewState}, - Balances, ChannelId, + Balances, Channel, ChannelId, }; use crate::{ diff --git a/validator_worker/src/main.rs b/validator_worker/src/main.rs index 4501649f0..00a748e87 100644 --- a/validator_worker/src/main.rs +++ b/validator_worker/src/main.rs @@ -5,21 +5,17 @@ use std::{convert::TryFrom, error::Error, time::Duration}; use clap::{crate_version, App, Arg}; use futures::future::{join, join_all}; -use tokio::{ - runtime::Runtime, - // time::{sleep, timeout}, - time::sleep, -}; +use tokio::{runtime::Runtime, time::sleep}; use adapter::{AdapterTypes, DummyAdapter, EthereumAdapter}; use primitives::{ adapter::{Adapter, DummyAdapterOptions, KeystoreOptions}, - // channel::Channel as ChannelV5, config::{configuration, Config}, - util::tests::prep_db::{AUTH, IDS}, - util::ApiUrl, + util::{ + tests::prep_db::{AUTH, IDS}, + ApiUrl, + }, ValidatorId, - // Campaign, ChannelId, SpecValidator, }; use slog::{error, info, Logger}; use std::fmt::Debug; diff --git a/validator_worker/src/sentry_interface.rs b/validator_worker/src/sentry_interface.rs index db299c77e..649244bd2 100644 --- a/validator_worker/src/sentry_interface.rs +++ b/validator_worker/src/sentry_interface.rs @@ -8,7 +8,6 @@ use slog::Logger; use primitives::{ adapter::Adapter, balances::{CheckedState, UncheckedState}, - channel::Channel, sentry::{ AccountingResponse, EventAggregateResponse, LastApprovedResponse, SuccessResponse, ValidatorMessageResponse, @@ -16,7 +15,7 @@ use primitives::{ spender::Spender, util::ApiUrl, validator::MessageTypes, - Address, Campaign, {ChannelId, Config, ValidatorId}, + Address, Campaign, Channel, {ChannelId, Config, ValidatorId}, }; use thiserror::Error; @@ -281,10 +280,9 @@ async fn propagate_to( mod channels { use futures::{future::try_join_all, TryFutureExt}; use primitives::{ - channel::Channel, 
sentry::channel_list::{ChannelListQuery, ChannelListResponse}, util::ApiUrl, - ValidatorId, + Channel, ValidatorId, }; use reqwest::{Client, Response};