diff --git a/adapter/src/ethereum/test_utils.rs b/adapter/src/ethereum/test_utils.rs index 0dbb4b5fb..da2e40c57 100644 --- a/adapter/src/ethereum/test_utils.rs +++ b/adapter/src/ethereum/test_utils.rs @@ -234,6 +234,8 @@ pub async fn deploy_token_contract( let token_info = TokenInfo { min_token_units_for_deposit: BigNum::from(min_token_units), precision: NonZeroU8::new(18).expect("should create NonZeroU8"), + // 0.000_1 + min_validator_fee: BigNum::from(100_000_000_000_000), }; Ok((token_info, token_contract.address(), token_contract)) diff --git a/docs/config/dev.toml b/docs/config/dev.toml index 3ec9f9a98..6ad28e737 100644 --- a/docs/config/dev.toml +++ b/docs/config/dev.toml @@ -19,7 +19,6 @@ validator_tick_timeout = 5000 ip_rate_limit = { type = 'ip', timeframe = 20000 } sid_rate_limit = { type = 'sid', timeframe = 20000 } -ethereum_core_address = '0x333420fc6a897356e69b62417cd17ff012177d2b' # TODO: Replace with real contract address outpace_address = '0x333420fc6a897356e69b62417cd17ff012177d2b' # TODO: Replace with real contract address @@ -29,21 +28,30 @@ ethereum_network = 'http://localhost:8545' ethereum_adapter_relayer = 'https://goerli-relayer.adex.network' creators_whitelist = [] -minimal_deposit = "0" -minimal_fee = "0" validators_whitelist = [] [[token_address_whitelist]] -address = '0x73967c6a0904aa032c103b4104747e88c566b1a2' #DAI +# DAI +address = '0x73967c6a0904aa032c103b4104747e88c566b1a2' +# 1 * 10^-10 = 0.0_000_000_001 min_token_units_for_deposit = '100000000' +min_validator_fee = '100000000' precision = 18 [[token_address_whitelist]] -address = '0x509ee0d083ddf8ac028f2a56731412edd63223b9' #USDT -min_token_units_for_deposit = '100000000' +# USDT +address = '0x509ee0d083ddf8ac028f2a56731412edd63223b9' +# 1.000_000 +min_token_units_for_deposit = '1000000' +# 0.001 +min_validator_fee = '1000' precision = 6 [[token_address_whitelist]] -address = '0x44dcfcead37be45206af6079648988b29284b2c6' #USDC +# USDC +address = 
'0x44dcfcead37be45206af6079648988b29284b2c6' +# 1.000_000 min_token_units_for_deposit = '100000000' +# 0.001 +min_validator_fee = '1000' precision = 6 diff --git a/docs/config/prod.toml b/docs/config/prod.toml index 57e7b8fcb..97371f420 100644 --- a/docs/config/prod.toml +++ b/docs/config/prod.toml @@ -18,7 +18,7 @@ validator_tick_timeout = 10000 ip_rate_limit = { type = 'ip', timeframe = 1200000 } sid_rate_limit = { type = 'sid', timeframe = 0 } -ethereum_core_address = '0x333420fc6a897356e69b62417cd17ff012177d2b' + # TODO: Replace with real contract address outpace_address = '0x333420fc6a897356e69b62417cd17ff012177d2b' # TODO: Replace with real contract address @@ -28,27 +28,39 @@ ethereum_network = 'http://localhost:8545' ethereum_adapter_relayer = 'https://relayer.adex.network' creators_whitelist = [] -minimal_deposit = "0" -minimal_fee = "0" validators_whitelist = [] + [[token_address_whitelist]] -address = '0x6b175474e89094c44da98b954eedeac495271d0f' #DAI +# DAI +address = '0x6b175474e89094c44da98b954eedeac495271d0f' +# 1 * 10^-10 = 0.0_000_000_001 min_token_units_for_deposit = '100000000' +min_validator_fee = '100000000' precision = 18 [[token_address_whitelist]] -address = '0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359' #SAI +# SAI +address = '0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359' +# 1 * 10^-10 = 0.0_000_000_001 min_token_units_for_deposit = '100000000' +min_validator_fee = '100000000' precision = 18 [[token_address_whitelist]] -address = '0xdac17f958d2ee523a2206206994597c13d831ec7' #USDT -min_token_units_for_deposit = '100000000' +# USDT +address = '0xdac17f958d2ee523a2206206994597c13d831ec7' +# 1.000_000 +min_token_units_for_deposit = '1000000' +# 0.001 +min_validator_fee = '1000' precision = 6 [[token_address_whitelist]] -address = '0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48' #USDC +# USDC +address = '0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48' +# 1.000_000 min_token_units_for_deposit = '100000000' +# 0.001 +min_validator_fee = '1000' precision = 6 - 
diff --git a/primitives/src/ad_unit.rs b/primitives/src/ad_unit.rs index 521ff2147..d9a5e4b56 100644 --- a/primitives/src/ad_unit.rs +++ b/primitives/src/ad_unit.rs @@ -52,3 +52,34 @@ pub struct AdUnit { )] pub modified: Option>, } + +#[cfg(feature = "postgres")] +mod postgres { + use super::AdUnit; + + use bytes::BytesMut; + use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, Json, ToSql, Type}; + use std::error::Error; + impl<'a> FromSql<'a> for AdUnit { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let json = as FromSql>::from_sql(ty, raw)?; + + Ok(json.0) + } + + accepts!(JSONB); + } + + impl ToSql for AdUnit { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + Json(self).to_sql(ty, w) + } + + accepts!(JSONB); + to_sql_checked!(); + } +} diff --git a/primitives/src/adapter.rs b/primitives/src/adapter.rs index f65f1a3a7..af336daff 100644 --- a/primitives/src/adapter.rs +++ b/primitives/src/adapter.rs @@ -1,6 +1,5 @@ use crate::{ - channel::ChannelError, channel_v5::Channel, Address, - BigNum, DomainError, ValidatorId, + channel::ChannelError, channel_v5::Channel, Address, BigNum, DomainError, ValidatorId, }; use async_trait::async_trait; use serde::{Deserialize, Serialize}; diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index c4691ed7c..5774ec478 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -35,6 +35,7 @@ mod campaign_id { pub struct CampaignId([u8; 16]); impl CampaignId { + /// Generates randomly a `CampaignId` using `Uuid::new_v4().to_simple()` pub fn new() -> Self { Self::default() } @@ -184,10 +185,12 @@ pub struct Campaign { } impl Campaign { - pub fn find_validator(&self, validator: ValidatorId) -> Option<&'_ ValidatorDesc> { + pub fn find_validator(&self, validator: &ValidatorId) -> Option> { match (self.leader(), self.follower()) { - (Some(leader), _) if leader.id == validator => Some(leader), - (_, Some(follower)) if follower.id == validator => 
Some(follower), + (Some(leader), _) if &leader.id == validator => Some(ValidatorRole::Leader(leader)), + (_, Some(follower)) if &follower.id == validator => { + Some(ValidatorRole::Follower(follower)) + } _ => None, } } @@ -195,21 +198,13 @@ impl Campaign { /// Matches the Channel.leader to the Campaign.spec.leader /// If they match it returns `Some`, otherwise, it returns `None` pub fn leader(&self) -> Option<&'_ ValidatorDesc> { - if self.channel.leader == self.validators.leader().id { - Some(self.validators.leader()) - } else { - None - } + self.validators.find(&self.channel.leader) } /// Matches the Channel.follower to the Campaign.spec.follower /// If they match it returns `Some`, otherwise, it returns `None` pub fn follower(&self) -> Option<&'_ ValidatorDesc> { - if self.channel.follower == self.validators.follower().id { - Some(self.validators.follower()) - } else { - None - } + self.validators.find(&self.channel.follower) } /// Returns the pricing of a given event @@ -289,10 +284,10 @@ pub mod validators { use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] - /// A (leader, follower) tuple + /// Unordered list of the validators representing the leader & follower pub struct Validators(ValidatorDesc, ValidatorDesc); - #[derive(Debug)] + #[derive(Debug, PartialEq, Eq, Clone)] pub enum ValidatorRole<'a> { Leader(&'a ValidatorDesc), Follower(&'a ValidatorDesc), @@ -308,33 +303,15 @@ pub mod validators { } impl Validators { - pub fn new(leader: ValidatorDesc, follower: ValidatorDesc) -> Self { - Self(leader, follower) - } - - pub fn leader(&self) -> &ValidatorDesc { - &self.0 + pub fn new(validators: (ValidatorDesc, ValidatorDesc)) -> Self { + Self(validators.0, validators.1) } - pub fn follower(&self) -> &ValidatorDesc { - &self.1 - } - - pub fn find(&self, validator_id: &ValidatorId) -> Option> { - if &self.leader().id == validator_id { - Some(ValidatorRole::Leader(&self.leader())) - } else if &self.follower().id 
== validator_id { - Some(ValidatorRole::Follower(&self.follower())) - } else { - None - } - } - - pub fn find_index(&self, validator_id: &ValidatorId) -> Option { - if &self.leader().id == validator_id { - Some(0) - } else if &self.follower().id == validator_id { - Some(1) + pub fn find(&self, validator_id: &ValidatorId) -> Option<&ValidatorDesc> { + if &self.0.id == validator_id { + Some(&self.0) + } else if &self.1.id == validator_id { + Some(&self.1) } else { None } @@ -346,8 +323,8 @@ pub mod validators { } impl From<(ValidatorDesc, ValidatorDesc)> for Validators { - fn from((leader, follower): (ValidatorDesc, ValidatorDesc)) -> Self { - Self(leader, follower) + fn from(validators: (ValidatorDesc, ValidatorDesc)) -> Self { + Self(validators.0, validators.1) } } @@ -383,12 +360,12 @@ pub mod validators { 0 => { self.index += 1; - Some(self.validators.leader()) + Some(&self.validators.0) } 1 => { self.index += 1; - Some(self.validators.follower()) + Some(&self.validators.1) } _ => None, } @@ -396,6 +373,102 @@ pub mod validators { } } -// TODO: Postgres Campaign -// TODO: Postgres CampaignSpec -// TODO: Postgres Validators +#[cfg(feature = "postgres")] +mod postgres { + use super::{Active, Campaign, CampaignId, PricingBounds, Validators}; + use bytes::BytesMut; + use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, Json, ToSql, Type}; + use std::error::Error; + use tokio_postgres::Row; + + impl From<&Row> for Campaign { + fn from(row: &Row) -> Self { + Self { + id: row.get("id"), + channel: row.get("channel"), + creator: row.get("creator"), + budget: row.get("budget"), + validators: row.get("validators"), + title: row.get("title"), + pricing_bounds: row.get("pricing_bounds"), + event_submission: row.get("event_submission"), + ad_units: row.get::<_, Json<_>>("ad_units").0, + targeting_rules: row.get("targeting_rules"), + created: row.get("created"), + active: Active { + from: row.get("active_from"), + to: row.get("active_to"), + }, + } + } + } + + 
impl<'a> FromSql<'a> for CampaignId { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let str_slice = <&str as FromSql>::from_sql(ty, raw)?; + + Ok(str_slice.parse()?) + } + + accepts!(TEXT, VARCHAR); + } + + impl ToSql for CampaignId { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + self.to_string().to_sql(ty, w) + } + + accepts!(TEXT, VARCHAR); + to_sql_checked!(); + } + + impl<'a> FromSql<'a> for Validators { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let json = as FromSql>::from_sql(ty, raw)?; + + Ok(json.0) + } + + accepts!(JSONB); + } + + impl ToSql for Validators { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + Json(self).to_sql(ty, w) + } + + accepts!(JSONB); + to_sql_checked!(); + } + + impl<'a> FromSql<'a> for PricingBounds { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let json = as FromSql>::from_sql(ty, raw)?; + + Ok(json.0) + } + + accepts!(JSONB); + } + + impl ToSql for PricingBounds { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + Json(self).to_sql(ty, w) + } + + accepts!(JSONB); + to_sql_checked!(); + } +} diff --git a/primitives/src/campaign_validator.rs b/primitives/src/campaign_validator.rs new file mode 100644 index 000000000..657b12040 --- /dev/null +++ b/primitives/src/campaign_validator.rs @@ -0,0 +1,133 @@ +use crate::{ + campaign::Validators, + config::{Config, TokenInfo}, + Address, Campaign, UnifiedNum, ValidatorId, +}; +use chrono::Utc; +use std::{cmp::PartialEq, collections::HashMap}; +use thiserror::Error; + +pub trait Validator { + fn validate(&self, config: &Config, validator_identity: &ValidatorId) -> Result<(), Error>; +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum Validation { + /// When the Adapter address is not listed in the `campaign.validators` & `campaign.channel.(leader/follower)` + /// which in terms means, that the adapter shouldn't handle this Campaign + AdapterNotIncluded, + /// when 
`campaign.active.to` has passed (i.e. < now), the Campaign should not be handled + // campaign.active.to must be in the future + InvalidActiveTo, + UnlistedValidator, + UnlistedCreator, + UnlistedAsset, + MinimumDepositNotMet, + MinimumValidatorFeeNotMet, + FeeConstraintViolated, +} + +#[derive(Debug, Eq, PartialEq, Clone, Copy, Error)] +pub enum Error { + #[error("Summing the Validators fee results in overflow")] + FeeSumOverflow, + #[error("Validation error: {0:?}")] + Validation(Validation), +} + +impl From for Error { + fn from(v: Validation) -> Self { + Self::Validation(v) + } +} + +impl Validator for Campaign { + fn validate(&self, config: &Config, validator_identity: &ValidatorId) -> Result<(), Error> { + // check if the channel validators include our adapter identity + let whoami_validator = match self.find_validator(validator_identity) { + Some(role) => role.validator(), + None => return Err(Validation::AdapterNotIncluded.into()), + }; + + if self.active.to < Utc::now() { + return Err(Validation::InvalidActiveTo.into()); + } + + if !all_validators_listed(&self.validators, &config.validators_whitelist) { + return Err(Validation::UnlistedValidator.into()); + } + + if !creator_listed(&self, &config.creators_whitelist) { + return Err(Validation::UnlistedCreator.into()); + } + + // Check if the token is listed in the Configuration + let token_info = config + .token_address_whitelist + .get(&self.channel.token) + .ok_or(Validation::UnlistedAsset)?; + + // Check if the campaign budget is above the minimum deposit configured + if self.budget.to_precision(token_info.precision.get()) + < token_info.min_token_units_for_deposit + { + return Err(Validation::MinimumDepositNotMet.into()); + } + + // Check if the validator fee is greater than the minimum configured fee + if whoami_validator + .fee + .to_precision(token_info.precision.get()) + < token_info.min_validator_fee + { + return Err(Validation::MinimumValidatorFeeNotMet.into()); + } + + let total_validator_fee:
UnifiedNum = self + .validators + .iter() + .map(|v| &v.fee) + .sum::>() + // on overflow return an error + .ok_or(Error::FeeSumOverflow)?; + + if total_validator_fee >= self.budget { + return Err(Validation::FeeConstraintViolated.into()); + } + + Ok(()) + } +} + +pub fn all_validators_listed(validators: &Validators, whitelist: &[ValidatorId]) -> bool { + if whitelist.is_empty() { + true + } else { + let found_validators = whitelist + .iter() + .filter(|&allowed| validators.find(allowed).is_some()) + // this will ensure that if we find the 2 validators earlier + // we don't go over the other values of the whitelist + .take(2); + // the found validators should be exactly 2, if they are not, then 1 or 2 are missing + found_validators.count() == 2 + } +} + +pub fn creator_listed(campaign: &Campaign, whitelist: &[Address]) -> bool { + // if the list is empty, return true, as we don't have a whitelist to restrict us to + // or if we have a list, check if it includes the `campaign.creator` + whitelist.is_empty() + || whitelist + .iter() + .any(|allowed| allowed.eq(&campaign.creator)) +} + +pub fn asset_listed(campaign: &Campaign, whitelist: &HashMap) -> bool { + // if the list is empty, return true, as we don't have a whitelist to restrict us to + // or if we have a list, check if it includes the `campaign.channel.token` + whitelist.is_empty() + || whitelist + .keys() + .any(|allowed| allowed == &campaign.channel.token) +} diff --git a/primitives/src/channel_validator.rs b/primitives/src/channel_validator.rs deleted file mode 100644 index ecb10e175..000000000 --- a/primitives/src/channel_validator.rs +++ /dev/null @@ -1,111 +0,0 @@ -use crate::channel::{Channel, ChannelError, SpecValidator, SpecValidators}; -use crate::config::{Config, TokenInfo}; -use crate::Address; -use crate::BigNum; -use crate::ValidatorId; -use chrono::Utc; -use std::cmp::PartialEq; -use std::collections::HashMap; -use time::Duration; - -// -// TODO: AIP#61 How relevant is this validator?
Check and remove if it's obsolete -// -pub trait ChannelValidator { - fn is_channel_valid( - config: &Config, - validator_identity: &ValidatorId, - channel: &Channel, - ) -> Result<(), ChannelError> { - let adapter_channel_validator = match channel.spec.validators.find(validator_identity) { - // check if the channel validators include our adapter identity - None => return Err(ChannelError::AdapterNotIncluded), - Some(SpecValidator::Leader(validator)) | Some(SpecValidator::Follower(validator)) => { - validator - } - }; - - if channel.valid_until < Utc::now() { - return Err(ChannelError::InvalidValidUntil( - "channel.validUntil has passed".to_string(), - )); - } - - if channel.valid_until > (Utc::now() + Duration::days(365)) { - return Err(ChannelError::InvalidValidUntil( - "channel.validUntil should not be greater than one year".to_string(), - )); - } - - if channel.spec.withdraw_period_start > channel.valid_until { - return Err(ChannelError::InvalidValidUntil( - "channel withdrawPeriodStart is invalid".to_string(), - )); - } - - if !all_validators_listed(&channel.spec.validators, &config.validators_whitelist) { - return Err(ChannelError::UnlistedValidator); - } - - if !creator_listed(&channel, &config.creators_whitelist) { - return Err(ChannelError::UnlistedCreator); - } - - if !asset_listed(&channel, &config.token_address_whitelist) { - return Err(ChannelError::UnlistedAsset); - } - - if channel.deposit_amount < config.minimal_deposit { - return Err(ChannelError::MinimumDepositNotMet); - } - - if BigNum::from(adapter_channel_validator.fee.to_u64()) < config.minimal_fee { - return Err(ChannelError::MinimumValidatorFeeNotMet); - } - - let total_validator_fee: BigNum = channel - .spec - .validators - .iter() - .map(|v| BigNum::from(v.fee.to_u64())) - .fold(BigNum::from(0), |acc, x| acc + x); - - if total_validator_fee >= channel.deposit_amount { - return Err(ChannelError::FeeConstraintViolated); - } - - Ok(()) - } -} - -pub fn all_validators_listed(validators: 
&SpecValidators, whitelist: &[ValidatorId]) -> bool { - if whitelist.is_empty() { - true - } else { - let found_validators = whitelist - .iter() - .filter(|&allowed| { - allowed == &validators.leader().id || allowed == &validators.follower().id - }) - // this will ensure that if we find the 2 validators earlier - // we don't go over the other values of the whitelist - .take(2); - // the found validators should be exactly 2, if they are not, then 1 or 2 are missing - found_validators.count() == 2 - } -} - -pub fn creator_listed(channel: &Channel, whitelist: &[ValidatorId]) -> bool { - // if the list is empty, return true, as we don't have a whitelist to restrict us to - // or if we have a list, check if it includes the `channel.creator` - whitelist.is_empty() || whitelist.iter().any(|allowed| allowed.eq(&channel.creator)) -} - -pub fn asset_listed(channel: &Channel, whitelist: &HashMap) -> bool { - // if the list is empty, return true, as we don't have a whitelist to restrict us to - // or if we have a list, check if it includes the `channel.deposit_asset` - whitelist.is_empty() - || whitelist - .keys() - .any(|allowed| allowed.to_string() == channel.deposit_asset) -} diff --git a/primitives/src/config.rs b/primitives/src/config.rs index e353c5601..784362005 100644 --- a/primitives/src/config.rs +++ b/primitives/src/config.rs @@ -1,5 +1,4 @@ -use crate::event_submission::RateLimit; -use crate::{Address, BigNum, ValidatorId}; +use crate::{event_submission::RateLimit, Address, BigNum, ValidatorId}; use lazy_static::lazy_static; use serde::{Deserialize, Deserializer, Serialize}; use serde_hex::{SerHex, StrictPfx}; @@ -19,6 +18,7 @@ lazy_static! 
{ #[derive(Serialize, Deserialize, Debug, Clone)] pub struct TokenInfo { pub min_token_units_for_deposit: BigNum, + pub min_validator_fee: BigNum, pub precision: NonZeroU8, } @@ -26,12 +26,12 @@ pub struct TokenInfo { #[serde(rename_all(serialize = "SCREAMING_SNAKE_CASE"))] pub struct Config { pub max_channels: u32, + pub channels_find_limit: u32, pub wait_time: u32, pub aggr_throttle: u32, - pub heartbeat_time: u32, // in milliseconds - pub channels_find_limit: u32, pub events_find_limit: u32, pub msgs_find_limit: u32, + pub heartbeat_time: u32, // in milliseconds pub health_threshold_promilles: u32, pub health_unsignable_promilles: u32, pub propagation_timeout: u32, @@ -39,28 +39,23 @@ pub struct Config { pub validator_tick_timeout: u32, pub ip_rate_limit: RateLimit, // HashMap?? pub sid_rate_limit: RateLimit, // HashMap ?? - pub creators_whitelist: Vec, - pub minimal_deposit: BigNum, - pub minimal_fee: BigNum, - #[serde(deserialize_with = "deserialize_token_whitelist")] - pub token_address_whitelist: HashMap, - /// DEPRECATED since this is v4! - #[deprecated(note = "REMOVE, this is V4")] - #[serde(with = "SerHex::")] - pub ethereum_core_address: [u8; 20], #[serde(with = "SerHex::")] pub outpace_address: [u8; 20], #[serde(with = "SerHex::")] pub sweeper_address: [u8; 20], pub ethereum_network: String, pub ethereum_adapter_relayer: String, + pub creators_whitelist: Vec
, pub validators_whitelist: Vec, + #[serde(deserialize_with = "deserialize_token_whitelist")] + pub token_address_whitelist: HashMap, } #[derive(Serialize, Deserialize, Debug, Clone)] struct ConfigWhitelist { address: Address, min_token_units_for_deposit: BigNum, + min_validator_fee: BigNum, precision: NonZeroU8, } @@ -74,12 +69,13 @@ where let tokens_whitelist: HashMap = array .into_iter() - .map(|i| { + .map(|config_whitelist| { ( - i.address, + config_whitelist.address, TokenInfo { - min_token_units_for_deposit: i.min_token_units_for_deposit, - precision: i.precision, + min_token_units_for_deposit: config_whitelist.min_token_units_for_deposit, + min_validator_fee: config_whitelist.min_validator_fee, + precision: config_whitelist.precision, }, ) }) diff --git a/primitives/src/event_submission.rs b/primitives/src/event_submission.rs index d6205c615..d76c38845 100644 --- a/primitives/src/event_submission.rs +++ b/primitives/src/event_submission.rs @@ -26,3 +26,35 @@ pub struct RateLimit { #[serde(rename = "timeframe", with = "serde_millis")] pub time_frame: Duration, } + +#[cfg(feature = "postgres")] +mod postgres { + use super::EventSubmission; + + use bytes::BytesMut; + use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, Json, ToSql, Type}; + use std::error::Error; + + impl<'a> FromSql<'a> for EventSubmission { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let json = as FromSql>::from_sql(ty, raw)?; + + Ok(json.0) + } + + accepts!(JSONB); + } + + impl ToSql for EventSubmission { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + Json(self).to_sql(ty, w) + } + + accepts!(JSONB); + to_sql_checked!(); + } +} diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 8f99c5abe..0ede0b218 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -25,9 +25,9 @@ pub mod analytics; pub mod balances_map; pub mod big_num; pub mod campaign; +pub mod campaign_validator; pub mod channel; pub mod channel_v5; -pub mod 
channel_validator; pub mod config; mod eth_checksum; pub mod event_submission; diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 630ec5ca0..f70c882a6 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -315,6 +315,72 @@ pub mod channel_list { } } +pub mod campaign_create { + use chrono::{serde::ts_milliseconds, DateTime, Utc}; + use serde::{Deserialize, Serialize}; + use serde_with::with_prefix; + + use crate::{ + campaign::{Active, PricingBounds, Validators}, + channel_v5::Channel, + targeting::Rules, + AdUnit, Address, Campaign, CampaignId, EventSubmission, UnifiedNum, + }; + + // use the same prefix for Active + with_prefix!(prefix_active "active_"); + + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] + /// All fields are present except the `CampaignId` which is randomly created + /// This struct defines the Body of the request (in JSON) + pub struct CreateCampaign { + pub channel: Channel, + pub creator: Address, + pub budget: UnifiedNum, + pub validators: Validators, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub title: Option, + /// Event pricing bounds + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pricing_bounds: Option, + /// EventSubmission object, applies to event submission (POST /channel/:id/events) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub event_submission: Option, + /// An array of AdUnit (optional) + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub ad_units: Vec, + #[serde(default)] + pub targeting_rules: Rules, + /// A millisecond timestamp of when the campaign was created + #[serde(with = "ts_milliseconds")] + pub created: DateTime, + /// A millisecond timestamp representing the time you want this campaign to become active (optional) + /// Used by the AdViewManager & Targeting AIP#31 + #[serde(flatten, with = "prefix_active")] + pub active: Active, + } + + impl CreateCampaign { + /// Creates the new `Campaign` with 
randomly generated `CampaignId` + pub fn into_campaign(self) -> Campaign { + Campaign { + id: CampaignId::new(), + channel: self.channel, + creator: self.creator, + budget: self.budget, + validators: self.validators, + title: self.title, + pricing_bounds: self.pricing_bounds, + event_submission: self.event_submission, + ad_units: self.ad_units, + targeting_rules: self.targeting_rules, + created: self.created, + active: self.active, + } + } + } +} + #[cfg(feature = "postgres")] mod postgres { use super::{MessageResponse, ValidatorMessage}; diff --git a/primitives/src/targeting/eval.rs b/primitives/src/targeting/eval.rs index 023916881..29fe9cdbd 100644 --- a/primitives/src/targeting/eval.rs +++ b/primitives/src/targeting/eval.rs @@ -1288,7 +1288,7 @@ fn math_operator(lhs: Number, rhs: Number, ops: MathOperator) -> Result FromSql<'a> for Rules { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let json = as FromSql>::from_sql(ty, raw)?; + + Ok(json.0) + } + + accepts!(JSONB); + } } diff --git a/primitives/src/util/tests/prep_db.rs b/primitives/src/util/tests/prep_db.rs index 48cbd5124..01fdb7943 100644 --- a/primitives/src/util/tests/prep_db.rs +++ b/primitives/src/util/tests/prep_db.rs @@ -98,7 +98,7 @@ lazy_static! 
{ creator: IDS["creator"].to_address(), // 1000.00000000 budget: UnifiedNum::from(100_000_000_000), - validators: Validators::new(DUMMY_VALIDATOR_LEADER.clone(), DUMMY_VALIDATOR_FOLLOWER.clone()), + validators: Validators::new((DUMMY_VALIDATOR_LEADER.clone(), DUMMY_VALIDATOR_FOLLOWER.clone())), title: Some("Dummy Campaign".to_string()), pricing_bounds: Some(campaign::PricingBounds {impression: Some(campaign::Pricing { min: 1.into(), max: 10.into()}), click: Some(campaign::Pricing { min: 0.into(), max: 0.into()})}), event_submission: Some(EventSubmission { allow: vec![] }), diff --git a/sentry/src/db.rs b/sentry/src/db.rs index 81a336788..6dd6b6639 100644 --- a/sentry/src/db.rs +++ b/sentry/src/db.rs @@ -6,6 +6,7 @@ use tokio_postgres::NoTls; use lazy_static::lazy_static; pub mod analytics; +mod campaign; mod channel; pub mod event_aggregate; pub mod spendable; @@ -146,26 +147,27 @@ pub mod tests_postgres { use deadpool::managed::{Manager as ManagerTrait, RecycleResult}; use deadpool_postgres::ManagerConfig; + use once_cell::sync::Lazy; use tokio_postgres::{NoTls, SimpleQueryMessage}; use async_trait::async_trait; - use super::{DbPool, PoolError}; + use super::{DbPool, PoolError, POSTGRES_CONFIG}; pub type Pool = deadpool::managed::Pool; - /// we must have a duplication of the migration because of how migrant is handling migratoins - /// we need to separately setup test migrations - pub static MIGRATIONS: &[&str] = &["20190806011140_initial-tables"]; - - pub fn test_postgres_connection(base_config: tokio_postgres::Config) -> Pool { + pub static DATABASE_POOL: Lazy = Lazy::new(|| { let manager_config = ManagerConfig { recycling_method: deadpool_postgres::RecyclingMethod::Fast, }; - let manager = Manager::new(base_config, manager_config); + let manager = Manager::new(POSTGRES_CONFIG.clone(), manager_config); Pool::new(manager, 15) - } + }); + + /// we must have a duplication of the migration because of how migrant is handling migrations + /// we need to separately 
setup test migrations + pub static MIGRATIONS: &[&str] = &["20190806011140_initial-tables"]; /// A Database is used to isolate test runs from each other /// we need to know the name of the database we've created. @@ -190,9 +192,8 @@ pub mod tests_postgres { } } - /// Base Pool and Config are used to create a new SCHEMA and later on - /// create the actual with default options set for each connection to that SCHEMA - /// Otherwise we cannot create/ + /// Base Pool and Config are used to create a new DATABASE and later on + /// create the actual connection to the database with default options set pub struct Manager { base_config: tokio_postgres::Config, base_pool: deadpool_postgres::Pool, diff --git a/sentry/src/db/campaign.rs b/sentry/src/db/campaign.rs new file mode 100644 index 000000000..c3660d68e --- /dev/null +++ b/sentry/src/db/campaign.rs @@ -0,0 +1,86 @@ +use crate::db::{DbPool, PoolError}; +use primitives::Campaign; +use tokio_postgres::types::Json; + +// TODO: Remove once we use this fn +#[allow(dead_code)] +pub async fn insert_campaign(pool: &DbPool, campaign: &Campaign) -> Result { + let client = pool.get().await?; + let ad_units = Json(campaign.ad_units.clone()); + let stmt = client.prepare("INSERT INTO campaigns (id, channel_id, channel, creator, budget, validators, title, pricing_bounds, event_submission, ad_units, targeting_rules, created, active_from, active_to) values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)").await?; + let row = client + .execute( + &stmt, + &[ + &campaign.id, + &campaign.channel.id(), + &campaign.channel, + &campaign.creator, + &campaign.budget, + &campaign.validators, + &campaign.title, + &campaign.pricing_bounds, + &campaign.event_submission, + &ad_units, + &campaign.targeting_rules, + &campaign.created, + &campaign.active.from, + &campaign.active.to, + ], + ) + .await?; + + let inserted = row == 1; + Ok(inserted) +} + +/// ```text +/// SELECT id, channel, creator, budget, validators, title, pricing_bounds, 
event_submission, ad_units, targeting_rules, created, active_from, active_to FROM campaigns +/// WHERE id = $1 +/// ``` +// TODO: Remove once we use this fn +#[allow(dead_code)] +pub async fn fetch_campaign(pool: DbPool, campaign: &Campaign) -> Result { + let client = pool.get().await?; + let statement = client.prepare("SELECT id, channel, creator, budget, validators, title, pricing_bounds, event_submission, ad_units, targeting_rules, created, active_from, active_to FROM campaigns WHERE id = $1").await?; + + let row = client.query_one(&statement, &[&campaign.id]).await?; + + Ok(Campaign::from(&row)) +} + +#[cfg(test)] +mod test { + use primitives::{ + campaign::{Campaign, CampaignId}, + channel_v5::Channel, + util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN}, + ChannelId, UnifiedNum, + }; + + use crate::db::tests_postgres::{setup_test_migrations, DATABASE_POOL}; + + use super::*; + + #[tokio::test] + async fn it_inserts_and_fetches_campaign() { + let db_pool = DATABASE_POOL.get().await.expect("Should get a DB pool"); + + setup_test_migrations(db_pool.clone()) + .await + .expect("Migrations should succeed"); + + let campaign_for_testing = DUMMY_CAMPAIGN.clone(); + let is_inserted = insert_campaign(&db_pool.clone(), &campaign_for_testing) + .await + .expect("Should succeed"); + + assert!(is_inserted); + + let fetched_campaign: Campaign = fetch_campaign(db_pool.clone(), &campaign_for_testing) + .await + .expect("Should fetch successfully"); + + assert_eq!(campaign_for_testing, fetched_campaign); + } +} diff --git a/sentry/src/db/spendable.rs b/sentry/src/db/spendable.rs index 293358fb1..2d9eeaee7 100644 --- a/sentry/src/db/spendable.rs +++ b/sentry/src/db/spendable.rs @@ -54,22 +54,15 @@ mod test { UnifiedNum, }; - use crate::db::{ - tests_postgres::{setup_test_migrations, test_postgres_connection}, - POSTGRES_CONFIG, - }; + use crate::db::tests_postgres::{setup_test_migrations, DATABASE_POOL}; use super::*; #[tokio::test] async fn it_inserts_and_fetches_spendable() { 
- let test_pool = test_postgres_connection(POSTGRES_CONFIG.clone()) - .get() - .await - .unwrap(); - // let pool = test_pool.get().await.expect("Should get a DB pool"); + let db_pool = DATABASE_POOL.get().await.expect("Should get a DB pool"); - setup_test_migrations(test_pool.clone()) + setup_test_migrations(db_pool.clone()) .await .expect("Migrations should succeed"); @@ -81,19 +74,16 @@ mod test { still_on_create2: UnifiedNum::from(500_000), }, }; - let is_inserted = insert_spendable(test_pool.clone(), &spendable) + let is_inserted = insert_spendable(db_pool.clone(), &spendable) .await .expect("Should succeed"); assert!(is_inserted); - let fetched_spendable = fetch_spendable( - test_pool.clone(), - &spendable.spender, - &spendable.channel.id(), - ) - .await - .expect("Should fetch successfully"); + let fetched_spendable = + fetch_spendable(db_pool.clone(), &spendable.spender, &spendable.channel.id()) + .await + .expect("Should fetch successfully"); assert_eq!(spendable, fetched_spendable); } diff --git a/sentry/src/routes/campaign.rs b/sentry/src/routes/campaign.rs new file mode 100644 index 000000000..94076e34b --- /dev/null +++ b/sentry/src/routes/campaign.rs @@ -0,0 +1,44 @@ +use crate::{success_response, Application, Auth, ResponseError, RouteParams, Session}; +use hyper::{Body, Request, Response}; +use primitives::{adapter::Adapter, sentry::{ + campaign_create::CreateCampaign,SuccessResponse}}; + +pub async fn create_campaign( + req: Request, + app: &Application, +) -> Result, ResponseError> { + let body = hyper::body::to_bytes(req.into_body()).await?; + + let campaign = serde_json::from_slice::(&body) + .map_err(|e| ResponseError::FailedValidation(e.to_string()))? 
+ // create the actual `Campaign` with random `CampaignId` + .into_campaign(); + + + // TODO AIP#61: Validate Campaign + + let error_response = ResponseError::BadRequest("err occurred; please try again later".into()); + + // insert Campaign + + // match insert_campaign(&app.pool, &campaign).await { + // Err(error) => { + // error!(&app.logger, "{}", &error; "module" => "create_channel"); + + // match error { + // PoolError::Backend(error) if error.code() == Some(&SqlState::UNIQUE_VIOLATION) => { + // Err(ResponseError::Conflict( + // "channel already exists".to_string(), + // )) + // } + // _ => Err(error_response), + // } + // } + // Ok(false) => Err(error_response), + // _ => Ok(()), + // }?; + + let create_response = SuccessResponse { success: true }; + + Ok(success_response(serde_json::to_string(&campaign)?)) +} diff --git a/sentry/src/spender.rs b/sentry/src/spender.rs index 2e7f3d20c..da6b9cacd 100644 --- a/sentry/src/spender.rs +++ b/sentry/src/spender.rs @@ -36,11 +36,11 @@ pub mod fee { campaign: &Campaign, for_validator: ValidatorId, ) -> Result, DomainError> { - let payout = match campaign.find_validator(for_validator) { - Some(validator) => { + let payout = match campaign.find_validator(&for_validator) { + Some(validator_role) => { // should never overflow let fee_payout = payout - .checked_mul(&validator.fee) + .checked_mul(&validator_role.validator().fee) .ok_or_else(|| { DomainError::InvalidArgument("payout calculation overflow".to_string()) })?