Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into payment_charges
Browse files Browse the repository at this point in the history
  • Loading branch information
kashif-m committed May 23, 2024
2 parents b7f9153 + ae77373 commit c78ebf1
Show file tree
Hide file tree
Showing 91 changed files with 3,147 additions and 2,391 deletions.
25 changes: 25 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,31 @@ All notable changes to HyperSwitch will be documented here.

- - -

## 2024.05.23.0

### Features

- **connector:**
- Accept connector_transaction_id in 4xx error_response of connector ([#4720](https://github.com/juspay/hyperswitch/pull/4720)) ([`2ad7fc0`](https://github.com/juspay/hyperswitch/commit/2ad7fc0cd6c102bea4d671c98f7fe50fd709d4ec))
- [AUTHORIZEDOTNET] Implement zero mandates ([#4704](https://github.com/juspay/hyperswitch/pull/4704)) ([`8afeda5`](https://github.com/juspay/hyperswitch/commit/8afeda54fc5e3f3d510c48c81c222387e9cacc0e))
- **payment_methods:** Enable auto-retries for apple pay ([#4721](https://github.com/juspay/hyperswitch/pull/4721)) ([`d942a31`](https://github.com/juspay/hyperswitch/commit/d942a31d60595d366977746be7215620da0ababd))
- **routing:** Use Moka cache for routing with cache invalidation ([#3216](https://github.com/juspay/hyperswitch/pull/3216)) ([`431560b`](https://github.com/juspay/hyperswitch/commit/431560b7fb4401d000c11dbb9c7eb70663591307))
- **users:** Create generate recovery codes API ([#4708](https://github.com/juspay/hyperswitch/pull/4708)) ([`8fa2cd5`](https://github.com/juspay/hyperswitch/commit/8fa2cd556bf898621a1a8722a0af99d174447485))
- **webhook:** Add frm webhook support ([#4662](https://github.com/juspay/hyperswitch/pull/4662)) ([`ae601e8`](https://github.com/juspay/hyperswitch/commit/ae601e8e1be9215488daaae7cb39ad5a030e98d9))

### Bug Fixes

- **core:** Fix failing token based MIT payments ([#4735](https://github.com/juspay/hyperswitch/pull/4735)) ([`1bd4061`](https://github.com/juspay/hyperswitch/commit/1bd406197b5baf1c041f0dffa5bc02dce10f1529))
- Added hget lookup for all updated_by existing cases ([#4716](https://github.com/juspay/hyperswitch/pull/4716)) ([`fabf80c`](https://github.com/juspay/hyperswitch/commit/fabf80c2b18ca690b7fb709c8c12d1ef7f24e5b6))

### Miscellaneous Tasks

- **postman:** Update Postman collection files ([`ec50843`](https://github.com/juspay/hyperswitch/commit/ec508435a19c2942a5d66757a74dd06bed5b1a76))

**Full Changelog:** [`2024.05.22.0...2024.05.23.0`](https://github.com/juspay/hyperswitch/compare/2024.05.22.0...2024.05.23.0)

- - -

## 2024.05.22.0

### Features
Expand Down
8 changes: 8 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions crates/analytics/docs/clickhouse/scripts/payment_attempts.sql
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ CREATE TABLE payment_attempt_queue (
`unified_code` Nullable(String),
`unified_message` Nullable(String),
`mandate_data` Nullable(String),
`client_source` LowCardinality(Nullable(String)),
`client_version` LowCardinality(Nullable(String)),
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-payment-attempt-events',
Expand Down Expand Up @@ -86,6 +88,8 @@ CREATE TABLE payment_attempts (
`unified_message` Nullable(String),
`mandate_data` Nullable(String),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`client_source` LowCardinality(Nullable(String)),
`client_version` LowCardinality(Nullable(String)),
`sign_flag` Int8,
INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
Expand Down Expand Up @@ -137,6 +141,8 @@ CREATE MATERIALIZED VIEW payment_attempt_mv TO payment_attempts (
`unified_message` Nullable(String),
`mandate_data` Nullable(String),
`inserted_at` DateTime64(3),
`client_source` LowCardinality(Nullable(String)),
`client_version` LowCardinality(Nullable(String)),
`sign_flag` Int8
) AS
SELECT
Expand Down Expand Up @@ -180,6 +186,8 @@ SELECT
unified_message,
mandate_data,
now() AS inserted_at,
client_source,
client_version,
sign_flag
FROM
payment_attempt_queue
Expand Down
2 changes: 2 additions & 0 deletions crates/analytics/src/auth_events/accumulator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ pub struct AuthEventMetricsAccumulator {
pub challenge_attempt_count: CountAccumulator,
pub challenge_success_count: CountAccumulator,
pub frictionless_flow_count: CountAccumulator,
pub frictionless_success_count: CountAccumulator,
}

#[derive(Debug, Default)]
Expand Down Expand Up @@ -53,6 +54,7 @@ impl AuthEventMetricsAccumulator {
challenge_attempt_count: self.challenge_attempt_count.collect(),
challenge_success_count: self.challenge_success_count.collect(),
frictionless_flow_count: self.frictionless_flow_count.collect(),
frictionless_success_count: self.frictionless_success_count.collect(),
}
}
}
3 changes: 3 additions & 0 deletions crates/analytics/src/auth_events/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,9 @@ pub async fn get_metrics(
AuthEventMetrics::FrictionlessFlowCount => metrics_builder
.frictionless_flow_count
.add_metrics_bucket(&value),
AuthEventMetrics::FrictionlessSuccessCount => metrics_builder
.frictionless_success_count
.add_metrics_bucket(&value),
}
}
}
Expand Down
7 changes: 7 additions & 0 deletions crates/analytics/src/auth_events/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ mod challenge_attempt_count;
mod challenge_flow_count;
mod challenge_success_count;
mod frictionless_flow_count;
mod frictionless_success_count;
mod three_ds_sdk_count;

use authentication_attempt_count::AuthenticationAttemptCount;
Expand All @@ -23,6 +24,7 @@ use challenge_attempt_count::ChallengeAttemptCount;
use challenge_flow_count::ChallengeFlowCount;
use challenge_success_count::ChallengeSuccessCount;
use frictionless_flow_count::FrictionlessFlowCount;
use frictionless_success_count::FrictionlessSuccessCount;
use three_ds_sdk_count::ThreeDsSdkCount;

#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
Expand Down Expand Up @@ -102,6 +104,11 @@ where
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::FrictionlessSuccessCount => {
FrictionlessSuccessCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use time::PrimitiveDateTime;

use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
query::{Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

Expand All @@ -34,7 +34,7 @@ where
pool: &T,
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::ConnectorEventsAnalytics);
QueryBuilder::new(AnalyticsCollection::ApiEventsAnalytics);

query_builder
.add_select_column(Aggregate::Count {
Expand All @@ -54,7 +54,11 @@ where
.switch()?;

query_builder
.add_filter_clause("flow", AuthEventFlows::PostAuthentication)
.add_filter_clause("api_flow", AuthEventFlows::IncomingWebhookReceive)
.switch()?;

query_builder
.add_custom_filter_clause("request", "threeDSServerTransID", FilterTypes::Like)
.switch()?;

time_range
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ where
pool: &T,
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::ConnectorEventsAnalytics);
QueryBuilder::new(AnalyticsCollection::ApiEventsAnalytics);

query_builder
.add_select_column(Aggregate::Count {
Expand All @@ -54,11 +54,11 @@ where
.switch()?;

query_builder
.add_filter_clause("flow", AuthEventFlows::PostAuthentication)
.add_filter_clause("api_flow", AuthEventFlows::IncomingWebhookReceive)
.switch()?;

query_builder
.add_filter_clause("visitParamExtractRaw(response, 'transStatus')", "\"Y\"")
.add_filter_clause("visitParamExtractRaw(request, 'transStatus')", "\"Y\"")
.switch()?;

time_range
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
use api_models::analytics::{
auth_events::{AuthEventFlows, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

/// Marker type implementing the "frictionless success count" auth-event metric.
/// Carries no state; the query logic lives in its `AuthEventMetric` impl below.
#[derive(Default)]
pub(super) struct FrictionlessSuccessCount;

#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for FrictionlessSuccessCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Loads the frictionless-success metric: a count of `api_events` rows for this
    /// merchant on the `PaymentsExternalAuthentication` flow whose JSON `response`
    /// has `transStatus` equal to `"Y"`, bucketed by the optional `granularity`
    /// over `time_range`.
    ///
    /// NOTE(review): `transStatus == "Y"` presumably marks a successful frictionless
    /// 3DS authentication — confirm against the sibling frictionless_flow_count
    /// metric, which filters on `request` rather than `response`.
    ///
    /// # Errors
    /// Returns [`MetricsError::QueryBuildingError`] / [`MetricsError::QueryExecutionFailure`]
    /// when the query cannot be built or run, and
    /// [`MetricsError::PostProcessingFailure`] when bucket assembly fails.
    async fn load_metrics(
        &self,
        merchant_id: &str,
        _publishable_key: &str,
        granularity: &Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::ApiEventsAnalytics);

        // COUNT(*) over the matching events, exposed as `count`.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;

        // Bucket the count into time windows when a granularity is requested.
        if let Some(granularity) = granularity.as_ref() {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }

        query_builder
            .add_filter_clause("merchant_id", merchant_id)
            .switch()?;

        query_builder
            .add_filter_clause("api_flow", AuthEventFlows::PaymentsExternalAuthentication)
            .switch()?;

        // Only authentications whose response reports transStatus = "Y".
        query_builder
            .add_filter_clause("visitParamExtractRaw(response, 'transStatus')", "\"Y\"")
            .switch()?;

        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        // Group by the time bucket only when bucketing was requested above.
        // (`is_some()` replaces the former `if let Some(_granularity)` with an
        // unused binding — clippy::redundant_pattern_matching.)
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
4 changes: 2 additions & 2 deletions crates/analytics/src/clickhouse.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ impl AnalyticsDataSource for ClickhouseClient {
AnalyticsCollection::SdkEvents
| AnalyticsCollection::ApiEvents
| AnalyticsCollection::ConnectorEvents
| AnalyticsCollection::ConnectorEventsAnalytics
| AnalyticsCollection::ApiEventsAnalytics
| AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree,
}
}
Expand Down Expand Up @@ -374,7 +374,7 @@ impl ToSql<ClickhouseClient> for AnalyticsCollection {
Self::Refund => Ok("refunds".to_string()),
Self::SdkEvents => Ok("sdk_events_audit".to_string()),
Self::ApiEvents => Ok("api_events_audit".to_string()),
Self::ConnectorEventsAnalytics => Ok("connector_events".to_string()),
Self::ApiEventsAnalytics => Ok("api_events".to_string()),
Self::PaymentIntent => Ok("payment_intents".to_string()),
Self::ConnectorEvents => Ok("connector_events_audit".to_string()),
Self::OutgoingWebhookEvent => Ok("outgoing_webhook_events_audit".to_string()),
Expand Down
2 changes: 2 additions & 0 deletions crates/analytics/src/payments/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -291,6 +291,8 @@ pub async fn get_filters(
PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()),
PaymentDimensions::PaymentMethod => fil.payment_method,
PaymentDimensions::PaymentMethodType => fil.payment_method_type,
PaymentDimensions::ClientSource => fil.client_source,
PaymentDimensions::ClientVersion => fil.client_version,
})
.collect::<Vec<String>>();
res.query_data.push(FilterValue {
Expand Down
2 changes: 2 additions & 0 deletions crates/analytics/src/payments/distribution.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ pub struct PaymentDistributionRow {
pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
pub payment_method: Option<String>,
pub payment_method_type: Option<String>,
pub client_source: Option<String>,
pub client_version: Option<String>,
pub total: Option<bigdecimal::BigDecimal>,
pub count: Option<i64>,
pub error_message: Option<String>,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,8 @@ where
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.client_source.clone(),
i.client_version.clone(),
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
Expand Down
2 changes: 2 additions & 0 deletions crates/analytics/src/payments/filters.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,4 +57,6 @@ pub struct FilterRow {
pub authentication_type: Option<DBEnumWrapper<AuthenticationType>>,
pub payment_method: Option<String>,
pub payment_method_type: Option<String>,
pub client_source: Option<String>,
pub client_version: Option<String>,
}
2 changes: 2 additions & 0 deletions crates/analytics/src/payments/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ pub struct PaymentMetricRow {
pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
pub payment_method: Option<String>,
pub payment_method_type: Option<String>,
pub client_source: Option<String>,
pub client_version: Option<String>,
pub total: Option<bigdecimal::BigDecimal>,
pub count: Option<i64>,
#[serde(with = "common_utils::custom_serde::iso8601::option")]
Expand Down
2 changes: 2 additions & 0 deletions crates/analytics/src/payments/metrics/avg_ticket_size.rs
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,8 @@ where
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.client_source.clone(),
i.client_version.clone(),
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,8 @@ where
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.client_source.clone(),
i.client_version.clone(),
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
Expand Down
2 changes: 2 additions & 0 deletions crates/analytics/src/payments/metrics/payment_count.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,8 @@ where
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.client_source.clone(),
i.client_version.clone(),
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
Expand Down
Loading

0 comments on commit c78ebf1

Please sign in to comment.