-
{children}
+ {this.renderTags()}
{tct(
'Tags are automatically indexed for searching and breakdown charts. Learn how to [link: add custom tags to issues]',
{
- link: ,
+ link: (
+
+ ),
}
)}
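Note: `tct` interpolates React components into translated strings by tag name. A minimal sketch of the pattern used above (the `ExternalLink` target URL is illustrative, not taken from this diff):

```tsx
import {tct} from 'app/locale';
import ExternalLink from 'app/components/links/externalLink';

// [link:...] marks the span of text that gets wrapped by the `link` component.
const message = tct('Learn how to [link:add custom tags to issues]', {
  link: <ExternalLink href="https://docs.sentry.io/enriching-events/tags/" />,
});
```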
@@ -162,27 +135,39 @@ class GroupTags extends React.Component
{
}
}
-const DetailsLinkWrapper = styled('div')`
- display: flex;
-`;
-
const Container = styled('div')`
display: flex;
flex-wrap: wrap;
`;
+const StyledPanelHeader = styled(PanelHeader)`
+ text-transform: none;
+`;
+
+const TagHeading = styled('h5')`
+ font-size: ${p => p.theme.fontSizeLarge};
+ margin-bottom: 0;
+`;
+
+const UnstyledUnorderedList = styled('ul')`
+ list-style: none;
+ padding-left: 0;
+ margin-bottom: 0;
+`;
+
const TagItem = styled('div')`
padding: 0 ${space(1)};
width: 50%;
`;
-const TagBarBackground = styled('div')`
+const TagBarBackground = styled('div')<{widthPercent: string}>`
position: absolute;
top: 0;
bottom: 0;
left: 0;
background: ${p => p.theme.tagBar};
border-radius: ${p => p.theme.borderRadius};
+ width: ${p => p.widthPercent};
`;
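Note: the new `widthPercent` prop drives the bar width through Emotion's typed prop interpolation. A self-contained sketch of the same pattern:

```tsx
import styled from '@emotion/styled';

// The generic parameter types the extra prop, so `p.widthPercent` is
// type-checked; the value is interpolated straight into the CSS.
const Bar = styled('div')<{widthPercent: string}>`
  height: 16px;
  background: #c6becf;
  width: ${p => p.widthPercent};
`;

// Usage: <Bar widthPercent="42%" />
```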
const TagBarGlobalSelectionLink = styled(GlobalSelectionLink)`
@@ -217,4 +202,4 @@ const TagBarCount = styled('div')`
font-variant-numeric: tabular-nums;
`;
-export default withApi(GroupTags);
+export default GroupTags;
diff --git a/static/app/views/organizationGroupDetails/index.tsx b/static/app/views/organizationGroupDetails/index.tsx
index 5b2ea3af806a51..92e632e49abdc9 100644
--- a/static/app/views/organizationGroupDetails/index.tsx
+++ b/static/app/views/organizationGroupDetails/index.tsx
@@ -8,6 +8,7 @@ import withOrganization from 'app/utils/withOrganization';
import withProjects from 'app/utils/withProjects';
import GroupDetails from './groupDetails';
+import SampleEventAlert from './sampleEventAlert';
type Props = {
selection: GlobalSelection;
@@ -27,13 +28,16 @@ class OrganizationGroupDetails extends React.Component {
render() {
const {selection, ...props} = this.props;
-
return (
-
+
+
+
+
+
);
}
}
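Note: the JSX of this hunk did not survive extraction. Based on the `SampleEventAlert` import added above, a plausible (hypothetical) reading is that the render now prepends the alert to the existing `GroupDetails` tree:

```tsx
// Hypothetical reconstruction -- the exact props are not preserved in this diff.
return (
  <Fragment>
    <SampleEventAlert />
    <GroupDetails environments={selection.environments} {...props} />
  </Fragment>
);
```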
diff --git a/static/app/views/organizationGroupDetails/sampleEventAlert.tsx b/static/app/views/organizationGroupDetails/sampleEventAlert.tsx
new file mode 100644
index 00000000000000..bd1f9671565c8a
--- /dev/null
+++ b/static/app/views/organizationGroupDetails/sampleEventAlert.tsx
@@ -0,0 +1,65 @@
+import styled from '@emotion/styled';
+
+import Button from 'app/components/button';
+import PageAlertBar from 'app/components/pageAlertBar';
+import {IconLightning} from 'app/icons';
+import {t} from 'app/locale';
+import space from 'app/styles/space';
+import {GlobalSelection, Organization, Project} from 'app/types';
+import trackAdvancedAnalyticsEvent from 'app/utils/analytics/trackAdvancedAnalyticsEvent';
+import withGlobalSelection from 'app/utils/withGlobalSelection';
+import withOrganization from 'app/utils/withOrganization';
+import withProjects from 'app/utils/withProjects';
+
+function SampleEventAlert({
+ selection,
+ organization,
+ projects,
+}: {
+ selection: GlobalSelection;
+ organization: Organization;
+ projects: Project[];
+}) {
+ if (projects.length === 0) {
+ return null;
+ }
+ if (selection.projects.length !== 1) {
+ return null;
+ }
+ const selectedProject = projects.find(p => p.id === selection.projects[0].toString());
+ if (!selectedProject || selectedProject.firstEvent) {
+ return null;
+ }
+ return (
+
+
+
+ {t(
+ 'You are viewing a sample error. Configure Sentry to start viewing real errors.'
+ )}
+
+
+
+ );
+}
+
+export default withProjects(withOrganization(withGlobalSelection(SampleEventAlert)));
+
+const TextWrapper = styled('span')`
+ margin: 0 ${space(1)};
+`;
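Note: the component's JSX was also stripped from this hunk. Given the imports (`PageAlertBar`, `IconLightning`, `Button`, `trackAdvancedAnalyticsEvent`) and the `TextWrapper` above, a hedged sketch of what the render likely contains; the button route and the analytics event key are assumptions:

```tsx
return (
  <PageAlertBar>
    <IconLightning />
    <TextWrapper>
      {t(
        'You are viewing a sample error. Configure Sentry to start viewing real errors.'
      )}
    </TextWrapper>
    <Button
      size="xsmall"
      priority="primary"
      to={`/${organization.slug}/${selectedProject.slug}/getting-started/`} // assumed route
      onClick={() =>
        trackAdvancedAnalyticsEvent('growth.sample_error_onboarding_link_clicked', {
          organization, // assumed payload shape
        })
      }
    >
      {t('Get Started')}
    </Button>
  </PageAlertBar>
);
```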
diff --git a/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx b/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx
index 8ded31d66203e8..139c51af42c149 100644
--- a/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx
+++ b/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx
@@ -114,15 +114,6 @@ export class SentryAppExternalForm extends Component {
this.debouncedOptionLoad(field, input, resolve);
});
- getSubmitEndpoint() {
- const {sentryAppInstallationUuid, element} = this.props;
- if (element === 'alert-rule-action') {
- // TODO(leander): Send request to the correct endpoint
- return '/404/';
- }
- return `/sentry-app-installations/${sentryAppInstallationUuid}/external-issue-actions/`;
- }
-
getElementText = () => {
const {element} = this.props;
switch (element) {
diff --git a/static/app/views/performance/table.tsx b/static/app/views/performance/table.tsx
index f802da9b1a5aa7..09f8317f4ed3ce 100644
--- a/static/app/views/performance/table.tsx
+++ b/static/app/views/performance/table.tsx
@@ -194,11 +194,6 @@ class _Table extends React.Component {
);
}
- if (field.startsWith('key_transaction')) {
- // don't display per cell actions for key_transaction
- return rendered;
- }
-
if (field.startsWith('team_key_transaction')) {
// don't display per cell actions for team_key_transaction
return rendered;
@@ -311,28 +306,11 @@ class _Table extends React.Component {
renderPrependCellWithData = (tableData: TableData | null) => {
const {eventView} = this.props;
- const keyTransactionColumn = eventView
- .getColumns()
- .find((col: TableColumn<React.ReactText>) => col.name === 'key_transaction');
const teamKeyTransactionColumn = eventView
.getColumns()
.find((col: TableColumn<React.ReactText>) => col.name === 'team_key_transaction');
return (isHeader: boolean, dataRow?: any) => {
- if (keyTransactionColumn) {
- if (isHeader) {
- const star = (
-
- );
- return [this.renderHeadCell(tableData?.meta, keyTransactionColumn, star)];
- } else {
- return [this.renderBodyCell(tableData, keyTransactionColumn, dataRow)];
- }
- } else if (teamKeyTransactionColumn) {
+ if (teamKeyTransactionColumn) {
if (isHeader) {
const star = (
@@ -387,11 +365,10 @@ class _Table extends React.Component {
this.state;
const columnOrder = eventView
.getColumns()
- // remove key_transactions from the column order as we'll be rendering it
+ // remove team_key_transactions from the column order as we'll be rendering it
// via a prepended column
.filter(
- (col: TableColumn<React.ReactText>) =>
- col.name !== 'key_transaction' &&
col.name !== 'team_key_transaction' &&
!col.name.startsWith('count_miserable') &&
col.name !== 'project_threshold_config'
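Note: after this change only `team_key_transaction` is special-cased: it is filtered out of the normal column order and rendered through the prepend closure instead. A condensed sketch of that pattern (`renderHeadCell`, `renderBodyCell`, `tableMeta`, and `star` are stand-ins for the helpers in this file):

```tsx
const teamKeyTransactionColumn = eventView
  .getColumns()
  .find(col => col.name === 'team_key_transaction');

// The closure renders either the header star or the body cell for the
// single prepended column; all other columns flow through columnOrder.
const renderPrepend = (isHeader: boolean, dataRow?: any) => {
  if (!teamKeyTransactionColumn) {
    return [];
  }
  return isHeader
    ? [renderHeadCell(tableMeta, teamKeyTransactionColumn, star)]
    : [renderBodyCell(tableData, teamKeyTransactionColumn, dataRow)];
};
```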
diff --git a/static/app/views/performance/transactionDetails/finishSetupAlert.tsx b/static/app/views/performance/transactionDetails/finishSetupAlert.tsx
index 092e33d78313bf..011c06762586be 100644
--- a/static/app/views/performance/transactionDetails/finishSetupAlert.tsx
+++ b/static/app/views/performance/transactionDetails/finishSetupAlert.tsx
@@ -1,6 +1,7 @@
import styled from '@emotion/styled';
import Button from 'app/components/button';
+import PageAlertBar from 'app/components/pageAlertBar';
import {IconLightning} from 'app/icons';
import {t} from 'app/locale';
import space from 'app/styles/space';
@@ -15,7 +16,7 @@ export default function FinishSetupAlert({
project: Project;
}) {
return (
-
+
{t(
@@ -37,20 +38,10 @@ export default function FinishSetupAlert({
>
{t('Get Started')}
-
+
);
}
-const AlertBar = styled('div')`
- display: flex;
- align-items: center;
- justify-content: center;
- color: ${p => p.theme.headerBackground};
- background-color: ${p => p.theme.bannerBackground};
- padding: 6px 30px;
- font-size: 14px;
-`;
-
const TextWrapper = styled('span')`
margin: 0 ${space(1)};
`;
diff --git a/static/app/views/performance/vitalDetail/table.tsx b/static/app/views/performance/vitalDetail/table.tsx
index d775cfe59e680f..ba2537202b5e4c 100644
--- a/static/app/views/performance/vitalDetail/table.tsx
+++ b/static/app/views/performance/vitalDetail/table.tsx
@@ -203,10 +203,6 @@ class Table extends React.Component {
);
}
- if (field.startsWith('key_transaction')) {
- return rendered;
- }
-
if (field.startsWith('team_key_transaction')) {
return rendered;
}
@@ -273,30 +269,11 @@ class Table extends React.Component {
renderPrependCellWithData = (tableData: TableData | null, vitalName: WebVital) => {
const {eventView} = this.props;
- const keyTransactionColumn = eventView
- .getColumns()
- .find((col: TableColumn<React.ReactText>) => col.name === 'key_transaction');
const teamKeyTransactionColumn = eventView
.getColumns()
.find((col: TableColumn<React.ReactText>) => col.name === 'team_key_transaction');
return (isHeader: boolean, dataRow?: any) => {
- if (keyTransactionColumn) {
- if (isHeader) {
- const star = (
-
- );
- return [this.renderHeadCell(tableData?.meta, keyTransactionColumn, star)];
- } else {
- return [
- this.renderBodyCell(tableData, keyTransactionColumn, dataRow, vitalName),
- ];
- }
- } else if (teamKeyTransactionColumn) {
+ if (teamKeyTransactionColumn) {
if (isHeader) {
const star = (
{
.getColumns()
// remove key_transactions from the column order as we'll be rendering it
// via a prepended column
- .filter(
- (col: TableColumn<React.ReactText>) =>
- col.name !== 'key_transaction' && col.name !== 'team_key_transaction'
- )
+ .filter((col: TableColumn<React.ReactText>) => col.name !== 'team_key_transaction')
.slice(0, -1)
.map((col: TableColumn<React.ReactText>, i: number) => {
if (typeof widths[i] === 'number') {
diff --git a/static/app/views/settings/account/accountNotificationFineTuning.tsx b/static/app/views/settings/account/accountNotificationFineTuning.tsx
index 980c58d6653de3..70ec84c40609c6 100644
--- a/static/app/views/settings/account/accountNotificationFineTuning.tsx
+++ b/static/app/views/settings/account/accountNotificationFineTuning.tsx
@@ -65,7 +65,7 @@ const AccountNotificationsByProject = ({projects, field}: ANBPProps) => {
@@ -101,7 +101,7 @@ const AccountNotificationsByOrganization = ({organizations, field}: ANBOProps) =
@@ -183,7 +183,7 @@ class AccountNotificationFineTuning extends AsyncView {
if (fineTuneType === 'email') {
// Fetch verified email addresses
- field.choices = this.emailChoices.map(({email}) => [email, email]);
+ field.options = this.emailChoices.map(({email}) => ({value: email, label: email}));
}
if (!notifications || !fineTuneData) {
diff --git a/static/app/views/settings/account/notifications/fields.tsx b/static/app/views/settings/account/notifications/fields.tsx
index 77a2232dd7f086..1d118a59425f4b 100644
--- a/static/app/views/settings/account/notifications/fields.tsx
+++ b/static/app/views/settings/account/notifications/fields.tsx
@@ -1,10 +1,11 @@
import {t} from 'app/locale';
+import {SelectValue} from 'app/types';
export type FineTuneField = {
title: string;
description: string;
type: 'select';
- choices?: string[][];
+ options?: SelectValue<string>[];
defaultValue?: string;
defaultFieldName?: string;
};
@@ -16,10 +17,10 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record<string, FineTuneField> = {
'Notifications from Alert Rules that your team has set up. You’ll always receive notifications from Alerts configured to be sent directly to you.'
),
type: 'select',
- choices: [
- ['-1', t('Default')],
- ['1', t('On')],
- ['0', t('Off')],
+ options: [
+ {value: '-1', label: t('Default')},
+ {value: '1', label: t('On')},
+ {value: '0', label: t('Off')},
],
defaultValue: '-1',
defaultFieldName: 'subscribeByDefault',
@@ -30,11 +31,11 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record<string, FineTuneField> = {
'Control workflow notifications, e.g. changes in issue assignment, resolution status, and comments.'
),
type: 'select',
- choices: [
- ['-1', t('Default')],
- ['0', t('Always')],
- ['1', t('Only on issues I subscribe to')],
- ['2', t('Never')],
+ options: [
+ {value: '-1', label: t('Default')},
+ {value: '0', label: t('Always')},
+ {value: '1', label: t('Only on issues I subscribe to')},
+ {value: '2', label: t('Never')},
],
defaultValue: '-1',
defaultFieldName: 'workflowNotifications',
@@ -45,11 +46,11 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record<string, FineTuneField> = {
'Control deploy notifications that include release, environment, and commit overviews.'
),
type: 'select',
- choices: [
- ['-1', t('Default')],
- ['2', t('Always')],
- ['3', t('Only on deploys with my commits')],
- ['4', t('Never')],
+ options: [
+ {value: '-1', label: t('Default')},
+ {value: '2', label: t('Always')},
+ {value: '3', label: t('Only on deploys with my commits')},
+ {value: '4', label: t('Never')},
],
defaultValue: '-1',
defaultFieldName: 'deployNotifications',
@@ -62,9 +63,9 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record<string, FineTuneField> = {
type: 'select',
// API only saves organizations that have this disabled, so we should default to "On"
defaultValue: '1',
- choices: [
- ['1', t('On')],
- ['0', t('Off')],
+ options: [
+ {value: '1', label: t('On')},
+ {value: '0', label: t('Off')},
],
defaultFieldName: 'weeklyReports',
},
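Note: the migration from `choices` tuples to `options` objects is mechanical. A sketch of the shape change, assuming `SelectValue<T>` is roughly `{value: T; label: string}` as imported above:

```ts
import {SelectValue} from 'app/types';

// Old shape: positional [value, label] tuples.
const choices: Array<[string, string]> = [
  ['-1', 'Default'],
  ['1', 'On'],
  ['0', 'Off'],
];

// New shape: named fields, consumable by select controls directly.
const options: SelectValue<string>[] = choices.map(([value, label]) => ({
  value,
  label,
}));
```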
diff --git a/static/app/views/teamInsights/filter.tsx b/static/app/views/teamInsights/filter.tsx
deleted file mode 100644
index 69a844e78495ab..00000000000000
--- a/static/app/views/teamInsights/filter.tsx
+++ /dev/null
@@ -1,162 +0,0 @@
-import {Component, Fragment} from 'react';
-import styled from '@emotion/styled';
-
-import DropdownButton from 'app/components/dropdownButton';
-import DropdownControl, {Content} from 'app/components/dropdownControl';
-import {t} from 'app/locale';
-import overflowEllipsis from 'app/styles/overflowEllipsis';
-import space from 'app/styles/space';
-
-type DropdownButtonProps = React.ComponentProps<typeof DropdownButton>;
-
-type DropdownSection = {
- id: string;
- label: string;
- items: Array<{label: string; value: string; checked: boolean; filtered: boolean}>;
-};
-
-type SectionProps = DropdownSection & {
- toggleFilter: (value: string) => void;
-};
-
-function FilterSection({label, items, toggleFilter}: SectionProps) {
- return (
-
-
- {items
- .filter(item => !item.filtered)
- .map(item => (
- {
- toggleFilter(item.value);
- }}
- >
- {item.label}
-
- ))}
-
- );
-}
-
-type Props = {
- header: React.ReactElement;
- onFilterChange: (selectedValue: string) => void;
- dropdownSection: DropdownSection;
-};
-
-class Filter extends Component {
- toggleFilter = (value: string) => {
- const {onFilterChange} = this.props;
- onFilterChange(value);
- };
-
- render() {
- const {dropdownSection, header} = this.props;
- const selected = this.props.dropdownSection.items.find(item => item.checked);
-
- const dropDownButtonProps: Pick<DropdownButtonProps, 'priority'> & {
- hasDarkBorderBottomColor: boolean;
- } = {
- priority: 'default',
- hasDarkBorderBottomColor: false,
- };
-
- return (
- (
-
- {t('Team: ')}
- {selected?.label}
-
- )}
- >
- {({isOpen, getMenuProps}) => (
-
-
- {header}
-
-
-
- )}
-
- );
- }
-}
-
-const MenuContent = styled(Content)`
- max-height: 290px;
- overflow-y: auto;
-`;
-
-const Header = styled('div')`
- display: grid;
- grid-template-columns: auto min-content;
- grid-column-gap: ${space(1)};
- align-items: center;
-
- margin: 0;
- background-color: ${p => p.theme.backgroundSecondary};
- color: ${p => p.theme.gray300};
- font-weight: normal;
- font-size: ${p => p.theme.fontSizeMedium};
- padding: ${space(1)} ${space(2)};
- border-bottom: 1px solid ${p => p.theme.border};
-`;
-
-const StyledDropdownButton = styled(DropdownButton)<{hasDarkBorderBottomColor?: boolean}>`
- white-space: nowrap;
- max-width: 200px;
- height: 42px;
-
- z-index: ${p => p.theme.zIndex.dropdown};
-`;
-
-const List = styled('ul')`
- list-style: none;
- margin: 0;
- padding: 0;
-`;
-
-const ListItem = styled('li')<{isChecked?: boolean}>`
- display: grid;
- grid-template-columns: 1fr max-content;
- grid-column-gap: ${space(1)};
- align-items: center;
- padding: ${space(1)} ${space(2)};
- border-bottom: 1px solid ${p => p.theme.border};
- cursor: pointer;
- :hover {
- background-color: ${p => p.theme.backgroundSecondary};
- }
-
- &:hover span {
- color: ${p => p.theme.blue300};
- text-decoration: underline;
- }
-`;
-
-const TeamName = styled('div')`
- font-size: ${p => p.theme.fontSizeMedium};
- ${overflowEllipsis};
-`;
-
-export default Filter;
diff --git a/static/app/views/teamInsights/index.tsx b/static/app/views/teamInsights/index.tsx
index c31fcf788c6e68..e5a66e101ead93 100644
--- a/static/app/views/teamInsights/index.tsx
+++ b/static/app/views/teamInsights/index.tsx
@@ -1,6 +1,7 @@
import {cloneElement, isValidElement} from 'react';
import Feature from 'app/components/acl/feature';
+import NoProjectMessage from 'app/components/noProjectMessage';
import SentryDocumentTitle from 'app/components/sentryDocumentTitle';
import {t} from 'app/locale';
import {Organization} from 'app/types';
@@ -14,13 +15,15 @@ type Props = {
function TeamInsightsContainer({children, organization}: Props) {
return (
-
- {children && isValidElement(children)
- ? cloneElement(children, {
- organization,
- })
- : children}
-
+
+
+ {children && isValidElement(children)
+ ? cloneElement(children, {
+ organization,
+ })
+ : children}
+
+
);
}
diff --git a/static/app/views/teamInsights/overview.tsx b/static/app/views/teamInsights/overview.tsx
index a12f7da68f296b..d5f3d0d95fd3db 100644
--- a/static/app/views/teamInsights/overview.tsx
+++ b/static/app/views/teamInsights/overview.tsx
@@ -7,6 +7,7 @@ import moment from 'moment';
import {Client} from 'app/api';
import {DateTimeObject} from 'app/components/charts/utils';
+import TeamSelector from 'app/components/forms/teamSelector';
import * as Layout from 'app/components/layouts/thirds';
import LoadingIndicator from 'app/components/loadingIndicator';
import {getParams} from 'app/components/organizations/globalSelectionHeader/getParams';
@@ -23,7 +24,6 @@ import withTeamsForUser from 'app/utils/withTeamsForUser';
import DescriptionCard from './descriptionCard';
import HeaderTabs from './headerTabs';
import TeamAlertsTriggered from './teamAlertsTriggered';
-import TeamDropdown from './teamDropdown';
import TeamMisery from './teamMisery';
import TeamStability from './teamStability';
@@ -174,10 +174,12 @@ function TeamInsightsOverview({
{!loadingTeams && (
- handleChangeTeam(choice.actor.id)}
+ teamFilter={filterTeam => filterTeam.isMember}
/>
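Note: the JSX for the replacement was largely elided here. A rough usage sketch of the shared `TeamSelector` that replaces the bespoke `TeamDropdown`, based only on the props visible in this hunk (the `value` prop name is an assumption):

```tsx
<TeamSelector
  value={currentTeamId} // assumed prop; the currently selected team id
  onChange={choice => handleChangeTeam(choice.actor.id)}
  teamFilter={filterTeam => filterTeam.isMember}
/>
```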
{
renderBody() {
const {alertsTriggered} = this.state;
+ const data = Object.entries(alertsTriggered ?? {})
+ .map(([bucket, count]) => ({
+ value: count,
+ name: new Date(bucket).getTime(),
+ }))
+ .sort((a, b) => a.name - b.name);
+
+ // Convert from daily buckets to 7-day groups
+ const seriesData = chunk(data, 7).map(week => {
+ return {
+ name: week[0].name,
+ value: week.reduce((total, currentData) => total + currentData.value, 0),
+ };
+ });
return (
@@ -79,24 +93,17 @@ class TeamIssues extends AsyncComponent {
moment(new Date(value)).format('MMM D'),
- },
}}
series={[
{
seriesName: t('Alerts Triggered'),
- data: Object.entries(alertsTriggered).map(([bucket, count]) => ({
- value: count,
- name: bucket,
- })),
+ data: seriesData,
},
].reverse()}
/>
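Note: the new aggregation sorts the daily buckets by timestamp, then folds them into 7-day groups, keeping each week's first timestamp and summing its counts. A runnable sketch of the same transform:

```ts
import chunk from 'lodash/chunk';

const daily = [
  {name: 1, value: 2}, {name: 2, value: 0}, {name: 3, value: 5},
  {name: 4, value: 1}, {name: 5, value: 0}, {name: 6, value: 3},
  {name: 7, value: 4}, {name: 8, value: 1},
];

// Each 7-day window collapses to one point: first day's timestamp,
// summed alert count.
const weekly = chunk(daily, 7).map(week => ({
  name: week[0].name,
  value: week.reduce((total, day) => total + day.value, 0),
}));
// -> [{name: 1, value: 15}, {name: 8, value: 1}]
```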
diff --git a/static/app/views/teamInsights/teamDropdown.tsx b/static/app/views/teamInsights/teamDropdown.tsx
deleted file mode 100644
index d1dd0ebd0c4e20..00000000000000
--- a/static/app/views/teamInsights/teamDropdown.tsx
+++ /dev/null
@@ -1,77 +0,0 @@
-import {useState} from 'react';
-import styled from '@emotion/styled';
-
-import Input from 'app/components/forms/input';
-import {t} from 'app/locale';
-import {Team} from 'app/types';
-
-import Filter from './filter';
-
-const ALERT_LIST_QUERY_DEFAULT_TEAMS = ['myteams', 'unassigned'];
-
-type Props = {
- teams: Team[];
- selectedTeam: string;
- handleChangeTeam: (teamId: string) => void;
-};
-
-export function getTeamParams(team?: string | string[]): string[] {
- if (team === undefined) {
- return ALERT_LIST_QUERY_DEFAULT_TEAMS;
- }
-
- if (team === '') {
- return [];
- }
-
- if (Array.isArray(team)) {
- return team;
- }
-
- return [team];
-}
-
-function TeamDropdown({teams, selectedTeam, handleChangeTeam}: Props) {
- const [teamFilterSearch, setTeamFilterSearch] = useState();
-
- const teamItems = teams.map(({id, name}) => ({
- label: name,
- value: id,
- filtered: teamFilterSearch
- ? !name.toLowerCase().includes(teamFilterSearch.toLowerCase())
- : false,
- checked: selectedTeam === id,
- }));
-
- return (
- {
- event.stopPropagation();
- }}
- onChange={(event: React.ChangeEvent) => {
- setTeamFilterSearch(event.target.value);
- }}
- value={teamFilterSearch || ''}
- />
- }
- onFilterChange={handleChangeTeam}
- dropdownSection={{
- id: 'teams',
- label: t('Teams'),
- items: teamItems,
- }}
- />
- );
-}
-
-export default TeamDropdown;
-
-const StyledInput = styled(Input)`
- border: none;
- border-bottom: 1px solid ${p => p.theme.gray200};
- border-radius: 0;
-`;
diff --git a/static/app/views/teamInsights/teamMisery.tsx b/static/app/views/teamInsights/teamMisery.tsx
index 886759165ea106..03ab69cbc969d7 100644
--- a/static/app/views/teamInsights/teamMisery.tsx
+++ b/static/app/views/teamInsights/teamMisery.tsx
@@ -1,4 +1,5 @@
import {Fragment} from 'react';
+import {css} from '@emotion/react';
import styled from '@emotion/styled';
import {Location} from 'history';
@@ -67,6 +68,7 @@ function TeamMisery({
return (
+ );
+ }
+
const commonEventView = {
id: undefined,
query: 'transaction.duration:<15m team_key_transaction:true',
@@ -195,7 +211,7 @@ function TeamMiseryWrapper({
export default TeamMiseryWrapper;
-const StyledPanelTable = styled(PanelTable)`
+const StyledPanelTable = styled(PanelTable)<{isEmpty: boolean}>`
grid-template-columns: 1fr 0.5fr 112px 112px 0.25fr;
font-size: ${p => p.theme.fontSizeMedium};
white-space: nowrap;
@@ -206,6 +222,14 @@ const StyledPanelTable = styled(PanelTable)`
& > div {
padding: ${space(1)} ${space(2)};
}
+
+ ${p =>
+ p.isEmpty &&
+ css`
+ & > div:last-child {
+ padding: 48px ${space(2)};
+ }
+ `}
`;
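Note: the `isEmpty` prop gates a whole rule set via Emotion's `css` helper. A self-contained sketch of the conditional-block pattern:

```tsx
import {css} from '@emotion/react';
import styled from '@emotion/styled';

const Table = styled('div')<{isEmpty: boolean}>`
  padding: 8px;

  /* Only when empty: give the trailing empty-state cell extra room. */
  ${p =>
    p.isEmpty &&
    css`
      & > div:last-child {
        padding: 48px 8px;
      }
    `}
`;
```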
const ProjectBadgeContainer = styled('div')`
diff --git a/static/app/views/teamInsights/teamStability.tsx b/static/app/views/teamInsights/teamStability.tsx
index b23f3edd42be05..703c5314a58dbe 100644
--- a/static/app/views/teamInsights/teamStability.tsx
+++ b/static/app/views/teamInsights/teamStability.tsx
@@ -173,6 +173,7 @@ class TeamStability extends AsyncComponent {
return (
{tct('Last [period]', {period})},
diff --git a/static/less/base.less b/static/less/base.less
index b80c913481923c..34fb9d1d05b544 100644
--- a/static/less/base.less
+++ b/static/less/base.less
@@ -27,6 +27,7 @@ body {
color: @gray-darker;
background: @white-dark;
-webkit-font-smoothing: antialiased;
+ -moz-osx-font-smoothing: grayscale;
overflow-x: hidden;
min-height: 100vh;
}
diff --git a/tests/acceptance/test_emails.py b/tests/acceptance/test_emails.py
index f7e1b061e5280b..8106e7c969cd75 100644
--- a/tests/acceptance/test_emails.py
+++ b/tests/acceptance/test_emails.py
@@ -38,30 +38,30 @@ def read_txt_email_fixture(name: str) -> str:
filename = name.replace(" ", "_") + ".txt"
path = join(dirname(__file__), os.pardir, "fixtures", "emails", filename)
- fixture = None
with open(path) as f:
- fixture = f.read()
- return fixture
+ return f.read()
+
+
+def build_url(path: str, format: str = "html") -> str:
+ return f"{path}?{urlencode({'format': format, 'seed': b'123'})}"
class EmailTestCase(AcceptanceTestCase):
def setUp(self):
super().setUp()
+ # This email address is required to match the fixtures.
self.user = self.create_user("foo@example.com")
self.login_as(self.user)
- def build_url(self, path: str, format: str = "html") -> str:
- return "{}?{}".format(path, urlencode({"format": format, "seed": b"123"}))
-
def test_emails(self):
for url, name in EMAILS:
# HTML output is captured as a snapshot
- self.browser.get(self.build_url(url, "html"))
+ self.browser.get(build_url(url, "html"))
self.browser.wait_until("#preview")
self.browser.snapshot(f"{name} email html")
# Text output is asserted against static fixture files
- self.browser.get(self.build_url(url, "txt"))
+ self.browser.get(build_url(url, "txt"))
self.browser.wait_until("#preview")
elem = self.browser.find_element_by_css_selector("#preview pre")
text_src = elem.get_attribute("innerHTML")
diff --git a/tests/fixtures/emails/release.txt b/tests/fixtures/emails/release.txt
index 9bc58b8423be9e..71aa9cdbd43101 100644
--- a/tests/fixtures/emails/release.txt
+++ b/tests/fixtures/emails/release.txt
@@ -1,4 +1,4 @@
-Version 6c998f755f304593a4713abd123eaf8833a2de5e was deployed to production on Oct. 12, 2016, 3:39 p.m.
+Version 6c998f755f30 was deployed to production on Oct. 12, 2016, 3:39 p.m.
http://testserver/organizations/organization/releases/6c998f755f304593a4713abd123eaf8833a2de5e/?project=1
diff --git a/tests/js/spec/components/createSampleEventButton.spec.jsx b/tests/js/spec/components/createSampleEventButton.spec.jsx
index 56be14c5915f3a..2b96d859b49aa6 100644
--- a/tests/js/spec/components/createSampleEventButton.spec.jsx
+++ b/tests/js/spec/components/createSampleEventButton.spec.jsx
@@ -69,7 +69,7 @@ describe('CreateSampleEventButton', function () {
).toBe(false);
expect(browserHistory.push).toHaveBeenCalledWith(
- `/organizations/${org.slug}/issues/${groupID}/`
+ `/organizations/${org.slug}/issues/${groupID}/?project=${project.id}`
);
});
@@ -115,7 +115,7 @@ describe('CreateSampleEventButton', function () {
await Promise.resolve();
expect(browserHistory.push).toHaveBeenCalledWith(
- `/organizations/${org.slug}/issues/${groupID}/`
+ `/organizations/${org.slug}/issues/${groupID}/?project=${project.id}`
);
expect(trackAnalyticsEvent).toHaveBeenCalledWith(
diff --git a/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx b/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx
index 8145bfbe068302..95e0fda9831cd5 100644
--- a/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx
+++ b/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx
@@ -2,15 +2,23 @@ import {browserHistory} from 'react-router';
import {mountWithTheme} from 'sentry-test/enzyme';
import {initializeOrg} from 'sentry-test/initializeOrg';
-import {getOptionByLabel, selectByLabel} from 'sentry-test/select-new';
+import {getOptionByLabel, openMenu, selectByLabel} from 'sentry-test/select-new';
import AddDashboardWidgetModal from 'app/components/modals/addDashboardWidgetModal';
import {t} from 'app/locale';
import TagStore from 'app/stores/tagStore';
+import * as types from 'app/views/dashboardsV2/types';
const stubEl = props => <div>{props.children}</div>;
-function mountModal({initialData, onAddWidget, onUpdateWidget, widget, fromDiscover}) {
+function mountModal({
+ initialData,
+ onAddWidget,
+ onUpdateWidget,
+ widget,
+ fromDiscover,
+ defaultWidgetQuery,
+}) {
return mountWithTheme(
void 0}
fromDiscover={fromDiscover}
+ defaultWidgetQuery={defaultWidgetQuery}
/>,
initialData.routerContext
);
@@ -97,7 +106,13 @@ describe('Modals -> AddDashboardWidgetModal', function () {
});
MockApiClient.addMockResponse({
url: '/organizations/org-slug/dashboards/',
- body: [{id: '1', title: t('Test Dashboard')}],
+ body: [
+ TestStubs.Dashboard([], {
+ id: '1',
+ title: 'Test Dashboard',
+ widgetDisplay: ['area'],
+ }),
+ ],
});
});
@@ -109,6 +124,7 @@ describe('Modals -> AddDashboardWidgetModal', function () {
const wrapper = mountModal({initialData, fromDiscover: true});
// @ts-expect-error
await tick();
+ await wrapper.update();
selectDashboard(wrapper, {label: t('+ Create New Dashboard'), value: 'new'});
await clickSubmit(wrapper);
expect(browserHistory.push).toHaveBeenCalledWith(
@@ -123,6 +139,7 @@ describe('Modals -> AddDashboardWidgetModal', function () {
const wrapper = mountModal({initialData, fromDiscover: true});
// @ts-expect-error
await tick();
+ await wrapper.update();
selectDashboard(wrapper, {label: t('Test Dashboard'), value: '1'});
await clickSubmit(wrapper);
expect(browserHistory.push).toHaveBeenCalledWith(
@@ -133,6 +150,22 @@ describe('Modals -> AddDashboardWidgetModal', function () {
wrapper.unmount();
});
+ it('disables dashboards with max widgets', async function () {
+ types.MAX_WIDGETS = 1;
+ const wrapper = mountModal({initialData, fromDiscover: true});
+ // @ts-expect-error
+ await tick();
+ await wrapper.update();
+ openMenu(wrapper, {name: 'dashboard', control: true});
+
+ const input = wrapper.find('SelectControl[name="dashboard"]');
+ expect(input.find('Option Option')).toHaveLength(2);
+ expect(input.find('Option Option').at(0).props().isDisabled).toBe(false);
+ expect(input.find('Option Option').at(1).props().isDisabled).toBe(true);
+
+ wrapper.unmount();
+ });
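Note: `types.MAX_WIDGETS = 1` mutates a module export in place, which leaks into later tests unless it is restored. A hedged sketch of one way to keep the override test-local (this relies on Jest's CJS transform keeping the namespace mutable, as the test itself does):

```ts
import * as types from 'app/views/dashboardsV2/types';

const originalMaxWidgets = types.MAX_WIDGETS;

afterEach(() => {
  // Undo the in-place override so later tests see the real limit.
  // (Allowed under Jest's CJS transform; TypeScript needs the cast.)
  (types as {MAX_WIDGETS: number}).MAX_WIDGETS = originalMaxWidgets;
});
```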
+
it('can update the title', async function () {
let widget = undefined;
const wrapper = mountModal({
@@ -840,4 +873,28 @@ describe('Modals -> AddDashboardWidgetModal', function () {
wrapper.unmount();
});
+
+ it('should use defaultWidgetQuery Y-Axis and Conditions if given a defaultWidgetQuery', async function () {
+ const wrapper = mountModal({
+ initialData,
+ onAddWidget: () => undefined,
+ onUpdateWidget: () => undefined,
+ widget: undefined,
+ fromDiscover: true,
+ defaultWidgetQuery: {
+ name: '',
+ fields: ['count()', 'failure_count()', 'count_unique(user)'],
+ conditions: 'tag:value',
+ orderby: '',
+ },
+ });
+
+ expect(wrapper.find('SearchBar').props().query).toEqual('tag:value');
+ const queryFields = wrapper.find('QueryField');
+ expect(queryFields.length).toEqual(3);
+ expect(queryFields.at(0).props().fieldValue.function[0]).toEqual('count');
+ expect(queryFields.at(1).props().fieldValue.function[0]).toEqual('failure_count');
+ expect(queryFields.at(2).props().fieldValue.function[0]).toEqual('count_unique');
+ wrapper.unmount();
+ });
});
diff --git a/tests/js/spec/utils/discover/fieldRenderer.spec.jsx b/tests/js/spec/utils/discover/fieldRenderer.spec.jsx
index b573c1e96877ce..32ced23c1102ef 100644
--- a/tests/js/spec/utils/discover/fieldRenderer.spec.jsx
+++ b/tests/js/spec/utils/discover/fieldRenderer.spec.jsx
@@ -23,7 +23,6 @@ describe('getFieldRenderer', function () {
query: {},
};
data = {
- key_transaction: 1,
team_key_transaction: 1,
title: 'ValueError: something bad',
transaction: 'api.do_things',
@@ -197,56 +196,6 @@ describe('getFieldRenderer', function () {
expect(value.text()).toEqual(project.slug);
});
- it('can render key transaction as a star', async function () {
- const renderer = getFieldRenderer('key_transaction', {key_transaction: 'boolean'});
- delete data.project;
-
- const wrapper = mountWithTheme(
- renderer(data, {location, organization}),
- context.routerContext
- );
-
- const value = wrapper.find('StyledKey');
- expect(value).toHaveLength(1);
- expect(value.props().isSolid).toBeTruthy();
-
- // Since there is not project column, it's not clickable
- expect(wrapper.find('KeyColumn')).toHaveLength(0);
- });
-
- it('can render key transaction as a clickable star', async function () {
- const renderer = getFieldRenderer('key_transaction', {key_transaction: 'boolean'});
-
- const wrapper = mountWithTheme(
- renderer(data, {location, organization}),
- context.routerContext
- );
- await tick();
- wrapper.update();
-
- let value;
-
- value = wrapper.find('StyledKey');
- expect(value).toHaveLength(1);
- expect(value.props().isSolid).toBeTruthy();
-
- wrapper.find('KeyColumn').simulate('click');
- await tick();
- wrapper.update();
-
- value = wrapper.find('StyledKey');
- expect(value).toHaveLength(1);
- expect(value.props().isSolid).toBeFalsy();
-
- wrapper.find('KeyColumn').simulate('click');
- await tick();
- wrapper.update();
-
- value = wrapper.find('StyledKey');
- expect(value).toHaveLength(1);
- expect(value.props().isSolid).toBeTruthy();
- });
-
it('can render team key transaction as a star with the dropdown', async function () {
const renderer = getFieldRenderer('team_key_transaction', {
team_key_transaction: 'boolean',
diff --git a/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx b/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx
index a937d885e95719..bab5eeb38054a2 100644
--- a/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx
+++ b/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx
@@ -1,6 +1,7 @@
import {mountWithTheme} from 'sentry-test/enzyme';
import {getSelector, openMenu, selectByValue} from 'sentry-test/select-new';
+import ModalActions from 'app/actions/modalActions';
import RuleNode from 'app/views/alerts/issueRuleEditor/ruleNode';
describe('RuleNode', function () {
@@ -58,8 +59,31 @@ describe('RuleNode', function () {
// TODO: Add this node and test if it implements correctly (e.g. Jira Tickets)
// const ticketNode = {actionType: 'ticket'};
- // TODO(Leander): Add this node and test if it implements correctly (e.g. Integrations w/ Alert Rule UI)
- // const sentryAppNode = {actionType: 'sentryapp'}
+ const sentryAppNode = {
+ id: 'sentry.rules.schema_form_mock',
+ label: 'Configure SentryApp with these',
+ enabled: true,
+ actionType: 'sentryapp',
+ sentryAppInstallationUuid: '1027',
+ formFields: {
+ exampleStringField: {
+ type: 'string',
+ placeholder: 'placeholder',
+ },
+ exampleNumberField: {
+ type: 'number',
+ placeholder: 100,
+ },
+ exampleStringChoiceField: {
+ type: 'choice',
+ choices: [
+ ['value1', 'label1'],
+ ['value2', 'label2'],
+ ['value3', 'label3'],
+ ],
+ },
+ },
+ };
const createWrapper = node => {
project = TestStubs.Project();
@@ -182,6 +206,13 @@ describe('RuleNode', function () {
});
it('renders sentry apps with schema forms correctly', async function () {
- // TODO(Leander)
+ wrapper = createWrapper(sentryAppNode);
+ const openModal = jest.spyOn(ModalActions, 'openModal');
+
+ expect(wrapper.text()).toEqual(sentryAppNode.label + 'Settings');
+ expect(wrapper.find('button[aria-label="Settings"]').exists()).toEqual(true);
+ wrapper.find('button[aria-label="Settings"]').simulate('click');
+
+ expect(openModal).toHaveBeenCalled();
});
});
diff --git a/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx b/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx
index 01c13a4d337491..1b30ecd2b0f36e 100644
--- a/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx
+++ b/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx
@@ -110,14 +110,6 @@ describe('SentryAppRuleModal', function () {
changeInputValue(descriptionInput, 'v');
changeInputValue(channelInput, 'v');
selectByValue(wrapper, 'valor', {name: 'channel', control: true});
-
- MockApiClient.addMockResponse({
- // TODO(leander): Replace with real endpoint for alert rule actions
- url: '/404/',
- method: 'POST',
- body: {it: 'worked'},
- });
-
submitSuccess(wrapper);
});
});
diff --git a/tests/js/spec/views/alerts/utils.spec.jsx b/tests/js/spec/views/alerts/utils.spec.jsx
index 9b110d764efc3a..a33e86879800ea 100644
--- a/tests/js/spec/views/alerts/utils.spec.jsx
+++ b/tests/js/spec/views/alerts/utils.spec.jsx
@@ -5,7 +5,12 @@ import {
Datasource,
SessionsAggregate,
} from 'app/views/alerts/incidentRules/types';
-import {getQueryDatasource, isSessionAggregate} from 'app/views/alerts/utils';
+import {
+ alertAxisFormatter,
+ alertTooltipValueFormatter,
+ getQueryDatasource,
+ isSessionAggregate,
+} from 'app/views/alerts/utils';
import {getIncidentDiscoverUrl} from 'app/views/alerts/utils/getIncidentDiscoverUrl';
describe('Alert utils', function () {
@@ -163,4 +168,32 @@ describe('Alert utils', function () {
expect(isSessionAggregate('p95(transaction.duration)')).toBeFalsy();
});
});
+
+ describe('alertAxisFormatter', () => {
+ it('formats', () => {
+ expect(
+ alertAxisFormatter(
+ 98.312,
+ 'Crash Free Rate',
+ SessionsAggregate.CRASH_FREE_SESSIONS
+ )
+ ).toBe('98.31%');
+ expect(alertAxisFormatter(0.1234, 'failure_rate()', 'failure_rate()')).toBe('12%');
+ });
+ });
+
+ describe('alertTooltipValueFormatter', () => {
+ it('formats', () => {
+ expect(
+ alertTooltipValueFormatter(
+ 98.312,
+ 'Crash Free Rate',
+ SessionsAggregate.CRASH_FREE_SESSIONS
+ )
+ ).toBe('98.312%');
+ expect(alertTooltipValueFormatter(0.1234, 'failure_rate()', 'failure_rate()')).toBe(
+ '12.34%'
+ );
+ });
+ });
});
diff --git a/tests/js/spec/views/dashboardsV2/detail.spec.jsx b/tests/js/spec/views/dashboardsV2/detail.spec.jsx
index 8e2fe423571316..aaad130425902e 100644
--- a/tests/js/spec/views/dashboardsV2/detail.spec.jsx
+++ b/tests/js/spec/views/dashboardsV2/detail.spec.jsx
@@ -8,6 +8,7 @@ import {mountGlobalModal} from 'sentry-test/modal';
import ProjectsStore from 'app/stores/projectsStore';
import {DashboardState} from 'app/views/dashboardsV2/types';
+import * as types from 'app/views/dashboardsV2/types';
import ViewEditDashboard from 'app/views/dashboardsV2/view';
describe('Dashboards > Detail', function () {
@@ -225,8 +226,16 @@ describe('Dashboards > Detail', function () {
MockApiClient.addMockResponse({
url: '/organizations/org-slug/dashboards/',
body: [
- TestStubs.Dashboard([], {id: 'default-overview', title: 'Default'}),
- TestStubs.Dashboard([], {id: '1', title: 'Custom Errors'}),
+ TestStubs.Dashboard([], {
+ id: 'default-overview',
+ title: 'Default',
+ widgetDisplay: ['area'],
+ }),
+ TestStubs.Dashboard([], {
+ id: '1',
+ title: 'Custom Errors',
+ widgetDisplay: ['area'],
+ }),
],
});
MockApiClient.addMockResponse({
@@ -337,6 +346,50 @@ describe('Dashboards > Detail', function () {
expect(modal.find('AddDashboardWidgetModal').props().widget).toEqual(widgets[0]);
});
+ it('shows add widget option', async function () {
+ wrapper = mountWithTheme(
+ ,
+ initialData.routerContext
+ );
+ await tick();
+ wrapper.update();
+
+ // Enter edit mode.
+ wrapper.find('Controls Button[data-test-id="dashboard-edit"]').simulate('click');
+ wrapper.update();
+ expect(wrapper.find('AddWidget').exists()).toBe(true);
+
+ wrapper.unmount();
+ });
+
+ it('hides add widget option', async function () {
+ types.MAX_WIDGETS = 1;
+
+ wrapper = mountWithTheme(
+ ,
+ initialData.routerContext
+ );
+ await tick();
+ wrapper.update();
+
+ // Enter edit mode.
+ wrapper.find('Controls Button[data-test-id="dashboard-edit"]').simulate('click');
+ wrapper.update();
+ expect(wrapper.find('AddWidget').exists()).toBe(false);
+
+ wrapper.unmount();
+ });
+
it('hides and shows breadcrumbs based on feature', async function () {
const newOrg = initializeOrg({
organization: TestStubs.Organization({
diff --git a/tests/js/spec/views/eventsV2/chartFooter.spec.tsx b/tests/js/spec/views/eventsV2/chartFooter.spec.tsx
index 40b06c94db2130..0ca44a8b154bb1 100644
--- a/tests/js/spec/views/eventsV2/chartFooter.spec.tsx
+++ b/tests/js/spec/views/eventsV2/chartFooter.spec.tsx
@@ -43,6 +43,8 @@ describe('EventsV2 > ChartFooter', function () {
displayMode={DisplayModes.DEFAULT}
displayOptions={[{label: DisplayModes.DEFAULT, value: DisplayModes.DEFAULT}]}
onDisplayChange={() => undefined}
+ onTopEventsChange={() => undefined}
+ topEvents="5"
/>,
initialData.routerContext
);
@@ -82,6 +84,8 @@ describe('EventsV2 > ChartFooter', function () {
displayMode={DisplayModes.DEFAULT}
displayOptions={[{label: DisplayModes.DEFAULT, value: DisplayModes.DEFAULT}]}
onDisplayChange={() => undefined}
+ onTopEventsChange={() => undefined}
+ topEvents="5"
/>,
initialData.routerContext
);
@@ -94,4 +98,44 @@ describe('EventsV2 > ChartFooter', function () {
expect(optionCheckboxSelector.props().title).toEqual(t('Y-Axis'));
expect(optionCheckboxSelector.props().selected).toEqual(yAxisValue);
});
+
+ it('renders display limits with default limit when top 5 mode is selected', async function () {
+ // @ts-expect-error
+ const organization = TestStubs.Organization({
+ features: [...features, 'discover-top-events'],
+ });
+
+ // Start off with an invalid view (empty is invalid)
+ const initialData = initializeOrg({
+ organization,
+ router: {
+ location: {query: {query: 'tag:value'}},
+ },
+ project: 1,
+ projects: [],
+ });
+
+ const wrapper = mountWithTheme(
+ undefined}
+ displayMode={DisplayModes.TOP5}
+ displayOptions={[{label: DisplayModes.DEFAULT, value: DisplayModes.DEFAULT}]}
+ onDisplayChange={() => undefined}
+ onTopEventsChange={() => undefined}
+ topEvents="5"
+ />,
+ initialData.routerContext
+ );
+
+ // @ts-expect-error
+ await tick();
+ wrapper.update();
+
+ const optionSelector = wrapper.find('OptionSelector[title="Limit"]');
+ expect(optionSelector.props().selected).toEqual('5');
+ });
});
diff --git a/tests/js/spec/views/eventsV2/miniGraph.spec.tsx b/tests/js/spec/views/eventsV2/miniGraph.spec.tsx
new file mode 100644
index 00000000000000..14b1ca07162c4c
--- /dev/null
+++ b/tests/js/spec/views/eventsV2/miniGraph.spec.tsx
@@ -0,0 +1,50 @@
+import {mountWithTheme} from 'sentry-test/enzyme';
+import {initializeOrg} from 'sentry-test/initializeOrg';
+
+import EventView from 'app/utils/discover/eventView';
+import MiniGraph from 'app/views/eventsV2/miniGraph';
+
+describe('EventsV2 > MiniGraph', function () {
+ const features = ['discover-basic', 'connect-discover-and-dashboards'];
+ const location = {
+ query: {query: 'tag:value'},
+ pathname: '/',
+ };
+
+ let organization, eventView, initialData;
+
+ beforeEach(() => {
+ // @ts-expect-error
+ organization = TestStubs.Organization({
+ features,
+ // @ts-expect-error
+ projects: [TestStubs.Project()],
+ });
+ initialData = initializeOrg({
+ organization,
+ router: {
+ location,
+ },
+ project: 1,
+ projects: [],
+ });
+ // @ts-expect-error
+ eventView = EventView.fromSavedQueryOrLocation(undefined, location);
+ });
+
+ it('makes an EventsRequest with all selected multi y axis', async function () {
+ const yAxis = ['count()', 'failure_count()'];
+ const wrapper = mountWithTheme(
+ ,
+ initialData.routerContext
+ );
+ const eventsRequestProps = wrapper.find('EventsRequest').props();
+ expect(eventsRequestProps.yAxis).toEqual(yAxis);
+ });
+});
diff --git a/tests/js/spec/views/eventsV2/queryList.spec.jsx b/tests/js/spec/views/eventsV2/queryList.spec.jsx
index 9ab80bf647201d..317e3e07eb91cd 100644
--- a/tests/js/spec/views/eventsV2/queryList.spec.jsx
+++ b/tests/js/spec/views/eventsV2/queryList.spec.jsx
@@ -239,4 +239,28 @@ describe('EventsV2 > QueryList', function () {
expect(menuItems.at(0).find('span').children().html()).toEqual('Delete Query');
expect(menuItems.at(1).find('span').children().html()).toEqual('Duplicate Query');
});
+
+ it('passes yAxis from the savedQuery to MiniGraph', function () {
+ const featuredOrganization = TestStubs.Organization({
+ features: ['connect-discover-and-dashboards', 'dashboards-edit'],
+ });
+ const yAxis = ['count()', 'failure_count()'];
+ const savedQueryWithMultiYAxis = {
+ ...savedQueries.slice(1)[0],
+ yAxis,
+ };
+ const wrapper = mountWithTheme(
+ ,
+ TestStubs.routerContext()
+ );
+
+ const miniGraph = wrapper.find('MiniGraph');
+ expect(miniGraph.props().yAxis).toEqual(['count()', 'failure_count()']);
+ });
});
diff --git a/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js b/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js
index c896549ee40526..46e484a04f338a 100644
--- a/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js
+++ b/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js
@@ -500,7 +500,7 @@ describe('EventsV2 -> ColumnEditModal', function () {
// Apply the changes so we can see the new columns.
newWrapper.find('Button[priority="primary"]').simulate('click');
expect(onApply).toHaveBeenCalledWith([
- {kind: 'function', function: ['count_unique', '', undefined, undefined]},
+ {kind: 'function', function: ['count_unique', 'user', undefined, undefined]},
{kind: 'function', function: ['count', '', undefined, undefined]},
{kind: 'equation', field: 'count() - count()'},
]);
@@ -534,8 +534,11 @@ describe('EventsV2 -> ColumnEditModal', function () {
newWrapper.find('Button[priority="primary"]').simulate('click');
// With the way the parser works only tokens up to the error will be updated
expect(onApply).toHaveBeenCalledWith([
- {kind: 'function', function: ['count_unique', '', undefined, undefined]},
- {kind: 'equation', field: 'count_unique() - count_unique() arst count() '},
+ {kind: 'function', function: ['count_unique', 'user', undefined, undefined]},
+ {
+ kind: 'equation',
+ field: 'count_unique(user) - count_unique(user) arst count() ',
+ },
]);
});
});
diff --git a/tests/js/spec/views/eventsV2/table/tableView.spec.jsx b/tests/js/spec/views/eventsV2/table/tableView.spec.jsx
index 0e6531de2b566c..8019e50f8e9099 100644
--- a/tests/js/spec/views/eventsV2/table/tableView.spec.jsx
+++ b/tests/js/spec/views/eventsV2/table/tableView.spec.jsx
@@ -138,6 +138,21 @@ describe('TableView > CellActions', function () {
});
});
+ it('handles add cell action with multiple y axis', function () {
+ location.query.yAxis = ['count()', 'failure_count()'];
+ const wrapper = makeWrapper(initialData, rows, eventView);
+ const menu = openContextMenu(wrapper, 0);
+ menu.find('button[data-test-id="add-to-filter"]').simulate('click');
+
+ expect(browserHistory.push).toHaveBeenCalledWith({
+ pathname: location.pathname,
+ query: expect.objectContaining({
+ query: 'title:"some title"',
+ yAxis: ['count()', 'failure_count()'],
+ }),
+ });
+ });
+
it('handles exclude cell action on string value', function () {
const wrapper = makeWrapper(initialData, rows, eventView);
const menu = openContextMenu(wrapper, 0);
diff --git a/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx b/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx
index dc6230ee714920..7fd6bb4faaa906 100644
--- a/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx
+++ b/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx
@@ -1,20 +1,39 @@
-import {shallow} from 'sentry-test/enzyme';
+import {mountWithTheme} from 'sentry-test/reactTestingLibrary';
-import AwsLambdaProjectSelect from 'app/views/integrationPipeline/awsLambdaProjectSelect';
import PipelineView from 'app/views/integrationPipeline/pipelineView';
+function MockAwsLambdaProjectSelect() {
+ return <div>mock_AwsLambdaProjectSelect</div>;
+}
+
+jest.mock(
+ 'app/views/integrationPipeline/awsLambdaProjectSelect',
+ () => MockAwsLambdaProjectSelect
+);
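Note: `jest.mock` calls are hoisted above the imports, so the factory runs first; Jest only lets the factory reference out-of-scope variables whose names match a `mock` prefix (case-insensitive), which is why the stub above is named `MockAwsLambdaProjectSelect`. An equivalent inline form that sidesteps the restriction:

```ts
jest.mock('app/views/integrationPipeline/awsLambdaProjectSelect', () =>
  // Defined inside the factory, so no out-of-scope reference is needed.
  function AwsLambdaProjectSelectStub() {
    return null;
  }
);
```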
+
describe('PipelineView', () => {
it('renders awsLambdaProjectSelect', () => {
- const wrapper = shallow(
+ const {findByText} = mountWithTheme(
,
- TestStubs.routerContext()
+ {context: TestStubs.routerContext()}
);
- expect(wrapper.find(AwsLambdaProjectSelect).prop('someField')).toBe('someVal');
+
+ findByText('mock_AwsLambdaProjectSelect');
+
expect(document.title).toBe('AWS Lambda Select Project');
});
+
it('errors on invalid pipelineName', () => {
- expect(() =>
- shallow(, TestStubs.routerContext())
- ).toThrow('Invalid pipeline name other');
+ jest.spyOn(console, 'error');
+
+ // eslint-disable-next-line no-console
+ console.error.mockImplementation(() => {});
+
+ expect(() => mountWithTheme()).toThrow(
+ 'Invalid pipeline name other'
+ );
+
+ // eslint-disable-next-line no-console
+ console.error.mockRestore();
});
});
diff --git a/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx b/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx
index 6ba1318d58afee..fa9b7b56675bdf 100644
--- a/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx
+++ b/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx
@@ -10,6 +10,9 @@ import GroupDetails from 'app/views/organizationGroupDetails';
jest.unmock('app/utils/recreateRoute');
+const SAMPLE_EVENT_ALERT_TEXT =
+ 'You are viewing a sample error. Configure Sentry to start viewing real errors.';
+
describe('groupDetails', () => {
const group = TestStubs.Group();
const event = TestStubs.Event();
@@ -216,4 +219,21 @@ describe('groupDetails', () => {
expect(await findByText('New Issue')).toBeTruthy();
});
+
+ it('renders alert for sample event', async function () {
+ const aProject = TestStubs.Project({firstEvent: false});
+ ProjectsStore.reset();
+ ProjectsStore.loadInitialData([aProject]);
+ const {findByText} = createWrapper();
+
+ expect(await findByText(SAMPLE_EVENT_ALERT_TEXT)).toBeTruthy();
+ });
+ it('does not render alert for non-sample events', async function () {
+ const aProject = TestStubs.Project({firstEvent: false});
+ ProjectsStore.reset();
+ ProjectsStore.loadInitialData([aProject]);
+ const {queryByText} = createWrapper();
+
+ expect(await queryByText(SAMPLE_EVENT_ALERT_TEXT)).toBeNull();
+ });
});
diff --git a/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx b/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx
index bcf4bbab842eb2..394993fe0d6bff 100644
--- a/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx
+++ b/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx
@@ -1,5 +1,5 @@
-import {mountWithTheme} from 'sentry-test/enzyme';
import {initializeOrg} from 'sentry-test/initializeOrg';
+import {fireEvent, mountWithTheme} from 'sentry-test/reactTestingLibrary';
import GroupTags from 'app/views/organizationGroupDetails/groupTags';
@@ -18,12 +18,11 @@ describe('GroupTags', function () {
const wrapper = mountWithTheme(
,
- routerContext
+ {context: routerContext}
);
expect(tagsMock).toHaveBeenCalledWith(
@@ -33,7 +32,11 @@ describe('GroupTags', function () {
})
);
- wrapper.find('li[data-test-id="user"] Link').first().simulate('click', {button: 0});
+ const headers = wrapper.getAllByRole('heading').map(header => header.innerHTML);
+ // Check headers have been sorted alphabetically
+ expect(headers).toEqual(['browser', 'device', 'environment', 'url', 'user']);
+
+ fireEvent.click(wrapper.getByText('david'));
expect(router.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/issues/1/events/',
diff --git a/tests/js/spec/views/performance/content.spec.jsx b/tests/js/spec/views/performance/content.spec.jsx
index a0320cbe5d35bc..8894cfd48a7660 100644
--- a/tests/js/spec/views/performance/content.spec.jsx
+++ b/tests/js/spec/views/performance/content.spec.jsx
@@ -143,7 +143,7 @@ describe('Performance > Content', function () {
} else if (!options.query.hasOwnProperty('field')) {
return false;
}
- return !options.query.field.includes('key_transaction');
+ return !options.query.field.includes('team_key_transaction');
},
}
);
@@ -166,7 +166,7 @@ describe('Performance > Content', function () {
},
data: [
{
- key_transaction: 1,
+ team_key_transaction: 1,
transaction: '/apple/cart',
'project.id': 1,
user: 'uhoh@example.com',
@@ -180,7 +180,7 @@ describe('Performance > Content', function () {
user_misery_300: 0.114,
},
{
- key_transaction: 0,
+ team_key_transaction: 0,
transaction: '/apple/checkout',
'project.id': 1,
user: 'uhoh@example.com',
@@ -203,7 +203,7 @@ describe('Performance > Content', function () {
} else if (!options.query.hasOwnProperty('field')) {
return false;
}
- return options.query.field.includes('key_transaction');
+ return options.query.field.includes('team_key_transaction');
},
}
);
diff --git a/tests/js/spec/views/performance/table.spec.jsx b/tests/js/spec/views/performance/table.spec.jsx
index 9be51f05d022e2..28e4bc6dc98892 100644
--- a/tests/js/spec/views/performance/table.spec.jsx
+++ b/tests/js/spec/views/performance/table.spec.jsx
@@ -109,7 +109,7 @@ describe('Performance > Table', function () {
},
data: [
{
- key_transaction: 1,
+ team_key_transaction: 1,
transaction: '/apple/cart',
project: project1.slug,
user: 'uhoh@example.com',
@@ -124,7 +124,7 @@ describe('Performance > Table', function () {
project_threshold_config: ['duration', 300],
},
{
- key_transaction: 0,
+ team_key_transaction: 0,
transaction: '/apple/checkout',
project: project2.slug,
user: 'uhoh@example.com',
diff --git a/tests/js/spec/views/performance/vitalDetail/index.spec.jsx b/tests/js/spec/views/performance/vitalDetail/index.spec.jsx
index fb3f75c538d544..7cf75dedb55366 100644
--- a/tests/js/spec/views/performance/vitalDetail/index.spec.jsx
+++ b/tests/js/spec/views/performance/vitalDetail/index.spec.jsx
@@ -131,7 +131,7 @@ describe('Performance > VitalDetail', function () {
compare_numeric_aggregate_p75_measurements_cls_greater_0_25: 'number',
count: 'integer',
count_unique_user: 'integer',
- key_transaction: 'boolean',
+ team_key_transaction: 'boolean',
p50_measurements_cls: 'number',
p75_measurements_cls: 'number',
p95_measurements_cls: 'number',
@@ -144,7 +144,7 @@ describe('Performance > VitalDetail', function () {
compare_numeric_aggregate_p75_measurements_cls_greater_0_25: 0,
count: 10000,
count_unique_user: 2740,
- key_transaction: 1,
+ team_key_transaction: 1,
p50_measurements_cls: 0.143,
p75_measurements_cls: 0.215,
p95_measurements_cls: 0.302,
diff --git a/tests/js/spec/views/teamInsights/index.spec.tsx b/tests/js/spec/views/teamInsights/index.spec.tsx
index bf0480c0be700c..6152e115d634b7 100644
--- a/tests/js/spec/views/teamInsights/index.spec.tsx
+++ b/tests/js/spec/views/teamInsights/index.spec.tsx
@@ -1,8 +1,13 @@
import {mountWithTheme} from 'sentry-test/reactTestingLibrary';
+import ProjectsStore from 'app/stores/projectsStore';
import TeamInsightsContainer from 'app/views/teamInsights';
describe('TeamInsightsContainer', () => {
+ afterEach(() => {
+ ProjectsStore.reset();
+ });
+
it('blocks access if org is missing flag', () => {
// @ts-expect-error
const organization = TestStubs.Organization();
@@ -18,6 +23,10 @@ describe('TeamInsightsContainer', () => {
expect(wrapper.queryByText('test')).toBeNull();
});
it('allows access for orgs with flag', () => {
+ ProjectsStore.loadInitialData([
+ // @ts-expect-error
+ TestStubs.Project(),
+ ]);
// @ts-expect-error
const organization = TestStubs.Organization({features: ['team-insights']});
// @ts-expect-error
@@ -31,4 +40,19 @@ describe('TeamInsightsContainer', () => {
expect(wrapper.getByText('test')).toBeTruthy();
});
+ it('shows message for users with no teams', () => {
+ ProjectsStore.loadInitialData([]);
+ // @ts-expect-error
+ const organization = TestStubs.Organization({features: ['team-insights']});
+ // @ts-expect-error
+ const context = TestStubs.routerContext([{organization}]);
+ const wrapper = mountWithTheme(
+ ,
+ {context}
+ );
+
+ expect(
+ wrapper.getByText('You need at least one project to use this view')
+ ).toBeTruthy();
+ });
});
diff --git a/tests/js/spec/views/teamInsights/overview.spec.jsx b/tests/js/spec/views/teamInsights/overview.spec.jsx
index 6db28caa22a511..175b0e3b0ccc4a 100644
--- a/tests/js/spec/views/teamInsights/overview.spec.jsx
+++ b/tests/js/spec/views/teamInsights/overview.spec.jsx
@@ -1,5 +1,6 @@
-import {fireEvent, mountWithTheme, waitFor} from 'sentry-test/reactTestingLibrary';
+import {act, fireEvent, mountWithTheme, waitFor} from 'sentry-test/reactTestingLibrary';
+import TeamStore from 'app/stores/teamStore';
import localStorage from 'app/utils/localStorage';
import {TeamInsightsOverview} from 'app/views/teamInsights/overview';
@@ -8,8 +9,18 @@ jest.mock('app/utils/localStorage');
describe('TeamInsightsOverview', () => {
const project1 = TestStubs.Project({id: '2', name: 'js', slug: 'js'});
const project2 = TestStubs.Project({id: '3', name: 'py', slug: 'py'});
- const team1 = TestStubs.Team({id: '2', name: 'frontend', projects: [project1]});
- const team2 = TestStubs.Team({id: '3', name: 'backend', projects: [project2]});
+ const team1 = TestStubs.Team({
+ id: '2',
+ slug: 'frontend',
+ name: 'frontend',
+ projects: [project1],
+ });
+ const team2 = TestStubs.Team({
+ id: '3',
+ slug: 'backend',
+ name: 'backend',
+ projects: [project2],
+ });
const mockRouter = {push: jest.fn()};
beforeEach(() => {
@@ -58,6 +69,7 @@ describe('TeamInsightsOverview', () => {
url: `/teams/org-slug/${team1.slug}/alerts-triggered/`,
body: TestStubs.TeamAlertsTriggered(),
});
+ act(() => void TeamStore.loadInitialData([team1, team2]));
});
afterEach(() => {
@@ -92,7 +104,7 @@ describe('TeamInsightsOverview', () => {
expect(wrapper.queryByTestId('loading-indicator')).not.toBeInTheDocument();
});
- expect(wrapper.getByText('Team: frontend')).toBeInTheDocument();
+ expect(wrapper.getByText('#frontend')).toBeInTheDocument();
expect(wrapper.getByText('Key transaction')).toBeInTheDocument();
});
@@ -102,9 +114,9 @@ describe('TeamInsightsOverview', () => {
expect(wrapper.queryByTestId('loading-indicator')).not.toBeInTheDocument();
});
- fireEvent.click(wrapper.getByText('Team: frontend'));
- expect(wrapper.getByText('backend')).toBeInTheDocument();
- fireEvent.click(wrapper.getByText('backend'));
+ fireEvent.mouseDown(wrapper.getByText('#frontend'));
+ expect(wrapper.getByText('#backend')).toBeInTheDocument();
+ fireEvent.click(wrapper.getByText('#backend'));
expect(mockRouter.push).toHaveBeenCalledWith({query: {team: team2.id}});
expect(localStorage.setItem).toHaveBeenCalledWith(
'teamInsightsSelectedTeamId:org-slug',
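Note: the switch from `fireEvent.click` to `fireEvent.mouseDown` for opening the selector is deliberate: react-select opens its menu on mousedown, while the option list itself responds to click. The pattern, assuming `wrapper` is the `mountWithTheme` result as above:

```ts
import {fireEvent} from 'sentry-test/reactTestingLibrary';

// Open the menu (react-select listens for mousedown on its control) ...
fireEvent.mouseDown(wrapper.getByText('#frontend'));
// ... then pick an option with a normal click.
fireEvent.click(wrapper.getByText('#backend'));
```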
diff --git a/tests/js/spec/views/teamInsights/teamMisery.spec.jsx b/tests/js/spec/views/teamInsights/teamMisery.spec.jsx
index f6af9226965986..bb411de6fc2dce 100644
--- a/tests/js/spec/views/teamInsights/teamMisery.spec.jsx
+++ b/tests/js/spec/views/teamInsights/teamMisery.spec.jsx
@@ -18,7 +18,6 @@ describe('TeamMisery', () => {
},
data: [
{
- key_transaction: 1,
transaction: '/apple/cart',
project: project.slug,
tpm: 30,
@@ -28,7 +27,6 @@ describe('TeamMisery', () => {
project_threshold_config: ['duration', 300],
},
{
- key_transaction: 0,
transaction: '/apple/checkout',
project: project.slug,
tpm: 30,
@@ -59,4 +57,23 @@ describe('TeamMisery', () => {
expect(wrapper.getAllByText(project.slug)).toHaveLength(2);
expect(wrapper.getAllByText('0% change')).toHaveLength(2);
});
+
+ it('should render empty state', async () => {
+ const routerContext = TestStubs.routerContext();
+ const wrapper = mountWithTheme(
+ ,
+ {context: routerContext}
+ );
+
+ await waitFor(() => {
+ expect(wrapper.queryByTestId('loading-indicator')).not.toBeInTheDocument();
+ });
+
+ expect(wrapper.getByText('There are no items to display')).toBeTruthy();
+ });
});
diff --git a/tests/js/spec/views/teamInsights/teamStability.spec.jsx b/tests/js/spec/views/teamInsights/teamStability.spec.jsx
index 493438b067b667..8e426aeabbb575 100644
--- a/tests/js/spec/views/teamInsights/teamStability.spec.jsx
+++ b/tests/js/spec/views/teamInsights/teamStability.spec.jsx
@@ -43,4 +43,12 @@ describe('TeamStability', () => {
expect(wrapper.getAllByText('\u2014')).toHaveLength(3);
});
+
+ it('should render no projects', async () => {
+ const wrapper = mountWithTheme(
+
+ );
+
+ expect(wrapper.getByText('There are no items to display')).toBeTruthy();
+ });
});
diff --git a/tests/sentry/api/endpoints/test_project_release_stats.py b/tests/sentry/api/endpoints/test_project_release_stats.py
new file mode 100644
index 00000000000000..c373f0f8d0a99f
--- /dev/null
+++ b/tests/sentry/api/endpoints/test_project_release_stats.py
@@ -0,0 +1,32 @@
+from datetime import datetime
+
+from django.urls import reverse
+
+from sentry.models import Release
+from sentry.testutils import APITestCase
+
+
+class ProjectReleaseStatsTest(APITestCase):
+ def test_simple(self):
+ """Minimal test to ensure code coverage of the endpoint"""
+ self.login_as(user=self.user)
+
+ project = self.create_project(name="foo")
+ release = Release.objects.create(
+ organization_id=project.organization_id,
+ version="1",
+ date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
+ )
+ release.add_project(project)
+
+ url = reverse(
+ "sentry-api-0-project-release-stats",
+ kwargs={
+ "organization_slug": project.organization.slug,
+ "project_slug": project.slug,
+ "version": "1",
+ },
+ )
+ response = self.client.get(url, format="json")
+
+ assert response.status_code == 200, response.content
diff --git a/tests/sentry/api/endpoints/test_project_rule_details.py b/tests/sentry/api/endpoints/test_project_rule_details.py
index 718c97f344a338..57968beeff9fbc 100644
--- a/tests/sentry/api/endpoints/test_project_rule_details.py
+++ b/tests/sentry/api/endpoints/test_project_rule_details.py
@@ -1,3 +1,5 @@
+from unittest.mock import patch
+
import responses
from django.urls import reverse
@@ -781,6 +783,77 @@ def test_update_filters(self):
assert RuleActivity.objects.filter(rule=rule, type=RuleActivityType.UPDATED.value).exists()
+ @patch("sentry.mediators.alert_rule_actions.AlertRuleActionCreator.run")
+ def test_update_alert_rule_action(self, mock_alert_rule_action_creator):
+ """
+ Ensures that Sentry Apps with schema forms (UI components)
+ receive a payload when an alert rule is updated with them.
+ """
+ self.login_as(user=self.user)
+
+ project = self.create_project()
+
+ rule = Rule.objects.create(project=project, label="my super cool rule")
+
+ self.create_sentry_app(name="Pied Piper", organization=project.organization)
+ install = self.create_sentry_app_installation(
+ slug="pied-piper", organization=project.organization
+ )
+
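+ # "hasSchemaFormConfig" marks the action as backed by a Sentry App UI
+ # component, which routes it through AlertRuleActionCreator on save.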
+ actions = [
+ {
+ "id": "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction",
+ "settings": {"assignee": "Team Rocket", "priority": 27},
+ "uri": "/sentry/alerts/",
+ "sentryAppInstallationUuid": install.uuid,
+ "hasSchemaFormConfig": True,
+ },
+ ]
+
+ url = reverse(
+ "sentry-api-0-project-rule-details",
+ kwargs={
+ "organization_slug": project.organization.slug,
+ "project_slug": project.slug,
+ "rule_id": rule.id,
+ },
+ )
+
+ response = self.client.put(
+ url,
+ data={
+ "name": "my super cool rule",
+ "actionMatch": "any",
+ "filterMatch": "any",
+ "actions": actions,
+ "conditions": [],
+ "filters": [],
+ },
+ format="json",
+ )
+
+ assert response.status_code == 200, response.content
+ assert response.data["id"] == str(rule.id)
+
+ rule = Rule.objects.get(id=rule.id)
+ assert rule.data["actions"] == actions
+
+ kwargs = {
+ "install": install,
+ "fields": actions[0].get("settings"),
+ "uri": actions[0].get("uri"),
+ "rule": rule,
+ }
+
+ call_kwargs = mock_alert_rule_action_creator.call_args[1]
+
+ assert call_kwargs["install"].id == kwargs["install"].id
+ assert call_kwargs["fields"] == kwargs["fields"]
+ assert call_kwargs["uri"] == kwargs["uri"]
+ assert call_kwargs["rule"].id == kwargs["rule"].id
+
+ assert RuleActivity.objects.filter(rule=rule, type=RuleActivityType.UPDATED.value).exists()
+
class DeleteProjectRuleTest(APITestCase):
def test_simple(self):
diff --git a/tests/sentry/api/endpoints/test_project_rules.py b/tests/sentry/api/endpoints/test_project_rules.py
index 1bdf963655f1a5..dd76982cb6fec6 100644
--- a/tests/sentry/api/endpoints/test_project_rules.py
+++ b/tests/sentry/api/endpoints/test_project_rules.py
@@ -411,3 +411,68 @@ def test_comparison_condition_validation(self):
str(response.data["conditions"][0])
== "Select a valid choice. bad data is not one of the available choices."
)
+
+ @patch("sentry.mediators.alert_rule_actions.AlertRuleActionCreator.run")
+ def test_runs_alert_rule_action_creator(self, mock_alert_rule_action_creator):
+ """
+ Ensures that Sentry Apps with schema forms (UI components)
+ receive a payload when an alert rule is created with them.
+ """
+ self.login_as(user=self.user)
+
+ project = self.create_project()
+
+ self.create_sentry_app(name="Pied Piper", organization=project.organization)
+ install = self.create_sentry_app_installation(
+ slug="pied-piper", organization=project.organization
+ )
+
+ actions = [
+ {
+ "id": "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction",
+ "settings": {"assignee": "Team Rocket", "priority": 27},
+ "uri": "/sentry/alerts/",
+ "sentryAppInstallationUuid": install.uuid,
+ "hasSchemaFormConfig": True,
+ },
+ ]
+
+ url = reverse(
+ "sentry-api-0-project-rules",
+ kwargs={"organization_slug": project.organization.slug, "project_slug": project.slug},
+ )
+
+ response = self.client.post(
+ url,
+ data={
+ "name": "my super cool rule",
+ "owner": f"user:{self.user.id}",
+ "conditions": [],
+ "filters": [],
+ "actions": actions,
+ "filterMatch": "any",
+ "actionMatch": "any",
+ "frequency": 30,
+ },
+ format="json",
+ )
+
+ assert response.status_code == 200, response.content
+ assert response.data["id"]
+
+ rule = Rule.objects.get(id=response.data["id"])
+ assert rule.data["actions"] == actions
+
+ kwargs = {
+ "install": install,
+ "fields": actions[0].get("settings"),
+ "uri": actions[0].get("uri"),
+ "rule": rule,
+ }
+
+ call_kwargs = mock_alert_rule_action_creator.call_args[1]
+
+ assert call_kwargs["install"].id == kwargs["install"].id
+ assert call_kwargs["fields"] == kwargs["fields"]
+ assert call_kwargs["uri"] == kwargs["uri"]
+ assert call_kwargs["rule"].id == kwargs["rule"].id
diff --git a/tests/sentry/api/endpoints/test_project_rules_configuration.py b/tests/sentry/api/endpoints/test_project_rules_configuration.py
index 856d8f02af029e..a41341589916ad 100644
--- a/tests/sentry/api/endpoints/test_project_rules_configuration.py
+++ b/tests/sentry/api/endpoints/test_project_rules_configuration.py
@@ -5,6 +5,7 @@
EMAIL_ACTION = "sentry.mail.actions.NotifyEmailAction"
APP_ACTION = "sentry.rules.actions.notify_event_service.NotifyEventServiceAction"
JIRA_ACTION = "sentry.integrations.jira.notify_action.JiraCreateTicketAction"
+SENTRY_APP_ALERT_ACTION = "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction"
class ProjectRuleConfigurationTest(APITestCase):
@@ -162,8 +163,6 @@ def test_sentry_app_alertable_webhook(self):
assert len(response.data["filters"]) == 7
def test_sentry_app_alert_rules(self):
- from sentry.models import SentryAppComponent
-
team = self.create_team()
project1 = self.create_project(teams=[team], name="foo")
self.create_project(teams=[team], name="baz")
@@ -176,15 +175,12 @@ def test_sentry_app_alert_rules(self):
install = self.create_sentry_app_installation(
slug=sentry_app.slug, organization=self.organization, user=self.user
)
- component = SentryAppComponent.objects.get(
- sentry_app_id=sentry_app.id, type="alert-rule-action"
- )
response = self.get_valid_response(self.organization.slug, project1.slug)
assert len(response.data["actions"]) == 8
assert {
- "id": f"sentry.sentryapp.{sentry_app.slug}",
- "uuid": str(component.uuid),
+ "id": SENTRY_APP_ALERT_ACTION,
+ "service": sentry_app.slug,
"actionType": "sentryapp",
"prompt": sentry_app.name,
"enabled": True,
diff --git a/tests/sentry/api/endpoints/test_team_alerts_triggered.py b/tests/sentry/api/endpoints/test_team_alerts_triggered.py
index 18048d7d46c660..522d372ca41092 100644
--- a/tests/sentry/api/endpoints/test_team_alerts_triggered.py
+++ b/tests/sentry/api/endpoints/test_team_alerts_triggered.py
@@ -1,3 +1,4 @@
+from django.utils import timezone
from freezegun import freeze_time
from sentry.incidents.models import (
@@ -51,9 +52,9 @@ def test_simple(self):
assert (
response.data[
str(
- before_now(days=i).replace(
- hour=0, minute=0, second=0, microsecond=0, tzinfo=None
- )
+ before_now(days=i)
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
)
]
== 1
@@ -63,9 +64,9 @@ def test_simple(self):
assert (
response.data[
str(
- before_now(days=i).replace(
- hour=0, minute=0, second=0, microsecond=0, tzinfo=None
- )
+ before_now(days=i)
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
)
]
== 0
@@ -78,9 +79,9 @@ def test_simple(self):
assert (
response.data[
str(
- before_now(days=0).replace(
- hour=0, minute=0, second=0, microsecond=0, tzinfo=None
- )
+ before_now(days=0)
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
)
]
== 0
@@ -89,9 +90,9 @@ def test_simple(self):
assert (
response.data[
str(
- before_now(days=i).replace(
- hour=0, minute=0, second=0, microsecond=0, tzinfo=None
- )
+ before_now(days=i)
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
)
]
== 1
@@ -159,9 +160,9 @@ def test_not_as_simple(self):
assert (
response.data[
str(
- before_now(days=2).replace(
- hour=0, minute=0, second=0, microsecond=0, tzinfo=None
- )
+ before_now(days=2)
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
)
]
== 1
@@ -172,9 +173,9 @@ def test_not_as_simple(self):
assert (
response.data[
str(
- before_now(days=i).replace(
- hour=0, minute=0, second=0, microsecond=0, tzinfo=None
- )
+ before_now(days=i)
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
)
]
== 0
diff --git a/tests/sentry/api/endpoints/test_team_issue_breakdown.py b/tests/sentry/api/endpoints/test_team_issue_breakdown.py
new file mode 100644
index 00000000000000..aabcc16c043d33
--- /dev/null
+++ b/tests/sentry/api/endpoints/test_team_issue_breakdown.py
@@ -0,0 +1,158 @@
+from datetime import timedelta
+
+from django.utils import timezone
+from django.utils.timezone import now
+from freezegun import freeze_time
+
+from sentry.models import GroupHistory, GroupHistoryStatus
+from sentry.testutils import APITestCase
+from sentry.testutils.helpers.datetime import before_now
+
+
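+# freeze_time() pins django.utils.timezone.now() for the duration of each
+# test, so the per-day bucket keys computed below stay stable.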
+@freeze_time()
+class TeamIssueBreakdownTest(APITestCase):
+ endpoint = "sentry-api-0-team-issue-breakdown"
+
+ def test_simple(self):
+ project1 = self.create_project(teams=[self.team], slug="foo")
+ project2 = self.create_project(teams=[self.team], slug="bar")
+ group1 = self.create_group(checksum="a" * 32, project=project1, times_seen=10)
+ group2 = self.create_group(checksum="b" * 32, project=project2, times_seen=5)
+
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group1,
+ project=project1,
+ actor=self.user.actor,
+ date_added=before_now(days=5),
+ status=GroupHistoryStatus.UNRESOLVED,
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group1,
+ project=project1,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ date_added=before_now(days=2),
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group1,
+ project=project1,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.REGRESSED,
+ date_added=before_now(days=2),
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ date_added=before_now(days=10),
+ status=GroupHistoryStatus.UNRESOLVED,
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ date_added=before_now(days=1),
+ status=GroupHistoryStatus.UNRESOLVED,
+ )
+
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.IGNORED,
+ )
+ today = str(
+ now()
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
+ )
+ yesterday = str(
+ (now() - timedelta(days=1))
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
+ )
+ two_days_ago = str(
+ (now() - timedelta(days=2))
+ .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
+ .isoformat()
+ )
+ self.login_as(user=self.user)
+ response = self.get_success_response(
+ self.team.organization.slug, self.team.slug, statsPeriod="7d"
+ )
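+ # "reviewed" counts transitions to resolved/ignored; "total" also includes
+ # unresolved and regressed transitions.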
+ assert len(response.data) == 2
+ assert response.data[project1.id][today]["reviewed"] == 0
+ assert response.data[project1.id][today]["total"] == 0
+ assert response.data[project1.id][yesterday]["reviewed"] == 0
+ assert response.data[project1.id][yesterday]["total"] == 0
+ assert response.data[project1.id][two_days_ago]["reviewed"] == 1
+
+ assert response.data[project2.id][today]["reviewed"] == 3
+ assert response.data[project2.id][today]["total"] == 3
+ assert response.data[project2.id][yesterday]["reviewed"] == 0
+ assert response.data[project2.id][yesterday]["total"] == 1
+ assert response.data[project2.id][two_days_ago]["reviewed"] == 0
+ assert response.data[project2.id][two_days_ago]["total"] == 0
+
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group1,
+ project=project1,
+ actor=self.user.actor,
+ date_added=before_now(days=1),
+ status=GroupHistoryStatus.UNRESOLVED,
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ )
+
+ # Make sure an unrelated history type doesn't break anything.
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.ASSIGNED,
+ )
+
+ response = self.get_success_response(self.team.organization.slug, self.team.slug)
+ assert len(response.data) == 2
+
+ assert response.data[project1.id][today]["reviewed"] == 0
+ assert response.data[project1.id][today]["total"] == 0
+ assert response.data[project1.id][yesterday]["reviewed"] == 0
+ assert response.data[project1.id][yesterday]["total"] == 1
+ assert response.data[project1.id][two_days_ago]["reviewed"] == 1
+
+ assert response.data[project2.id][today]["reviewed"] == 4
+ assert response.data[project2.id][today]["total"] == 4
+ assert response.data[project2.id][yesterday]["reviewed"] == 0
+ assert response.data[project2.id][yesterday]["total"] == 1
+ assert response.data[project2.id][two_days_ago]["reviewed"] == 0
+ assert response.data[project2.id][two_days_ago]["total"] == 0
diff --git a/tests/sentry/api/endpoints/test_team_time_to_resolution.py b/tests/sentry/api/endpoints/test_team_time_to_resolution.py
new file mode 100644
index 00000000000000..8cdb23af60112f
--- /dev/null
+++ b/tests/sentry/api/endpoints/test_team_time_to_resolution.py
@@ -0,0 +1,111 @@
+from datetime import timedelta
+
+from django.utils.timezone import now
+from freezegun import freeze_time
+
+from sentry.models import GroupHistory, GroupHistoryStatus
+from sentry.testutils import APITestCase
+from sentry.testutils.helpers.datetime import before_now
+
+
+@freeze_time()
+class TeamTimeToResolutionTest(APITestCase):
+ endpoint = "sentry-api-0-team-time-to-resolution"
+
+ def test_simple(self):
+ project1 = self.create_project(teams=[self.team], slug="foo")
+ project2 = self.create_project(teams=[self.team], slug="bar")
+ group1 = self.create_group(checksum="a" * 32, project=project1, times_seen=10)
+ group2 = self.create_group(checksum="b" * 32, project=project2, times_seen=5)
+
+ gh1 = GroupHistory.objects.create(
+ organization=self.organization,
+ group=group1,
+ project=project1,
+ actor=self.user.actor,
+ date_added=before_now(days=5),
+ status=GroupHistoryStatus.UNRESOLVED,
+ prev_history=None,
+ prev_history_date=None,
+ )
+
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group1,
+ project=project1,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ prev_history=gh1,
+ prev_history_date=gh1.date_added,
+ date_added=before_now(days=2),
+ )
+
+ gh2 = GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ date_added=before_now(days=10),
+ status=GroupHistoryStatus.UNRESOLVED,
+ prev_history=None,
+ prev_history_date=None,
+ )
+
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ prev_history=gh2,
+ prev_history_date=gh2.date_added,
+ )
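+ # Time to resolution is measured from prev_history_date (when the group
+ # became unresolved) to the resolving row's date_added.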
+ today = str(now().date())
+ yesterday = str((now() - timedelta(days=1)).date())
+ two_days_ago = str((now() - timedelta(days=2)).date())
+ self.login_as(user=self.user)
+ response = self.get_success_response(
+ self.team.organization.slug, self.team.slug, statsPeriod="14d"
+ )
+ assert len(response.data) == 14
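+ # group2: unresolved 10 days ago, resolved now -> 10 days in today's bucket.
+ # group1: unresolved on day 5, resolved on day 2 -> 3 days, two days ago.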
+ assert response.data[today]["avg"] == timedelta(days=10).total_seconds()
+ assert response.data[two_days_ago]["avg"] == timedelta(days=3).total_seconds()
+ assert response.data[yesterday]["avg"] == 0
+
+ # Lower "todays" average by adding another resolution, but this time 5 days instead of 10 (avg is 7.5 now)
+ gh2 = GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ date_added=before_now(days=5),
+ status=GroupHistoryStatus.UNRESOLVED,
+ prev_history=None,
+ prev_history_date=None,
+ )
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.RESOLVED,
+ prev_history=gh2,
+ prev_history_date=gh2.date_added,
+ )
+
+ # Make sure an unrelated history type doesn't break anything.
+ GroupHistory.objects.create(
+ organization=self.organization,
+ group=group2,
+ project=project2,
+ actor=self.user.actor,
+ status=GroupHistoryStatus.DELETED,
+ prev_history=gh2,
+ prev_history_date=gh2.date_added,
+ )
+
+ response = self.get_success_response(self.team.organization.slug, self.team.slug)
+ assert len(response.data) == 90
+ assert response.data[today]["avg"] == timedelta(days=7, hours=12).total_seconds()
+ assert response.data[two_days_ago]["avg"] == timedelta(days=3).total_seconds()
+ assert response.data[yesterday]["avg"] == 0
diff --git a/tests/sentry/api/endpoints/test_user_authenticator_details.py b/tests/sentry/api/endpoints/test_user_authenticator_details.py
index 166208578621b7..2c3874a4d08195 100644
--- a/tests/sentry/api/endpoints/test_user_authenticator_details.py
+++ b/tests/sentry/api/endpoints/test_user_authenticator_details.py
@@ -1,51 +1,106 @@
import datetime
from django.conf import settings
+from django.core import mail
from django.db.models import F
-from django.urls import reverse
from django.utils import timezone
from sentry.auth.authenticators import RecoveryCodeInterface, SmsInterface, TotpInterface
-from sentry.models import Authenticator, Organization
+from sentry.models import Authenticator, Organization, User
from sentry.testutils import APITestCase
from sentry.utils.compat import mock
-class UserAuthenticatorDetailsTest(APITestCase):
+def get_auth(user: "User") -> Authenticator:
+ return Authenticator.objects.create(
+ type=3, # u2f
+ user=user,
+ config={
+ "devices": [
+ {
+ "binding": {
+ "publicKey": "aowekroawker",
+ "keyHandle": "devicekeyhandle",
+ "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
+ },
+ "name": "Amused Beetle",
+ "ts": 1512505334,
+ },
+ {
+ "binding": {
+ "publicKey": "publickey",
+ "keyHandle": "aowerkoweraowerkkro",
+ "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
+ },
+ "name": "Sentry",
+ "ts": 1512505334,
+ },
+ ]
+ },
+ )
+
+
+def assert_security_email_sent(email_type: str) -> None:
+ """TODO(mgaeta): Move this function to a test helper directory."""
+ body_fragment = {
+ "mfa-added": "An authenticator has been added to your Sentry account",
+ "mfa-removed": "An authenticator has been removed from your Sentry account",
+ "recovery-codes-regenerated": "Recovery codes have been regenerated for your Sentry account",
+ }.get(email_type)
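+ # Django's locmem test email backend collects sent messages in mail.outbox;
+ # callers wrap sends in `with self.tasks():` so queued mail tasks run eagerly.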
+ assert len(mail.outbox) == 1
+ assert body_fragment in mail.outbox[0].body
+
+
+class UserAuthenticatorDetailsTestBase(APITestCase):
def setUp(self):
- self.user = self.create_user(email="test@example.com", is_superuser=False)
self.login_as(user=self.user)
- def _assert_security_email_sent(self, email_type, email_log):
- assert email_log.info.call_count == 1
- assert "mail.queued" in email_log.info.call_args[0]
- assert email_log.info.call_args[1]["extra"]["message_type"] == email_type
-
- def _require_2fa_for_organization(self):
+ def _require_2fa_for_organization(self) -> None:
organization = self.create_organization(name="test monkey", owner=self.user)
organization.update(flags=F("flags").bitor(Organization.flags.require_2fa))
- def test_wrong_auth_id(self):
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": "totp"},
- )
- resp = self.client.get(url)
- assert resp.status_code == 404
+class UserAuthenticatorDeviceDetailsTest(UserAuthenticatorDetailsTestBase):
+ endpoint = "sentry-api-0-user-authenticator-device-details"
+ method = "delete"
+
+ def test_u2f_remove_device(self):
+ auth = get_auth(self.user)
+
+ with self.tasks():
+ self.get_success_response(self.user.id, auth.id, "devicekeyhandle")
+
+ authenticator = Authenticator.objects.get(id=auth.id)
+ assert len(authenticator.interface.get_registered_devices()) == 1
+
+ assert_security_email_sent("mfa-removed")
+
+ # Can't remove last device.
+ # TODO(mgaeta): We should not allow the API to return a 500.
+ with self.tasks():
+ self.get_error_response(self.user.id, auth.id, "aowerkoweraowerkkro", status_code=500)
+
+ # Still only one email was sent.
+ assert_security_email_sent("mfa-removed")
+
+ def test_require_2fa__delete_device__ok(self):
+ self._require_2fa_for_organization()
+ self.test_u2f_remove_device()
+
+
+class UserAuthenticatorDetailsTest(UserAuthenticatorDetailsTestBase):
+ endpoint = "sentry-api-0-user-authenticator-details"
+
+ def test_wrong_auth_id(self):
+ self.get_error_response(self.user.id, "totp", status_code=404)
def test_get_authenticator_details(self):
interface = TotpInterface()
interface.enroll(self.user)
auth = interface.authenticator
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": auth.id},
- )
+ resp = self.get_success_response(self.user.id, auth.id)
- resp = self.client.get(url)
- assert resp.status_code == 200
assert resp.data["isEnrolled"]
assert resp.data["id"] == "totp"
assert resp.data["authId"] == str(auth.id)
@@ -55,50 +110,23 @@ def test_get_authenticator_details(self):
assert "form" not in resp.data
assert "qrcode" not in resp.data
- @mock.patch("sentry.utils.email.logger")
- def test_get_recovery_codes(self, email_log):
+ def test_get_recovery_codes(self):
interface = RecoveryCodeInterface()
interface.enroll(self.user)
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": interface.authenticator.id},
- )
+ with self.tasks():
+ resp = self.get_success_response(self.user.id, interface.authenticator.id)
- resp = self.client.get(url)
- assert resp.status_code == 200
assert resp.data["id"] == "recovery"
assert resp.data["authId"] == str(interface.authenticator.id)
assert len(resp.data["codes"])
- assert email_log.info.call_count == 0
+ assert len(mail.outbox) == 0
def test_u2f_get_devices(self):
- auth = Authenticator.objects.create(
- type=3, # u2f
- user=self.user,
- config={
- "devices": [
- {
- "binding": {
- "publicKey": "aowekroawker",
- "keyHandle": "aowkeroakewrokaweokrwoer",
- "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
- },
- "name": "Amused Beetle",
- "ts": 1512505334,
- }
- ]
- },
- )
-
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": auth.id},
- )
-
- resp = self.client.get(url)
- assert resp.status_code == 200
+ auth = get_auth(self.user)
+
+ resp = self.get_success_response(self.user.id, auth.id)
assert resp.data["id"] == "u2f"
assert resp.data["authId"] == str(auth.id)
assert len(resp.data["devices"])
@@ -109,109 +137,17 @@ def test_u2f_get_devices(self):
assert "response" not in resp.data
def test_get_device_name(self):
- auth = Authenticator.objects.create(
- type=3, # u2f
- user=self.user,
- config={
- "devices": [
- {
- "binding": {
- "publicKey": "aowekroawker",
- "keyHandle": "devicekeyhandle",
- "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
- },
- "name": "Amused Beetle",
- "ts": 1512505334,
- },
- {
- "binding": {
- "publicKey": "publickey",
- "keyHandle": "aowerkoweraowerkkro",
- "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
- },
- "name": "Sentry",
- "ts": 1512505334,
- },
- ]
- },
- )
+ auth = get_auth(self.user)
assert auth.interface.get_device_name("devicekeyhandle") == "Amused Beetle"
assert auth.interface.get_device_name("aowerkoweraowerkkro") == "Sentry"
- @mock.patch("sentry.utils.email.logger")
- def test_u2f_remove_device(self, email_log):
- auth = Authenticator.objects.create(
- type=3, # u2f
- user=self.user,
- config={
- "devices": [
- {
- "binding": {
- "publicKey": "aowekroawker",
- "keyHandle": "devicekeyhandle",
- "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
- },
- "name": "Amused Beetle",
- "ts": 1512505334,
- },
- {
- "binding": {
- "publicKey": "publickey",
- "keyHandle": "aowerkoweraowerkkro",
- "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json",
- },
- "name": "Sentry",
- "ts": 1512505334,
- },
- ]
- },
- )
-
- url = reverse(
- "sentry-api-0-user-authenticator-device-details",
- kwargs={
- "user_id": self.user.id,
- "auth_id": auth.id,
- "interface_device_id": "devicekeyhandle",
- },
- )
-
- resp = self.client.delete(url)
- assert resp.status_code == 204
-
- authenticator = Authenticator.objects.get(id=auth.id)
- assert len(authenticator.interface.get_registered_devices()) == 1
-
- self._assert_security_email_sent("mfa-removed", email_log)
-
- # Can't remove last device
- url = reverse(
- "sentry-api-0-user-authenticator-device-details",
- kwargs={
- "user_id": self.user.id,
- "auth_id": auth.id,
- "interface_device_id": "aowerkoweraowerkkro",
- },
- )
- resp = self.client.delete(url)
- assert resp.status_code == 500
-
- # only one send
- self._assert_security_email_sent("mfa-removed", email_log)
-
def test_sms_get_phone(self):
interface = SmsInterface()
interface.phone_number = "5551231234"
interface.enroll(self.user)
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": interface.authenticator.id},
- )
-
- resp = self.client.get(url)
- assert resp.status_code == 200
+ resp = self.get_success_response(self.user.id, interface.authenticator.id)
assert resp.data["id"] == "sms"
assert resp.data["authId"] == str(interface.authenticator.id)
assert resp.data["phone"] == "5551231234"
@@ -220,38 +156,30 @@ def test_sms_get_phone(self):
assert "totp_secret" not in resp.data
assert "form" not in resp.data
- @mock.patch("sentry.utils.email.logger")
- def test_recovery_codes_regenerate(self, email_log):
+ def test_recovery_codes_regenerate(self):
interface = RecoveryCodeInterface()
interface.enroll(self.user)
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": interface.authenticator.id},
- )
-
- resp = self.client.get(url)
- assert resp.status_code == 200
+ resp = self.get_success_response(self.user.id, interface.authenticator.id)
old_codes = resp.data["codes"]
old_created_at = resp.data["createdAt"]
- resp = self.client.get(url)
+ resp = self.get_success_response(self.user.id, interface.authenticator.id)
assert old_codes == resp.data["codes"]
assert old_created_at == resp.data["createdAt"]
# regenerate codes
tomorrow = timezone.now() + datetime.timedelta(days=1)
with mock.patch.object(timezone, "now", return_value=tomorrow):
- resp = self.client.put(url)
-
- resp = self.client.get(url)
+ with self.tasks():
+ self.get_success_response(self.user.id, interface.authenticator.id, method="put")
+ resp = self.get_success_response(self.user.id, interface.authenticator.id)
assert old_codes != resp.data["codes"]
assert old_created_at != resp.data["createdAt"]
- self._assert_security_email_sent("recovery-codes-regenerated", email_log)
+ assert_security_email_sent("recovery-codes-regenerated")
- @mock.patch("sentry.utils.email.logger")
- def test_delete(self, email_log):
+ def test_delete(self):
new_options = settings.SENTRY_OPTIONS.copy()
new_options["sms.twilio-account"] = "twilio-account"
user = self.create_user(email="a@example.com", is_superuser=True)
@@ -265,38 +193,28 @@ def test_delete(self, email_log):
self.assertEqual(len(available_auths), 1)
self.login_as(user=user, superuser=True)
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": user.id, "auth_id": auth.id},
- )
- resp = self.client.delete(url, format="json")
- assert resp.status_code == 204, (resp.status_code, resp.content)
+ with self.tasks():
+ self.get_success_response(user.id, auth.id, method="delete")
assert not Authenticator.objects.filter(id=auth.id).exists()
- self._assert_security_email_sent("mfa-removed", email_log)
+ assert_security_email_sent("mfa-removed")
- @mock.patch("sentry.utils.email.logger")
- def test_cannot_delete_without_superuser(self, email_log):
+ def test_cannot_delete_without_superuser(self):
user = self.create_user(email="a@example.com", is_superuser=False)
auth = Authenticator.objects.create(type=3, user=user) # u2f
actor = self.create_user(email="b@example.com", is_superuser=False)
self.login_as(user=actor)
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": user.id, "auth_id": auth.id},
- )
- resp = self.client.delete(url, format="json")
- assert resp.status_code == 403, (resp.status_code, resp.content)
+ with self.tasks():
+ self.get_error_response(self.user.id, auth.id, method="delete", status_code=403)
assert Authenticator.objects.filter(id=auth.id).exists()
- assert email_log.info.call_count == 0
+ assert len(mail.outbox) == 0
- @mock.patch("sentry.utils.email.logger")
- def test_require_2fa__cannot_delete_last_auth(self, email_log):
+ def test_require_2fa__cannot_delete_last_auth(self):
self._require_2fa_for_organization()
# enroll in one auth method
@@ -304,21 +222,15 @@ def test_require_2fa__cannot_delete_last_auth(self, email_log):
interface.enroll(self.user)
auth = interface.authenticator
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": auth.id},
- )
-
- resp = self.client.delete(url, format="json")
- assert resp.status_code == 403, (resp.status_code, resp.content)
- assert b"requires 2FA" in resp.content
+ with self.tasks():
+ resp = self.get_error_response(self.user.id, auth.id, method="delete", status_code=403)
+ assert b"requires 2FA" in resp.content
assert Authenticator.objects.filter(id=auth.id).exists()
- assert email_log.info.call_count == 0
+ assert len(mail.outbox) == 0
- @mock.patch("sentry.utils.email.logger")
- def test_require_2fa__delete_with_multiple_auth__ok(self, email_log):
+ def test_require_2fa__delete_with_multiple_auth__ok(self):
self._require_2fa_for_organization()
new_options = settings.SENTRY_OPTIONS.copy()
@@ -334,18 +246,8 @@ def test_require_2fa__delete_with_multiple_auth__ok(self, email_log):
interface.enroll(self.user)
auth = interface.authenticator
- url = reverse(
- "sentry-api-0-user-authenticator-details",
- kwargs={"user_id": self.user.id, "auth_id": auth.id},
- )
- resp = self.client.delete(url, format="json")
- assert resp.status_code == 204, (resp.status_code, resp.content)
+ with self.tasks():
+ self.get_success_response(self.user.id, auth.id, method="delete")
assert not Authenticator.objects.filter(id=auth.id).exists()
-
- self._assert_security_email_sent("mfa-removed", email_log)
-
- @mock.patch("sentry.utils.email.logger")
- def test_require_2fa__delete_device__ok(self, email_log):
- self._require_2fa_for_organization()
- self.test_u2f_remove_device()
+ assert_security_email_sent("mfa-removed")
diff --git a/tests/sentry/api/endpoints/test_user_authenticator_enroll.py b/tests/sentry/api/endpoints/test_user_authenticator_enroll.py
index 06e19970acaa79..954bdd5d6775bc 100644
--- a/tests/sentry/api/endpoints/test_user_authenticator_enroll.py
+++ b/tests/sentry/api/endpoints/test_user_authenticator_enroll.py
@@ -2,6 +2,7 @@
from urllib.parse import parse_qsl
from django.conf import settings
+from django.core import mail
from django.db.models import F
from django.urls import reverse
@@ -15,6 +16,7 @@
)
from sentry.testutils import APITestCase
from sentry.utils.compat import mock
+from tests.sentry.api.endpoints.test_user_authenticator_details import assert_security_email_sent
# TODO(joshuarli): move all fixtures to a standard path relative to gitroot,
@@ -26,46 +28,36 @@ def get_fixture_path(name):
class UserAuthenticatorEnrollTest(APITestCase):
+ endpoint = "sentry-api-0-user-authenticator-enroll"
+
def setUp(self):
- self.user = self.create_user(email="a@example.com", is_superuser=False)
- self.organization = self.create_organization(owner=self.user)
self.login_as(user=self.user)
- def _assert_security_email_sent(self, email_type, email_log):
- assert email_log.info.call_count == 1
- assert "mail.queued" in email_log.info.call_args[0]
- assert email_log.info.call_args[1]["extra"]["message_type"] == email_type
-
- @mock.patch("sentry.utils.email.logger")
@mock.patch("sentry.auth.authenticators.TotpInterface.validate_otp", return_value=True)
- def test_totp_can_enroll(self, validate_otp, email_log):
+ def test_totp_can_enroll(self, validate_otp):
# XXX: Pretend an unbound function exists.
validate_otp.__func__ = None
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "totp"},
- )
-
with mock.patch(
"sentry.auth.authenticators.base.generate_secret_key", return_value="Z" * 32
):
- resp = self.client.get(url)
+ resp = self.get_success_response("me", "totp")
- assert resp.status_code == 200
assert resp.data["secret"] == "Z" * 32
assert (
resp.data["qrcode"]
- == "otpauth://totp/a%40example.com?issuer=Sentry&secret=ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ == "otpauth://totp/admin%40localhost?issuer=Sentry&secret=ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
)
assert resp.data["form"]
assert resp.data["secret"]
# try to enroll
- resp = self.client.post(url, data={"secret": "secret12", "otp": "1234"})
+ with self.tasks():
+ self.get_success_response(
+ "me", "totp", method="post", **{"secret": "secret12", "otp": "1234"}
+ )
assert validate_otp.call_count == 1
assert validate_otp.call_args == mock.call("1234")
- assert resp.status_code == 204
interface = Authenticator.objects.get_interface(user=self.user, interface_id="totp")
assert interface
@@ -76,40 +68,42 @@ def test_totp_can_enroll(self, validate_otp, email_log):
recovery = Authenticator.objects.get_interface(user=self.user, interface_id="recovery")
assert recovery.is_enrolled()
- self._assert_security_email_sent("mfa-added", email_log)
+ assert_security_email_sent("mfa-added")
# can rotate in place
- resp = self.client.get(url)
- assert resp.status_code == 200
- resp = self.client.post(url, data={"secret": "secret56", "otp": "5678"})
+ self.get_success_response("me", "totp")
+ self.get_success_response(
+ "me", "totp", method="post", **{"secret": "secret56", "otp": "5678"}
+ )
assert validate_otp.call_args == mock.call("5678")
- assert resp.status_code == 204
+
interface = Authenticator.objects.get_interface(user=self.user, interface_id="totp")
assert interface.secret == "secret56"
assert interface.config == {"secret": "secret56"}
- @mock.patch("sentry.utils.email.logger")
@mock.patch("sentry.auth.authenticators.TotpInterface.validate_otp", return_value=False)
- def test_invalid_otp(self, validate_otp, email_log):
+ def test_invalid_otp(self, validate_otp):
# XXX: Pretend an unbound function exists.
validate_otp.__func__ = None
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "totp"},
- )
-
# try to enroll
- resp = self.client.post(url, data={"secret": "secret12", "otp": "1234"})
+ with self.tasks():
+ self.get_error_response(
+ "me",
+ "totp",
+ method="post",
+ status_code=400,
+ **{"secret": "secret12", "otp": "1234"},
+ )
+
assert validate_otp.call_count == 1
assert validate_otp.call_args == mock.call("1234")
- assert resp.status_code == 400
- assert email_log.call_count == 0
- @mock.patch("sentry.utils.email.logger")
+ assert len(mail.outbox) == 0
+
@mock.patch("sentry.auth.authenticators.SmsInterface.validate_otp", return_value=True)
@mock.patch("sentry.auth.authenticators.SmsInterface.send_text", return_value=True)
- def test_sms_can_enroll(self, send_text, validate_otp, email_log):
+ def test_sms_can_enroll(self, send_text, validate_otp):
# XXX: Pretend an unbound function exists.
validate_otp.__func__ = None
@@ -117,47 +111,50 @@ def test_sms_can_enroll(self, send_text, validate_otp, email_log):
new_options["sms.twilio-account"] = "twilio-account"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "sms"},
- )
-
- resp = self.client.get(url)
- assert resp.status_code == 200
+ resp = self.get_success_response("me", "sms")
assert resp.data["form"]
assert resp.data["secret"]
- resp = self.client.post(url, data={"secret": "secret12", "phone": "1231234"})
+ self.get_success_response(
+ "me", "sms", method="post", **{"secret": "secret12", "phone": "1231234"}
+ )
assert send_text.call_count == 1
assert validate_otp.call_count == 0
- assert resp.status_code == 204
- resp = self.client.post(
- url, data={"secret": "secret12", "phone": "1231234", "otp": "123123"}
- )
+ with self.tasks():
+ self.get_success_response(
+ "me",
+ "sms",
+ method="post",
+ **{"secret": "secret12", "phone": "1231234", "otp": "123123"},
+ )
assert validate_otp.call_count == 1
assert validate_otp.call_args == mock.call("123123")
interface = Authenticator.objects.get_interface(user=self.user, interface_id="sms")
assert interface.phone_number == "1231234"
- self._assert_security_email_sent("mfa-added", email_log)
+ assert_security_email_sent("mfa-added")
def test_sms_invalid_otp(self):
new_options = settings.SENTRY_OPTIONS.copy()
new_options["sms.twilio-account"] = "twilio-account"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "sms"},
+ self.get_error_response(
+ "me",
+ "sms",
+ method="post",
+ status_code=400,
+ **{"secret": "secret12", "phone": "1231234", "otp": None},
)
- resp = self.client.post(
- url, data={"secret": "secret12", "phone": "1231234", "otp": None}
+ self.get_error_response(
+ "me",
+ "sms",
+ method="post",
+ status_code=400,
+ **{"secret": "secret12", "phone": "1231234", "otp": ""},
)
- assert resp.status_code == 400
- resp = self.client.post(url, data={"secret": "secret12", "phone": "1231234", "otp": ""})
- assert resp.status_code == 400
def test_sms_no_verified_email(self):
user = self.create_user()
@@ -168,14 +165,13 @@ def test_sms_no_verified_email(self):
new_options["sms.twilio-account"] = "twilio-account"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "sms"},
- )
- resp = self.client.post(
- url, data={"secret": "secret12", "phone": "1231234", "otp": None}
+ resp = self.get_error_response(
+ "me",
+ "sms",
+ method="post",
+ status_code=401,
+ **{"secret": "secret12", "phone": "1231234", "otp": None},
)
- assert resp.status_code == 401
assert resp.data == {
"detail": {
"code": "email-verification-required",
@@ -192,58 +188,53 @@ def test_rate_limited(self, try_enroll, is_limited):
new_options = settings.SENTRY_OPTIONS.copy()
new_options["system.url-prefix"] = "https://testserver"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "u2f"},
- )
- resp = self.client.get(url)
- assert resp.status_code == 200
-
- resp = self.client.post(
- url,
- data={
+ self.get_success_response("me", "u2f")
+ self.get_error_response(
+ "me",
+ "u2f",
+ method="post",
+ status_code=429,
+ **{
"deviceName": "device name",
"challenge": "challenge",
"response": "response",
},
)
- assert resp.status_code == 429
+
assert try_enroll.call_count == 0
- @mock.patch("sentry.utils.email.logger")
@mock.patch("sentry.auth.authenticators.U2fInterface.try_enroll", return_value=True)
- def test_u2f_can_enroll(self, try_enroll, email_log):
+ def test_u2f_can_enroll(self, try_enroll):
new_options = settings.SENTRY_OPTIONS.copy()
new_options["system.url-prefix"] = "https://testserver"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "u2f"},
- )
-
- resp = self.client.get(url)
- assert resp.status_code == 200
+ resp = self.get_success_response("me", "u2f")
assert resp.data["form"]
assert "secret" not in resp.data
assert "qrcode" not in resp.data
assert resp.data["challenge"]
- resp = self.client.post(
- url,
- data={
- "deviceName": "device name",
- "challenge": "challenge",
- "response": "response",
- },
- )
+ with self.tasks():
+ self.get_success_response(
+ "me",
+ "u2f",
+ method="post",
+ **{
+ "deviceName": "device name",
+ "challenge": "challenge",
+ "response": "response",
+ },
+ )
+
assert try_enroll.call_count == 1
assert try_enroll.call_args == mock.call("challenge", "response", "device name")
- assert resp.status_code == 204
- self._assert_security_email_sent("mfa-added", email_log)
+ assert_security_email_sent("mfa-added")
class AcceptOrganizationInviteTest(APITestCase):
+ endpoint = "sentry-api-0-user-authenticator-enroll"
+
def setUp(self):
self.organization = self.create_organization(owner=self.create_user("foo@example.com"))
self.user = self.create_user("bar@example.com", is_superuser=False)
@@ -279,7 +270,7 @@ def get_om_and_init_invite(self):
return om
- def assert_invite_accepted(self, response, member_id):
+ def assert_invite_accepted(self, response, member_id: int) -> None:
om = OrganizationMember.objects.get(id=member_id)
assert om.user == self.user
assert om.email is None
@@ -298,22 +289,16 @@ def setup_u2f(self):
new_options = settings.SENTRY_OPTIONS.copy()
new_options["system.url-prefix"] = "https://testserver"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "u2f"},
- )
-
- resp = self.client.post(
- url,
- data={
+ return self.get_success_response(
+ "me",
+ "u2f",
+ method="post",
+ **{
"deviceName": "device name",
"challenge": "challenge",
"response": "response",
},
)
- assert resp.status_code == 204
-
- return resp
def test_cannot_accept_invite_pending_invite__2fa_required(self):
om = self.get_om_and_init_invite()
@@ -342,17 +327,14 @@ def test_accept_pending_invite__sms_enroll(self, send_text, validate_otp):
new_options["sms.twilio-account"] = "twilio-account"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "sms"},
+ self.get_success_response(
+ "me", "sms", method="post", **{"secret": "secret12", "phone": "1231234"}
)
-
- resp = self.client.post(url, data={"secret": "secret12", "phone": "1231234"})
- assert resp.status_code == 204
-
- resp = self.client.post(
- url,
- data={
+ resp = self.get_success_response(
+ "me",
+ "sms",
+ method="post",
+ **{
"secret": "secret12",
"phone": "1231234",
"otp": "123123",
@@ -360,6 +342,7 @@ def test_accept_pending_invite__sms_enroll(self, send_text, validate_otp):
"token": om.token,
},
)
+
assert validate_otp.call_count == 1
assert validate_otp.call_args == mock.call("123123")
@@ -376,18 +359,13 @@ def test_accept_pending_invite__totp_enroll(self, validate_otp):
om = self.get_om_and_init_invite()
# setup totp
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "totp"},
- )
-
- resp = self.client.get(url)
- assert resp.status_code == 200
-
- resp = self.client.post(
- url, data={"secret": "secret12", "otp": "1234", "memberId": om.id, "token": om.token}
+ self.get_success_response("me", "totp")
+ resp = self.get_success_response(
+ "me",
+ "totp",
+ method="post",
+ **{"secret": "secret12", "otp": "1234", "memberId": om.id, "token": om.token},
)
- assert resp.status_code == 204
interface = Authenticator.objects.get_interface(user=self.user, interface_id="totp")
assert interface
@@ -447,18 +425,14 @@ def test_enroll_without_pending_invite__no_error(self, try_enroll, log):
new_options = settings.SENTRY_OPTIONS.copy()
new_options["system.url-prefix"] = "https://testserver"
with self.settings(SENTRY_OPTIONS=new_options):
- url = reverse(
- "sentry-api-0-user-authenticator-enroll",
- kwargs={"user_id": "me", "interface_id": "u2f"},
- )
-
- resp = self.client.post(
- url,
- data={
+ self.get_success_response(
+ "me",
+ "u2f",
+ method="post",
+ **{
"deviceName": "device name",
"challenge": "challenge",
"response": "response",
},
)
- assert resp.status_code == 204
assert log.error.called is False
diff --git a/tests/sentry/eventstream/kafka/test_postprocessworker.py b/tests/sentry/eventstream/kafka/test_postprocessworker.py
index 501a38b7770e83..6c16b4aaebc2e9 100644
--- a/tests/sentry/eventstream/kafka/test_postprocessworker.py
+++ b/tests/sentry/eventstream/kafka/test_postprocessworker.py
@@ -5,7 +5,9 @@
from sentry import options
from sentry.eventstream.kafka.postprocessworker import (
_CONCURRENCY_OPTION,
+ ErrorsPostProcessForwarderWorker,
PostProcessForwarderWorker,
+ TransactionsPostProcessForwarderWorker,
)
from sentry.eventstream.kafka.protocol import InvalidVersion
from sentry.utils import json
@@ -32,22 +34,50 @@ def kafka_message_payload():
]
-@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task")
-def test_post_process_forwarder(dispatch_post_process_group_task, kafka_message_payload):
- """
- Test that the post process forwarder calls dispatch_post_process_group_task with the correct arguments
- """
- forwarder = PostProcessForwarderWorker(concurrency=1)
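+# The "transaction_forwarder" message header routes messages between the
+# forwarders below: a missing header or b"0" targets the errors forwarder,
+# while b"1" targets the transactions forwarder.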
+@pytest.fixture
+def kafka_message_without_transaction_header(kafka_message_payload):
+ mock_message = Mock()
+ mock_message.headers = MagicMock(return_value=[("timestamp", b"12345")])
+ mock_message.value = MagicMock(return_value=json.dumps(kafka_message_payload))
+ mock_message.partition = MagicMock("1")
+ return mock_message
+
+@pytest.fixture
+def kafka_message_with_transaction_header_false(kafka_message_payload):
mock_message = Mock()
+ mock_message.headers = MagicMock(
+ return_value=[("timestamp", b"12345"), ("transaction_forwarder", b"0")]
+ )
mock_message.value = MagicMock(return_value=json.dumps(kafka_message_payload))
mock_message.partition = MagicMock("1")
+ return mock_message
- future = forwarder.process_message(mock_message)
+
+@pytest.fixture
+def kafka_message_with_transaction_header_true(kafka_message_payload):
+ mock_message = Mock()
+ mock_message.headers = MagicMock(
+ return_value=[("timestamp", b"12345"), ("transaction_forwarder", b"1")]
+ )
+ mock_message.value = MagicMock(return_value=json.dumps(kafka_message_payload))
+ mock_message.partition = MagicMock("1")
+ return mock_message
+
+
+@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task")
+def test_post_process_forwarder(
+ dispatch_post_process_group_task, kafka_message_without_transaction_header
+):
+ """
+ Tests that the post process forwarder calls dispatch_post_process_group_task with the correct arguments.
+ """
+ forwarder = PostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_without_transaction_header)
forwarder.flush_batch([future])
- dispatch_post_process_group_task.assert_called_with(
+ dispatch_post_process_group_task.assert_called_once_with(
event_id="fe0ee9a2bc3b415497bad68aaf70dc7f",
project_id=1,
group_id=43,
@@ -66,7 +96,7 @@ def test_post_process_forwarder_bad_message_headers(
dispatch_post_process_group_task, kafka_message_payload
):
"""
- Test that when bad message headers are received, post process forwarder still works if the payload is valid.
+ Tests that when bad message headers are received, the post process forwarder still works if the payload is valid.
"""
forwarder = PostProcessForwarderWorker(concurrency=1)
@@ -80,7 +110,7 @@ def test_post_process_forwarder_bad_message_headers(
forwarder.flush_batch([future])
- dispatch_post_process_group_task.assert_called_with(
+ dispatch_post_process_group_task.assert_called_once_with(
event_id="fe0ee9a2bc3b415497bad68aaf70dc7f",
project_id=1,
group_id=43,
@@ -95,7 +125,7 @@ def test_post_process_forwarder_bad_message_headers(
def test_post_process_forwarder_bad_message(kafka_message_payload):
"""
- Test that exception is thrown during flush_batch calls when a bad message is received.
+ Tests that an exception is raised during flush_batch when a bad message is received.
"""
forwarder = PostProcessForwarderWorker(concurrency=1)
@@ -116,7 +146,7 @@ def test_post_process_forwarder_bad_message(kafka_message_payload):
@pytest.mark.django_db
def test_post_process_forwarder_concurrency(kafka_message_payload):
"""
- Test that the number of threads change when the option is changed.
+ Tests that the number of threads changes when the option is changed.
"""
forwarder = PostProcessForwarderWorker(concurrency=1)
@@ -128,3 +158,131 @@ def test_post_process_forwarder_concurrency(kafka_message_payload):
assert forwarder._PostProcessForwarderWorker__current_concurrency == 5
forwarder.shutdown()
+
+
+@pytest.mark.django_db
+@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task")
+def test_errors_post_process_forwarder_missing_headers(
+ dispatch_post_process_group_task, kafka_message_without_transaction_header
+):
+ """
+ Tests that the errors post process forwarder calls dispatch_post_process_group_task
+ when the header "transaction_forwarder" is missing.
+ """
+ forwarder = ErrorsPostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_without_transaction_header)
+ assert future is not None
+
+ forwarder.flush_batch([future])
+
+ dispatch_post_process_group_task.assert_called_once_with(
+ event_id="fe0ee9a2bc3b415497bad68aaf70dc7f",
+ project_id=1,
+ group_id=43,
+ primary_hash="311ee66a5b8e697929804ceb1c456ffe",
+ is_new=False,
+ is_regression=None,
+ is_new_group_environment=False,
+ )
+
+ forwarder.shutdown()
+
+
+@pytest.mark.django_db
+@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task")
+def test_errors_post_process_forwarder_false_headers(
+ dispatch_post_process_group_task, kafka_message_with_transaction_header_false
+):
+ """
+ Tests that the errors post process forwarder calls dispatch_post_process_group_task
+ when the header "transaction_forwarder" is set to False.
+ """
+ forwarder = ErrorsPostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_with_transaction_header_false)
+ assert future is not None
+
+ forwarder.flush_batch([future])
+
+ dispatch_post_process_group_task.assert_called_once_with(
+ event_id="fe0ee9a2bc3b415497bad68aaf70dc7f",
+ project_id=1,
+ group_id=43,
+ primary_hash="311ee66a5b8e697929804ceb1c456ffe",
+ is_new=False,
+ is_regression=None,
+ is_new_group_environment=False,
+ )
+
+ forwarder.shutdown()
+
+
+@pytest.mark.django_db
+def test_errors_post_process_forwarder_true_headers(kafka_message_with_transaction_header_true):
+ """
+ Tests that the errors post process forwarder's process_message returns None
+ when the header "transaction_forwarder" is set to True.
+ """
+ forwarder = ErrorsPostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_with_transaction_header_true)
+
+ assert future is None
+
+ forwarder.shutdown()
+
+
+@pytest.mark.django_db
+def test_transactions_post_process_forwarder_missing_headers(
+ kafka_message_without_transaction_header,
+):
+ """
+ Tests that the transactions post process forwarder's process_message returns None
+ when the header "transaction_forwarder" is missing.
+ """
+ forwarder = TransactionsPostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_without_transaction_header)
+ assert future is None
+
+ forwarder.shutdown()
+
+
+@pytest.mark.django_db
+def test_transactions_post_process_forwarder_false_headers(
+ kafka_message_with_transaction_header_false,
+):
+ """
+ Tests that the transactions post process forwarder's process_message returns None
+ when the header "transaction_forwarder" is set to False.
+ """
+ forwarder = TransactionsPostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_with_transaction_header_false)
+ assert future is None
+
+ forwarder.shutdown()
+
+
+@pytest.mark.django_db
+@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task")
+def test_transactions_post_process_forwarder_true_headers(
+ dispatch_post_process_group_task, kafka_message_with_transaction_header_true
+):
+ """
+ Tests that the transactions post process forwarder calls dispatch_post_process_group_task
+ when the header "transaction_forwarder" is set to True.
+ """
+ forwarder = TransactionsPostProcessForwarderWorker(concurrency=1)
+ future = forwarder.process_message(kafka_message_with_transaction_header_true)
+
+ assert future is not None
+ forwarder.flush_batch([future])
+
+ dispatch_post_process_group_task.assert_called_with(
+ event_id="fe0ee9a2bc3b415497bad68aaf70dc7f",
+ project_id=1,
+ group_id=43,
+ primary_hash="311ee66a5b8e697929804ceb1c456ffe",
+ is_new=False,
+ is_regression=None,
+ is_new_group_environment=False,
+ )
+
+ forwarder.shutdown()
diff --git a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py
index 0f4de224b3c975..d373c0ff2346f7 100644
--- a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py
+++ b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py
@@ -542,3 +542,148 @@ def test_offset_pagination(self):
self.assert_alert_rule_serialized(self.two_alert_rule, result[0], skip_dates=True)
self.assert_alert_rule_serialized(self.yet_another_alert_rule, result[1], skip_dates=True)
+
+
+@freeze_time()
+class AlertRuleCreateEndpointTestCrashRateAlert(APITestCase):
+ endpoint = "sentry-api-0-project-alert-rules"
+ method = "post"
+
+ def setUp(self):
+ super().setUp()
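+ # Crash rate alerts run against the "sessions" dataset and use the special
+ # percentage(...) AS _crash_rate_alert_aggregate aggregate.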
+ self.valid_alert_rule = {
+ "aggregate": "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
+ "query": "",
+ "timeWindow": "60",
+ "resolveThreshold": 90,
+ "thresholdType": 1,
+ "triggers": [
+ {
+ "label": "critical",
+ "alertThreshold": 70,
+ "actions": [
+ {"type": "email", "targetType": "team", "targetIdentifier": self.team.id}
+ ],
+ },
+ {
+ "label": "warning",
+ "alertThreshold": 80,
+ "actions": [
+ {"type": "email", "targetType": "team", "targetIdentifier": self.team.id},
+ {"type": "email", "targetType": "user", "targetIdentifier": self.user.id},
+ ],
+ },
+ ],
+ "projects": [self.project.slug],
+ "owner": self.user.id,
+ "name": "JustAValidTestRule",
+ "dataset": "sessions",
+ "eventTypes": [],
+ }
+ # Login
+ self.create_member(
+ user=self.user, organization=self.organization, role="owner", teams=[self.team]
+ )
+ self.login_as(self.user)
+
+ @fixture
+ def organization(self):
+ return self.create_organization()
+
+ @fixture
+ def project(self):
+ return self.create_project(organization=self.organization)
+
+ @fixture
+ def user(self):
+ return self.create_user()
+
+ def test_simple_crash_rate_alerts_for_sessions(self):
+ with self.feature(["organizations:incidents", "organizations:performance-view"]):
+ resp = self.get_valid_response(
+ self.organization.slug, self.project.slug, status_code=201, **self.valid_alert_rule
+ )
+ assert "id" in resp.data
+ alert_rule = AlertRule.objects.get(id=resp.data["id"])
+ assert resp.data == serialize(alert_rule, self.user)
+
+ def test_simple_crash_rate_alerts_for_users(self):
+ self.valid_alert_rule.update(
+ {
+ "aggregate": "percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
+ }
+ )
+ with self.feature(["organizations:incidents", "organizations:performance-view"]):
+ resp = self.get_valid_response(
+ self.organization.slug, self.project.slug, status_code=201, **self.valid_alert_rule
+ )
+ assert "id" in resp.data
+ alert_rule = AlertRule.objects.get(id=resp.data["id"])
+ assert resp.data == serialize(alert_rule, self.user)
+
+ def test_simple_crash_rate_alerts_for_sessions_drops_event_types(self):
+ self.valid_alert_rule["eventTypes"] = ["sessions", "events"]
+ with self.feature(["organizations:incidents", "organizations:performance-view"]):
+ resp = self.get_valid_response(
+ self.organization.slug, self.project.slug, status_code=201, **self.valid_alert_rule
+ )
+ assert "id" in resp.data
+ alert_rule = AlertRule.objects.get(id=resp.data["id"])
+ assert resp.data == serialize(alert_rule, self.user)
+
+ def test_simple_crash_rate_alerts_for_sessions_with_invalid_time_window(self):
+ self.valid_alert_rule["timeWindow"] = "90"
+ with self.feature(["organizations:incidents", "organizations:performance-view"]):
+ resp = self.get_valid_response(
+ self.organization.slug, self.project.slug, status_code=400, **self.valid_alert_rule
+ )
+ assert (
+ resp.data["nonFieldErrors"][0]
+ == "Invalid Time Window: Allowed time windows for crash rate alerts are: "
+ "30min, 1h, 2h, 4h, 12h and 24h"
+ )
+
+ @patch(
+ "sentry.integrations.slack.utils.get_channel_id_with_timeout",
+ return_value=("#", None, True),
+ )
+ @patch("sentry.integrations.slack.tasks.find_channel_id_for_alert_rule.apply_async")
+ @patch("sentry.integrations.slack.tasks.uuid4")
+ def test_crash_rate_alerts_kicks_off_slack_async_job(
+ self, mock_uuid4, mock_find_channel_id_for_alert_rule, mock_get_channel_id
+ ):
+ mock_uuid4.return_value = self.get_mock_uuid()
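+ # The mocked channel lookup returns no resolved channel id plus a
+ # timed-out flag, so rule creation is deferred to the async Slack task
+ # (hence the 202 response below).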
+ self.integration = Integration.objects.create(
+ provider="slack",
+ name="Team A",
+ external_id="TXXXXXXX1",
+ metadata={"access_token": "xoxp-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx"},
+ )
+ self.integration.add_organization(self.organization, self.user)
+ self.valid_alert_rule["triggers"] = [
+ {
+ "label": "critical",
+ "alertThreshold": 50,
+ "actions": [
+ {
+ "type": "slack",
+ "targetIdentifier": "my-channel",
+ "targetType": "specific",
+ "integration": self.integration.id,
+ }
+ ],
+ },
+ ]
+ with self.feature(["organizations:incidents"]):
+ resp = self.get_valid_response(
+ self.organization.slug, self.project.slug, status_code=202, **self.valid_alert_rule
+ )
+ resp.data["uuid"] = "abc123"
+ assert not AlertRule.objects.filter(name="JustAValidTestRule").exists()
+ kwargs = {
+ "organization_id": self.organization.id,
+ "uuid": "abc123",
+ "data": self.valid_alert_rule,
+ "user_id": self.user.id,
+ }
+ mock_find_channel_id_for_alert_rule.assert_called_once_with(kwargs=kwargs)
diff --git a/tests/sentry/incidents/test_subscription_processor.py b/tests/sentry/incidents/test_subscription_processor.py
index d3e7985711b186..5723fc33b8025c 100644
--- a/tests/sentry/incidents/test_subscription_processor.py
+++ b/tests/sentry/incidents/test_subscription_processor.py
@@ -8,6 +8,7 @@
from exam import fixture, patcher
from freezegun import freeze_time
+from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
from sentry.incidents.logic import (
CRITICAL_TRIGGER_LABEL,
WARNING_TRIGGER_LABEL,
@@ -36,12 +37,12 @@
update_alert_rule_stats,
)
from sentry.models import Integration
-from sentry.snuba.models import QuerySubscription, SnubaQueryEventType
+from sentry.snuba.models import QueryDatasets, QuerySubscription, SnubaQueryEventType
from sentry.testutils import SnubaTestCase, TestCase
from sentry.testutils.helpers.datetime import iso_format
from sentry.utils import json
from sentry.utils.compat import map
-from sentry.utils.compat.mock import Mock, call
+from sentry.utils.compat.mock import Mock, call, patch
from sentry.utils.dates import to_timestamp
EMPTY = object()
@@ -134,6 +135,48 @@ def trigger(self):
def action(self):
return self.trigger.alertruletriggeraction_set.get()
+ @fixture
+ def crash_rate_alert_rule(self):
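+ # threshold_type BELOW with a Critical threshold of 80 means the alert
+ # fires once the crash-free percentage drops below 80%.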
+ rule = self.create_alert_rule(
+ projects=[self.project],
+ dataset=QueryDatasets.SESSIONS,
+ name="JustAValidRule",
+ query="",
+ aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
+ time_window=1,
+ threshold_type=AlertRuleThresholdType.BELOW,
+ threshold_period=1,
+ )
+ trigger = create_alert_rule_trigger(rule, "critical", 80)
+ create_alert_rule_trigger_action(
+ trigger,
+ AlertRuleTriggerAction.Type.EMAIL,
+ AlertRuleTriggerAction.TargetType.USER,
+ str(self.user.id),
+ )
+ return rule
+
+ @fixture
+ def crash_rate_alert_critical_trigger(self):
+ return self.crash_rate_alert_rule.alertruletrigger_set.get()
+
+ @fixture
+ def crash_rate_alert_critical_action(self):
+ return self.crash_rate_alert_critical_trigger.alertruletriggeraction_set.get()
+
+ @fixture
+ def crash_rate_alert_warning_trigger(self):
+ return create_alert_rule_trigger(self.crash_rate_alert_rule, "warning", 90)
+
+ @fixture
+ def crash_rate_alert_warning_action(self):
+ return create_alert_rule_trigger_action(
+ self.crash_rate_alert_warning_trigger,
+ AlertRuleTriggerAction.Type.EMAIL,
+ AlertRuleTriggerAction.TargetType.USER,
+ str(self.user.id),
+ )
+
def build_subscription_update(self, subscription, time_delta=None, value=EMPTY):
if time_delta is not None:
timestamp = timezone.now() + time_delta
@@ -169,6 +212,44 @@ def send_update(self, rule, value, time_delta=None, subscription=None):
processor.process_update(message)
return processor
+ def send_crash_rate_alert_update(self, rule, value, subscription, time_delta=None, count=EMPTY):
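+ # Build a minimal subscription update carrying the crash-rate aggregate
+ # (and an optional session count) and run it through the processor.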
+ self.email_action_handler.reset_mock()
+ if time_delta is None:
+ time_delta = timedelta()
+ processor = SubscriptionProcessor(subscription)
+
+ timestamp = timezone.now() + time_delta
+ timestamp = timestamp.replace(tzinfo=pytz.utc, microsecond=0)
+
+ with self.feature(
+ ["organizations:incidents", "organizations:performance-view"]
+ ), self.capture_on_commit_callbacks(execute=True):
+ processor.process_update(
+ {
+ "subscription_id": subscription.subscription_id
+ if subscription
+ else uuid4().hex,
+ "values": {
+ "data": [
+ {
+ CRASH_RATE_ALERT_AGGREGATE_ALIAS: value,
+ CRASH_RATE_ALERT_SESSION_COUNT_ALIAS: randint(0, 100)
+ if count is EMPTY
+ else count,
+ }
+ ]
+ },
+ "timestamp": timestamp,
+ "interval": 1,
+ "partition": 1,
+ "offset": 1,
+ }
+ )
+ return processor
+
def assert_slack_calls(self, trigger_labels):
expected = [f"{label}: some rule 2" for label in trigger_labels]
actual = [
@@ -1015,6 +1096,226 @@ def test_multiple_triggers_resolve_separately(self):
self.assert_trigger_exists_with_status(incident, other_trigger, TriggerStatus.RESOLVED)
self.assert_actions_resolved_for_incident(incident, [self.action])
+ def test_crash_rate_alert_for_sessions_with_auto_resolve_critical(self):
+ """
+ Ensures a Critical alert is triggered when `crash_free_percentage` falls below the
+ Critical threshold and is then resolved once `crash_free_percentage` rises back above
+ it (when no resolve_threshold is set).
+ """
+ rule = self.crash_rate_alert_rule
+ trigger = self.crash_rate_alert_critical_trigger
+ action_critical = self.crash_rate_alert_critical_action
+
+ # Send Critical Update
+ update_value = (1 - trigger.alert_threshold / 100) + 0.05
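+ # alert_threshold is 80, so this is a crash rate of 0.25, i.e. a
+ # crash-free percentage of 75% -- below the Critical threshold.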
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-10),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_critical])
+ self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE)
+
+ update_value = (1 - trigger.alert_threshold / 100) - 0.05
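+ # Crash rate 0.15 means crash-free 85%, back above the 80% threshold,
+ # so the incident should auto-resolve.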
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-1),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ self.assert_no_active_incident(rule)
+
+ def test_crash_rate_alert_for_sessions_with_auto_resolve_warning(self):
+ """
+ Ensures a Warning alert is triggered when `crash_free_percentage` falls below the
+ Warning threshold and is then resolved once `crash_free_percentage` rises back above
+ it (when no resolve_threshold is set).
+ """
+ rule = self.crash_rate_alert_rule
+ trigger_warning = self.crash_rate_alert_warning_trigger
+ action_warning = self.crash_rate_alert_warning_action
+
+ # Send Warning Update
+ update_value = (1 - trigger_warning.alert_threshold / 100) + 0.05
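+ # alert_threshold is 90, so this is a crash rate of 0.15: crash-free
+ # 85%, below the Warning threshold (90) but above Critical (80).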
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-3),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_warning])
+ self.assert_trigger_exists_with_status(incident, trigger_warning, TriggerStatus.ACTIVE)
+
+ update_value = (1 - trigger_warning.alert_threshold / 100) - 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-1),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ self.assert_no_active_incident(rule)
+
+ def test_crash_rate_alert_for_sessions_with_critical_warning_then_resolved(self):
+ """
+ Exercises the transition from Critical status to Warning status to Resolved for
+ Crash Rate Alerts.
+ """
+ rule = self.crash_rate_alert_rule
+
+ trigger = self.crash_rate_alert_critical_trigger
+ trigger_warning = self.crash_rate_alert_warning_trigger
+
+ action_critical = self.crash_rate_alert_critical_action
+ action_warning = self.crash_rate_alert_warning_action
+
+ # Send Critical Update
+ update_value = (1 - trigger.alert_threshold / 100) + 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-10),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_critical])
+ self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE)
+
+ # Send Warning Update
+ update_value = (1 - trigger_warning.alert_threshold / 100) + 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-3),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_warning])
+ self.assert_trigger_exists_with_status(incident, trigger_warning, TriggerStatus.ACTIVE)
+
+ # Send update higher than warning threshold
+ update_value = (1 - trigger_warning.alert_threshold / 100) - 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-1),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ self.assert_no_active_incident(rule)
+
+ def test_crash_rate_alert_for_sessions_with_triggers_lower_than_resolve_threshold(self):
+ """
+ Ensures that when `crash_free_percentage` rises above the warning threshold but stays
+ below the resolve threshold, the incident is not resolved.
+ """
+ rule = self.crash_rate_alert_rule
+ rule.resolve_threshold = 95
+ rule.save()
+
+ trigger = self.crash_rate_alert_critical_trigger
+ trigger_warning = self.crash_rate_alert_warning_trigger
+
+ action_critical = self.crash_rate_alert_critical_action
+ action_warning = self.crash_rate_alert_warning_action
+
+ # Send Critical Update
+ update_value = (1 - trigger.alert_threshold / 100) + 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-10),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_critical])
+ self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE)
+
+ # Send Warning Update
+ update_value = (1 - trigger_warning.alert_threshold / 100) + 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-3),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_warning])
+ self.assert_trigger_exists_with_status(incident, trigger_warning, TriggerStatus.ACTIVE)
+
+ # Send update higher than warning threshold but lower than resolve threshold
+ update_value = (1 - trigger_warning.alert_threshold / 100) - 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ time_delta=timedelta(minutes=-1),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ self.assert_active_incident(rule)
+
+ def test_crash_rate_alert_for_sessions_with_no_sessions_data(self):
+ """
+ Ensures Crash Rate Alert processing is skipped when there is no session data.
+ """
+ rule = self.crash_rate_alert_rule
+
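+ # A None aggregate value simulates a subscription update that carries
+ # no session data at all.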
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=None,
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ self.metrics.incr.assert_has_calls(
+ [
+ call("incidents.alert_rules.ignore_update_no_session_data"),
+ call("incidents.alert_rules.skipping_update_invalid_aggregation_value"),
+ ]
+ )
+
+ @patch("sentry.incidents.subscription_processor.CRASH_RATE_ALERT_MINIMUM_THRESHOLD", 30)
+ def test_crash_rate_alert_when_session_count_is_lower_than_minimum_threshold(self):
+ rule = self.crash_rate_alert_rule
+ trigger = self.crash_rate_alert_critical_trigger
+
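+ # The minimum session threshold is patched to 30; a count of 10 falls
+ # short, so the update should be ignored and no incident created.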
+ # Send Critical Update
+ update_value = (1 - trigger.alert_threshold / 100) + 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ count=10,
+ time_delta=timedelta(minutes=-10),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ self.assert_no_active_incident(rule)
+ self.metrics.incr.assert_has_calls(
+ [
+ call("incidents.alert_rules.ignore_update_count_lower_than_min_threshold"),
+ call("incidents.alert_rules.skipping_update_invalid_aggregation_value"),
+ ]
+ )
+
+ @patch("sentry.incidents.subscription_processor.CRASH_RATE_ALERT_MINIMUM_THRESHOLD", 30)
+ def test_crash_rate_alert_when_session_count_is_higher_than_minimum_threshold(self):
+ rule = self.crash_rate_alert_rule
+ trigger = self.crash_rate_alert_critical_trigger
+ action_critical = self.crash_rate_alert_critical_action
+
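+ # A session count of 31 clears the patched minimum of 30, so the
+ # update is processed and the Critical trigger fires.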
+ # Send Critical Update
+ update_value = (1 - trigger.alert_threshold / 100) + 0.05
+ self.send_crash_rate_alert_update(
+ rule=rule,
+ value=update_value,
+ count=31,
+ time_delta=timedelta(minutes=-10),
+ subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(),
+ )
+ incident = self.assert_active_incident(rule)
+ self.assert_actions_fired_for_incident(incident, [action_critical])
+ self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE)
+
def test_comparison_alert_above(self):
rule = self.comparison_rule_above
comparison_delta = timedelta(seconds=rule.comparison_delta)
diff --git a/tests/sentry/integrations/jira/test_webhooks.py b/tests/sentry/integrations/jira/test_webhooks.py
index 8d5e165f69ac67..1b7842a656b7fa 100644
--- a/tests/sentry/integrations/jira/test_webhooks.py
+++ b/tests/sentry/integrations/jira/test_webhooks.py
@@ -10,45 +10,38 @@
class JiraWebhooksTest(APITestCase):
+ def setUp(self):
+ super().setUp()
+ self.integration = Integration.objects.create(
+ provider="jira",
+ name="Example Jira",
+ metadata={
+ "oauth_client_id": "oauth-client-id",
+ "shared_secret": "a-super-secret-key-from-atlassian",
+ "base_url": "https://example.atlassian.net",
+ "domain_name": "example.atlassian.net",
+ },
+ )
+ self.integration.add_organization(self.organization, self.user)
+ self.path = reverse("sentry-extensions-jira-issue-updated")
+
@patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound")
def test_simple_assign(self, mock_sync_group_assignee_inbound):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
data = StubService.get_stub_data("jira", "edit_issue_assignee_payload.json")
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
mock_sync_group_assignee_inbound.assert_called_with(
- integration, "jess@sentry.io", "APP-123", assign=True
+ self.integration, "jess@sentry.io", "APP-123", assign=True
)
@override_settings(JIRA_USE_EMAIL_SCOPE=True)
@patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound")
@responses.activate
def test_assign_use_email_api(self, mock_sync_group_assignee_inbound):
- org = self.organization
-
- integration = Integration.objects.create(
- provider="jira",
- name="Example Jira",
- metadata={
- "oauth_client_id": "oauth-client-id",
- "shared_secret": "a-super-secret-key-from-atlassian",
- "base_url": "https://example.atlassian.net",
- "domain_name": "example.atlassian.net",
- },
- )
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
responses.add(
responses.GET,
"https://example.atlassian.net/rest/api/3/user/email",
@@ -57,85 +50,62 @@ def test_assign_use_email_api(self, mock_sync_group_assignee_inbound):
)
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
data = StubService.get_stub_data("jira", "edit_issue_assignee_payload.json")
data["issue"]["fields"]["assignee"]["emailAddress"] = ""
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
assert mock_sync_group_assignee_inbound.called
assert len(responses.calls) == 1
@patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound")
def test_assign_missing_email(self, mock_sync_group_assignee_inbound):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
data = StubService.get_stub_data("jira", "edit_issue_assignee_payload.json")
data["issue"]["fields"]["assignee"]["emailAddress"] = ""
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
assert not mock_sync_group_assignee_inbound.called
@patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound")
def test_simple_deassign(self, mock_sync_group_assignee_inbound):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
data = StubService.get_stub_data("jira", "edit_issue_no_assignee_payload.json")
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
mock_sync_group_assignee_inbound.assert_called_with(
- integration, None, "APP-123", assign=False
+ self.integration, None, "APP-123", assign=False
)
@patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound")
def test_simple_deassign_assignee_missing(self, mock_sync_group_assignee_inbound):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
data = StubService.get_stub_data("jira", "edit_issue_assignee_missing_payload.json")
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
mock_sync_group_assignee_inbound.assert_called_with(
- integration, None, "APP-123", assign=False
+ self.integration, None, "APP-123", assign=False
)
@patch.object(IssueSyncMixin, "sync_status_inbound")
def test_simple_status_sync_inbound(self, mock_sync_status_inbound):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
) as mock_get_integration_from_jwt:
data = StubService.get_stub_data("jira", "edit_issue_status_payload.json")
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
mock_get_integration_from_jwt.assert_called_with(
"anexampletoken", "/extensions/jira/issue-updated/", "jira", {}, method="POST"
@@ -160,30 +130,19 @@ def test_simple_status_sync_inbound(self, mock_sync_status_inbound):
)
def test_missing_changelog(self):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
- path = reverse("sentry-extensions-jira-issue-updated")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
data = StubService.get_stub_data("jira", "changelog_missing.json")
- resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
+ resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 200
def test_missing_body(self):
- org = self.organization
-
- integration = Integration.objects.create(provider="jira", name="Example Jira")
- integration.add_organization(org, self.user)
-
path = reverse("sentry-extensions-jira-installed")
-
with patch(
- "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration
+ "sentry.integrations.jira.webhooks.get_integration_from_jwt",
+ return_value=self.integration,
):
resp = self.client.post(path, data={}, HTTP_AUTHORIZATION="JWT anexampletoken")
assert resp.status_code == 400
diff --git a/tests/sentry/integrations/jira_server/__init__.py b/tests/sentry/integrations/jira_server/__init__.py
index e69de29bb2d1d6..bd3d8fbee08868 100644
--- a/tests/sentry/integrations/jira_server/__init__.py
+++ b/tests/sentry/integrations/jira_server/__init__.py
@@ -0,0 +1,119 @@
+from sentry.models import (
+ ExternalIssue,
+ Group,
+ GroupLink,
+ Identity,
+ IdentityProvider,
+ IdentityStatus,
+ Integration,
+ Organization,
+ User,
+)
+
+EXAMPLE_PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY-----
+MIICWwIBAAKBgQC1cd9t8sA03awggLiX2gjZxyvOVUPJksLly1E662tttTeR3Wm9
+eo6onNeI8HRD+O4wubUp4h4Chc7DtLDmFEPhUZ8Qkwztiifm99Xo3s0nUq4Pygp5
+AU09KXTEPbzHLh1dnXLcxVLmGDE4drh0NWmYsd/Zp7XNIZq2TRQQ3NTdVQIDAQAB
+AoGAFwMyS0eWiR30TssEnn3Q0Y4pSCoYRuCOR4bZ7pcdMPTi72UdnCKHJWt/Cqc0
+l8piq1tiVsWO+NLvvnKUXRoE4cAyrGrpf1F0uP5zYW71SQALc9wwsjDzuj7BZEuK
+fg35JSceLHWE1WtzPDX5Xg20YPnMrA/xe/RwuPjuBH0wSqECQQDizzmKdKCq0ejy
+3OxEto5knqpSEgRcOk0HDsdgjwkwiZJOj5ECV2FKpNHuu2thGy/aDJyLlmUso8j0
+OpvLAzOvAkEAzMwAgGexTxKm8hy3ilvVn9EvhSKjaIakqY4ONK9LZ4zMiDHI0H6C
+FXlwWX7CJM0YVFMubj8SB8rnIuvFDEBMOwJABHtRyMGbNyTktH/XD1iIIcbc2LhQ
+a74fLYeGOws4hEQDpxfBJsmxO3dcSppbedS+slFTepKjNymZW/IYh/9tMwJAEL5E
+9DqGBn7x4y1x2//yESTbC7lvPqZzY+FXS/tg4NBkEGZxkoolPHg3NTnlyXhzGsHK
+M/04DicKipJYA85l7QJAJ3u67qZXecM/oWTtJToBDuyKGHfdY1564+RbyDEjJJRb
+vz4O/8FQQ1sGjdEBMMrRBCHEG8o3/XDTrB97t45TeA==
+-----END RSA PRIVATE KEY-----"""
+
+EXAMPLE_ISSUE_SEARCH = """
+{
+ "expand": "names,schema",
+ "startAt": 0,
+ "maxResults": 50,
+ "total": 1,
+ "issues": [
+ {
+ "expand": "",
+ "id": "10001",
+ "self": "http://www.example.com/jira/rest/api/2/issue/10001",
+ "key": "HSP-1",
+ "fields": {
+ "summary": "this is a test issue summary"
+ }
+ }
+ ],
+ "warningMessages": [
+ "The value 'splat' does not exist for the field 'Foo'."
+ ]
+}
+"""
+
+EXAMPLE_USER_SEARCH_RESPONSE = """
+[
+ {"name": "bob", "displayName": "Bobby", "emailAddress": "bob@example.org"}
+]
+"""
+
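+# A Jira webhook body for a status transition ("In Progress" -> "Done")
+# on issue APP-1, shared by the jira_server webhook tests.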
+EXAMPLE_PAYLOAD = {
+ "changelog": {
+ "items": [
+ {
+ "from": "10101",
+ "field": "status",
+ "fromString": "In Progress",
+ "to": "10102",
+ "toString": "Done",
+ "fieldtype": "jira",
+ "fieldId": "status",
+ }
+ ],
+ "id": 12345,
+ },
+ "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"},
+}
+
+
+def get_integration(organization: Organization, user: User) -> Integration:
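+ """Create a jira_server Integration wired to an OAuth identity for `user`."""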
+ integration = Integration.objects.create(
+ provider="jira_server",
+ name="Example Jira",
+ metadata={
+ "verify_ssl": False,
+ "webhook_secret": "a long secret value",
+ "base_url": "https://jira.example.org",
+ },
+ )
+ identity_provider = IdentityProvider.objects.create(
+ external_id="jira.example.org:sentry-test", type="jira_server"
+ )
+ identity = Identity.objects.create(
+ idp=identity_provider,
+ user=user,
+ scopes=(),
+ status=IdentityStatus.VALID,
+ data={
+ "consumer_key": "sentry-test",
+ "private_key": EXAMPLE_PRIVATE_KEY,
+ "access_token": "access-token",
+ "access_token_secret": "access-token-secret",
+ },
+ )
+ integration.add_organization(organization, user, default_auth_id=identity.id)
+ return integration
+
+
+def link_group(organization: Organization, integration: Integration, group: Group) -> None:
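+ """Link `group` to the EXAMPLE_PAYLOAD issue so inbound status syncs can resolve it."""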
+ external_issue = ExternalIssue.objects.create(
+ key=EXAMPLE_PAYLOAD["issue"]["key"],
+ integration_id=integration.id,
+ organization_id=organization.id,
+ )
+
+ GroupLink.objects.create(
+ group_id=group.id,
+ project_id=group.project_id,
+ linked_type=GroupLink.LinkedType.issue,
+ relationship=GroupLink.Relationship.resolves,
+ linked_id=external_issue.id,
+ )
diff --git a/tests/sentry/integrations/jira_server/test_integration.py b/tests/sentry/integrations/jira_server/test_integration.py
index 13c5ae7d6e4a76..c503b7f6ee0dcf 100644
--- a/tests/sentry/integrations/jira_server/test_integration.py
+++ b/tests/sentry/integrations/jira_server/test_integration.py
@@ -7,7 +7,7 @@
from sentry.testutils import IntegrationTestCase
from sentry.utils import json, jwt
-from .testutils import EXAMPLE_PRIVATE_KEY
+from . import EXAMPLE_PRIVATE_KEY
class JiraServerIntegrationTest(IntegrationTestCase):
diff --git a/tests/sentry/integrations/jira_server/test_search.py b/tests/sentry/integrations/jira_server/test_search.py
index 109735b39ff90a..b1555ed6ed2d56 100644
--- a/tests/sentry/integrations/jira_server/test_search.py
+++ b/tests/sentry/integrations/jira_server/test_search.py
@@ -4,37 +4,15 @@
from django.urls import reverse
from exam import fixture
-from sentry.models import Identity, IdentityProvider, IdentityStatus, Integration
from sentry.testutils import APITestCase
-from .testutils import EXAMPLE_ISSUE_SEARCH, EXAMPLE_PRIVATE_KEY, EXAMPLE_USER_SEARCH_RESPONSE
+from . import EXAMPLE_ISSUE_SEARCH, EXAMPLE_USER_SEARCH_RESPONSE, get_integration
class JiraSearchEndpointTest(APITestCase):
@fixture
def integration(self):
- integration = Integration.objects.create(
- provider="jira_server",
- name="Example Jira",
- metadata={"verify_ssl": False, "base_url": "https://jira.example.org"},
- )
- identity_provider = IdentityProvider.objects.create(
- external_id="jira.example.org:sentry-test", type="jira_server"
- )
- identity = Identity.objects.create(
- idp=identity_provider,
- user=self.user,
- scopes=(),
- status=IdentityStatus.VALID,
- data={
- "consumer_key": "sentry-test",
- "private_key": EXAMPLE_PRIVATE_KEY,
- "access_token": "access-token",
- "access_token_secret": "access-token-secret",
- },
- )
- integration.add_organization(self.organization, self.user, default_auth_id=identity.id)
- return integration
+ return get_integration(self.organization, self.user)
@responses.activate
def test_get_success_text_search(self):
diff --git a/tests/sentry/integrations/jira_server/test_webhooks.py b/tests/sentry/integrations/jira_server/test_webhooks.py
index 548c33aba0cdc3..9f7a70ffc9e3c3 100644
--- a/tests/sentry/integrations/jira_server/test_webhooks.py
+++ b/tests/sentry/integrations/jira_server/test_webhooks.py
@@ -1,122 +1,64 @@
import jwt
import responses
-from django.urls import reverse
-from exam import fixture
from requests.exceptions import ConnectionError
from sentry.integrations.jira_server.integration import JiraServerIntegration
-from sentry.models import (
- Identity,
- IdentityProvider,
- IdentityStatus,
- Integration,
- OrganizationIntegration,
-)
+from sentry.models import OrganizationIntegration
from sentry.testutils import APITestCase
from sentry.utils.compat.mock import patch
-from .testutils import EXAMPLE_PRIVATE_KEY
+from . import EXAMPLE_PAYLOAD, get_integration, link_group
class JiraServerWebhookEndpointTest(APITestCase):
endpoint = "sentry-extensions-jiraserver-issue-updated"
method = "post"
- @fixture
- def integration(self):
- integration = Integration.objects.create(
- provider="jira_server",
- name="Example Jira",
- metadata={
- "verify_ssl": False,
- "webhook_secret": "a long secret value",
- "base_url": "https://jira.example.org",
- },
- )
- identity_provider = IdentityProvider.objects.create(
- external_id="jira.example.org:sentry-test", type="jira_server"
- )
- identity = Identity.objects.create(
- idp=identity_provider,
- user=self.user,
- scopes=(),
- status=IdentityStatus.VALID,
- data={
- "consumer_key": "sentry-test",
- "private_key": EXAMPLE_PRIVATE_KEY,
- "access_token": "access-token",
- "access_token_secret": "access-token-secret",
- },
- )
- integration.add_organization(self.organization, self.user, default_auth_id=identity.id)
- return integration
+ def setUp(self):
+ super().setUp()
+ self.integration = get_integration(self.organization, self.user)
@property
def jwt_token(self):
- integration = self.integration
- return jwt.encode({"id": integration.external_id}, integration.metadata["webhook_secret"])
+ return jwt.encode(
+ {"id": self.integration.external_id}, self.integration.metadata["webhook_secret"]
+ )
def test_post_empty_token(self):
# Read the property to get side-effects in the database.
- token = self.jwt_token
- token = " "
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path)
+ _ = self.jwt_token
- assert resp.status_code == 400
+ self.get_error_response(" ", status_code=400)
def test_post_missing_default_identity(self):
org_integration = OrganizationIntegration.objects.get(
organization_id=self.organization.id,
integration_id=self.integration.id,
)
- org_integration.update(default_auth_id=None)
- org_integration.update(config={"sync_status_reverse": True})
- payload = {
- "changelog": {
- "items": [
- {
- "from": "10101",
- "field": "status",
- "fromString": "In Progress",
- "to": "10102",
- "toString": "Done",
- "fieldtype": "jira",
- "fieldId": "status",
- }
- ],
- "id": 12345,
- },
- "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"},
- }
- self.get_error_response(self.jwt_token, **payload, status_code=400)
+ org_integration.update(default_auth_id=None, config={"sync_status_reverse": True})
+
+ link_group(self.organization, self.integration, self.group)
+
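+ # With the default identity removed, the webhook should still succeed
+ # instead of returning a 400.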
+ with self.tasks():
+ self.get_success_response(self.jwt_token, **EXAMPLE_PAYLOAD)
def test_post_token_missing_id(self):
integration = self.integration
# No id key in the token
token = jwt.encode({"no": integration.id}, integration.metadata["webhook_secret"])
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path)
-
- assert resp.status_code == 400
+ self.get_error_response(token, status_code=400)
def test_post_token_missing_integration(self):
integration = self.integration
# Use the wrong id in the token.
token = jwt.encode({"no": integration.id}, integration.metadata["webhook_secret"])
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path)
-
- assert resp.status_code == 400
+ self.get_error_response(token, status_code=400)
def test_post_token_invalid_signature(self):
integration = self.integration
# Use the wrong id in the token.
token = jwt.encode({"id": integration.external_id}, "bad-secret")
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path)
-
- assert resp.status_code == 400
+ self.get_error_response(token, status_code=400)
@patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound")
def test_post_update_assignee(self, mock_sync):
@@ -127,10 +69,7 @@ def test_post_update_assignee(self, mock_sync):
"changelog": {"items": [{"field": "assignee"}], "id": 12345},
"issue": {"fields": {"assignee": {"emailAddress": "bob@example.org"}}, "key": "APP-1"},
}
- token = self.jwt_token
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path, data=payload)
- assert resp.status_code == 200
+ self.get_success_response(self.jwt_token, **payload)
mock_sync.assert_called_with(self.integration, "bob@example.org", "APP-1", assign=True)
@@ -139,30 +78,14 @@ def test_post_update_status(self, mock_sync):
project = self.create_project()
self.create_group(project=project)
- payload = {
- "changelog": {
- "items": [
- {
- "from": "10101",
- "field": "status",
- "fromString": "In Progress",
- "to": "10102",
- "toString": "Done",
- "fieldtype": "jira",
- "fieldId": "status",
- }
- ],
- "id": 12345,
- },
- "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"},
- }
- token = self.jwt_token
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path, data=payload)
- assert resp.status_code == 200
+ self.get_success_response(self.jwt_token, **EXAMPLE_PAYLOAD)
mock_sync.assert_called_with(
- "APP-1", {"changelog": payload["changelog"]["items"][0], "issue": payload["issue"]}
+ "APP-1",
+ {
+ "changelog": EXAMPLE_PAYLOAD["changelog"]["items"][0],
+ "issue": EXAMPLE_PAYLOAD["issue"],
+ },
)
@responses.activate
@@ -172,31 +95,11 @@ def test_post_update_status_token_error(self):
url="https://jira.example.org/rest/api/2/status",
body=ConnectionError(),
)
- project = self.create_project()
- self.create_group(project=project)
- integration = self.integration
- installation = integration.get_installation(self.organization.id)
+ group = self.create_group(self.project)
+ installation = self.integration.get_installation(self.organization.id)
installation.update_organization_config({"sync_status_reverse": True})
- payload = {
- "changelog": {
- "items": [
- {
- "from": "10101",
- "field": "status",
- "fromString": "In Progress",
- "to": "10102",
- "toString": "Done",
- "fieldtype": "jira",
- "fieldId": "status",
- }
- ],
- "id": 12345,
- },
- "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"},
- }
- token = self.jwt_token
- path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token])
- resp = self.client.post(path, data=payload)
+ link_group(self.organization, self.integration, group)
- assert resp.status_code == 400
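+ # The status fetch raises ConnectionError, but the webhook request
+ # itself should still succeed.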
+ with self.tasks():
+ self.get_success_response(self.jwt_token, **EXAMPLE_PAYLOAD)
diff --git a/tests/sentry/integrations/jira_server/testutils.py b/tests/sentry/integrations/jira_server/testutils.py
deleted file mode 100644
index b49d41595c8820..00000000000000
--- a/tests/sentry/integrations/jira_server/testutils.py
+++ /dev/null
@@ -1,44 +0,0 @@
-EXAMPLE_PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQC1cd9t8sA03awggLiX2gjZxyvOVUPJksLly1E662tttTeR3Wm9
-eo6onNeI8HRD+O4wubUp4h4Chc7DtLDmFEPhUZ8Qkwztiifm99Xo3s0nUq4Pygp5
-AU09KXTEPbzHLh1dnXLcxVLmGDE4drh0NWmYsd/Zp7XNIZq2TRQQ3NTdVQIDAQAB
-AoGAFwMyS0eWiR30TssEnn3Q0Y4pSCoYRuCOR4bZ7pcdMPTi72UdnCKHJWt/Cqc0
-l8piq1tiVsWO+NLvvnKUXRoE4cAyrGrpf1F0uP5zYW71SQALc9wwsjDzuj7BZEuK
-fg35JSceLHWE1WtzPDX5Xg20YPnMrA/xe/RwuPjuBH0wSqECQQDizzmKdKCq0ejy
-3OxEto5knqpSEgRcOk0HDsdgjwkwiZJOj5ECV2FKpNHuu2thGy/aDJyLlmUso8j0
-OpvLAzOvAkEAzMwAgGexTxKm8hy3ilvVn9EvhSKjaIakqY4ONK9LZ4zMiDHI0H6C
-FXlwWX7CJM0YVFMubj8SB8rnIuvFDEBMOwJABHtRyMGbNyTktH/XD1iIIcbc2LhQ
-a74fLYeGOws4hEQDpxfBJsmxO3dcSppbedS+slFTepKjNymZW/IYh/9tMwJAEL5E
-9DqGBn7x4y1x2//yESTbC7lvPqZzY+FXS/tg4NBkEGZxkoolPHg3NTnlyXhzGsHK
-M/04DicKipJYA85l7QJAJ3u67qZXecM/oWTtJToBDuyKGHfdY1564+RbyDEjJJRb
-vz4O/8FQQ1sGjdEBMMrRBCHEG8o3/XDTrB97t45TeA==
------END RSA PRIVATE KEY-----"""
-
-EXAMPLE_ISSUE_SEARCH = """
-{
- "expand": "names,schema",
- "startAt": 0,
- "maxResults": 50,
- "total": 1,
- "issues": [
- {
- "expand": "",
- "id": "10001",
- "self": "http://www.example.com/jira/rest/api/2/issue/10001",
- "key": "HSP-1",
- "fields": {
- "summary": "this is a test issue summary"
- }
- }
- ],
- "warningMessages": [
- "The value 'splat' does not exist for the field 'Foo'."
- ]
-}
-"""
-
-EXAMPLE_USER_SEARCH_RESPONSE = """
-[
- {"name": "bob", "displayName": "Bobby", "emailAddress": "bob@example.org"}
-]
-"""
diff --git a/tests/sentry/integrations/test_issues.py b/tests/sentry/integrations/test_issues.py
index dfc441753a206b..90eda0f87b726f 100644
--- a/tests/sentry/integrations/test_issues.py
+++ b/tests/sentry/integrations/test_issues.py
@@ -44,7 +44,7 @@ def test_status_sync_inbound_resolve(self):
installation = integration.get_installation(group.organization.id)
- with self.feature("organizations:integrations-issue-sync"):
+ with self.feature("organizations:integrations-issue-sync"), self.tasks():
installation.sync_status_inbound(
external_issue.key,
{"project_id": "APP", "status": {"id": "12345", "category": "done"}},
@@ -87,7 +87,7 @@ def test_status_sync_inbound_unresolve(self):
installation = integration.get_installation(group.organization.id)
- with self.feature("organizations:integrations-issue-sync"):
+ with self.feature("organizations:integrations-issue-sync"), self.tasks():
installation.sync_status_inbound(
external_issue.key,
{"project_id": "APP", "status": {"id": "12345", "category": "in_progress"}},
diff --git a/tests/sentry/integrations/vsts/test_issues.py b/tests/sentry/integrations/vsts/test_issues.py
index f2fe3f24b4d9c1..271aea9ed2e471 100644
--- a/tests/sentry/integrations/vsts/test_issues.py
+++ b/tests/sentry/integrations/vsts/test_issues.py
@@ -5,6 +5,7 @@
from django.test import RequestFactory
from exam import fixture
+from sentry.integrations.issues import ResolveSyncAction
from sentry.integrations.vsts.integration import VstsIntegration
from sentry.models import (
ExternalIssue,
@@ -334,27 +335,42 @@ def test_get_issue_url(self):
@responses.activate
def test_should_resolve_active_to_resolved(self):
- should_resolve = self.integration.should_resolve(
- {"project": self.project_id_with_states, "old_state": "Active", "new_state": "Resolved"}
+ assert (
+ self.integration.get_resolve_sync_action(
+ {
+ "project": self.project_id_with_states,
+ "old_state": "Active",
+ "new_state": "Resolved",
+ }
+ )
+ == ResolveSyncAction.RESOLVE
)
- assert should_resolve is True
@responses.activate
def test_should_resolve_resolved_to_active(self):
- should_resolve = self.integration.should_resolve(
- {"project": self.project_id_with_states, "old_state": "Resolved", "new_state": "Active"}
+ assert (
+ self.integration.get_resolve_sync_action(
+ {
+ "project": self.project_id_with_states,
+ "old_state": "Resolved",
+ "new_state": "Active",
+ }
+ )
+ == ResolveSyncAction.UNRESOLVE
)
- assert should_resolve is False
@responses.activate
def test_should_resolve_new(self):
- should_resolve = self.integration.should_resolve(
- {"project": self.project_id_with_states, "old_state": None, "new_state": "New"}
+ assert (
+ self.integration.get_resolve_sync_action(
+ {"project": self.project_id_with_states, "old_state": None, "new_state": "New"}
+ )
+ == ResolveSyncAction.UNRESOLVE
)
- assert should_resolve is False
@responses.activate
def test_should_resolve_done_status_failure(self):
+ """TODO(mgaeta): Should this be NOOP instead of UNRESOLVE when we lose connection?"""
responses.reset()
responses.add(
responses.GET,
@@ -364,38 +380,30 @@ def test_should_resolve_done_status_failure(self):
"error": "The requested operation is not allowed. Your account is pending deletion."
},
)
- should_resolve = self.integration.should_resolve(
- {"project": self.project_id_with_states, "old_state": "Active", "new_state": "Resolved"}
- )
- assert should_resolve is False
-
- @responses.activate
- def test_should_unresolve_active_to_resolved(self):
- should_unresolve = self.integration.should_unresolve(
- {"project": self.project_id_with_states, "old_state": "Active", "new_state": "Resolved"}
- )
- assert should_unresolve is False
- @responses.activate
- def test_should_unresolve_resolved_to_active(self):
- should_unresolve = self.integration.should_unresolve(
- {"project": self.project_id_with_states, "old_state": "Resolved", "new_state": "Active"}
+ assert (
+ self.integration.get_resolve_sync_action(
+ {
+ "project": self.project_id_with_states,
+ "old_state": "Active",
+ "new_state": "Resolved",
+ }
+ )
+ == ResolveSyncAction.UNRESOLVE
)
- assert should_unresolve is True
@responses.activate
def test_should_not_unresolve_resolved_to_closed(self):
- should_unresolve = self.integration.should_unresolve(
- {"project": self.project_id_with_states, "old_state": "Resolved", "new_state": "Closed"}
- )
- assert should_unresolve is False
-
- @responses.activate
- def test_should_unresolve_new(self):
- should_unresolve = self.integration.should_unresolve(
- {"project": self.project_id_with_states, "old_state": None, "new_state": "New"}
+ assert (
+ self.integration.get_resolve_sync_action(
+ {
+ "project": self.project_id_with_states,
+ "old_state": "Resolved",
+ "new_state": "Closed",
+ }
+ )
+ == ResolveSyncAction.NOOP
)
- assert should_unresolve is True
class VstsIssueFormTest(VstsIssueBase):
diff --git a/tests/sentry/integrations/vsts/test_webhooks.py b/tests/sentry/integrations/vsts/test_webhooks.py
index 0d9eba70ec6722..cdf2d346394b12 100644
--- a/tests/sentry/integrations/vsts/test_webhooks.py
+++ b/tests/sentry/integrations/vsts/test_webhooks.py
@@ -161,19 +161,18 @@ def test_inbound_status_sync_resolve(self):
# Change so that state is changing from unresolved to resolved
work_item = self.set_workitem_state("Active", "Resolved")
- with self.feature("organizations:integrations-issue-sync"):
+ with self.feature("organizations:integrations-issue-sync"), self.tasks():
resp = self.client.post(
absolute_uri("/extensions/vsts/issue-updated/"),
data=work_item,
HTTP_SHARED_SECRET=self.shared_secret,
)
- assert resp.status_code == 200
- group_ids = [g.id for g in groups]
- assert (
- len(Group.objects.filter(id__in=group_ids, status=GroupStatus.RESOLVED))
- == num_groups
- )
- assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups
+ assert resp.status_code == 200
+ group_ids = [g.id for g in groups]
+ assert (
+ len(Group.objects.filter(id__in=group_ids, status=GroupStatus.RESOLVED)) == num_groups
+ )
+ assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups
@responses.activate
def test_inbound_status_sync_unresolve(self):
@@ -195,19 +194,18 @@ def test_inbound_status_sync_unresolve(self):
# Change so that state is changing from resolved to unresolved
work_item = self.set_workitem_state("Resolved", "Active")
- with self.feature("organizations:integrations-issue-sync"):
+ with self.feature("organizations:integrations-issue-sync"), self.tasks():
resp = self.client.post(
absolute_uri("/extensions/vsts/issue-updated/"),
data=work_item,
HTTP_SHARED_SECRET=self.shared_secret,
)
- assert resp.status_code == 200
- group_ids = [g.id for g in groups]
- assert (
- len(Group.objects.filter(id__in=group_ids, status=GroupStatus.UNRESOLVED))
- == num_groups
- )
- assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups
+ assert resp.status_code == 200
+ group_ids = [g.id for g in groups]
+ assert (
+ len(Group.objects.filter(id__in=group_ids, status=GroupStatus.UNRESOLVED)) == num_groups
+ )
+ assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups
@responses.activate
def test_inbound_status_sync_new_workitem(self):
diff --git a/tests/sentry/models/test_organization.py b/tests/sentry/models/test_organization.py
index 84ba867ca6b220..784827118269c8 100644
--- a/tests/sentry/models/test_organization.py
+++ b/tests/sentry/models/test_organization.py
@@ -279,8 +279,7 @@ def is_pending_organization_member(self, user_id, member_id, was_booted=True):
assert member.token is None
assert member.token_expires_at is None
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__compliant_and_non_compliant_members(self, email_log):
+ def test_handle_2fa_required__compliant_and_non_compliant_members(self):
compliant_user, compliant_member = self._create_user_and_member(has_2fa=True)
non_compliant_user, non_compliant_member = self._create_user_and_member()
@@ -292,7 +291,6 @@ def test_handle_2fa_required__compliant_and_non_compliant_members(self, email_lo
assert len(mail.outbox) == 1
assert mail.outbox[0].to == [non_compliant_user.email]
- assert email_log.info.call_count == 2 # mail.queued, mail.sent
audit_logs = AuditLogEntry.objects.filter(
event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner
@@ -301,8 +299,7 @@ def test_handle_2fa_required__compliant_and_non_compliant_members(self, email_lo
assert audit_logs[0].data["email"] == non_compliant_user.email
assert audit_logs[0].target_user_id == non_compliant_user.id
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__compliant_members(self, email_log):
+ def test_handle_2fa_required__compliant_members(self):
compliant = []
for num in range(0, 4):
user, member = self._create_user_and_member(has_2fa=True)
@@ -314,13 +311,12 @@ def test_handle_2fa_required__compliant_members(self, email_log):
for user, member in compliant:
self.is_organization_member(user.id, member.id)
- assert len(mail.outbox) == email_log.info.call_count == 0
+ assert len(mail.outbox) == 0
assert not AuditLogEntry.objects.filter(
event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner
).exists()
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__non_compliant_members(self, email_log):
+ def test_handle_2fa_required__non_compliant_members(self):
non_compliant = []
for num in range(0, 4):
user, member = self._create_user_and_member()
@@ -333,13 +329,11 @@ def test_handle_2fa_required__non_compliant_members(self, email_log):
self.is_pending_organization_member(user.id, member.id)
assert len(mail.outbox) == len(non_compliant)
- assert email_log.info.call_count == len(non_compliant) * 2 # mail.queued, mail.sent
assert AuditLogEntry.objects.filter(
event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner
).count() == len(non_compliant)
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__pending_member__ok(self, email_log):
+ def test_handle_2fa_required__pending_member__ok(self):
user, member = self._create_user_and_member(has_member_email=True)
member.user = None
member.save()
@@ -348,14 +342,13 @@ def test_handle_2fa_required__pending_member__ok(self, email_log):
self.org.handle_2fa_required(self.request)
self.is_pending_organization_member(user.id, member.id, was_booted=False)
- assert len(mail.outbox) == email_log.info.call_count == 0
+ assert len(mail.outbox) == 0
assert not AuditLogEntry.objects.filter(
event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner
).exists()
@mock.patch("sentry.tasks.auth.logger")
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__no_user_email__ok(self, email_log, auth_log):
+ def test_handle_2fa_required__no_user_email__ok(self, auth_log):
user, member = self._create_user_and_member(has_user_email=False, has_member_email=True)
assert not user.email
assert member.email
@@ -365,7 +358,6 @@ def test_handle_2fa_required__no_user_email__ok(self, email_log, auth_log):
self.is_pending_organization_member(user.id, member.id)
- assert email_log.info.call_count == 2 # mail.queued, mail.sent
assert len(mail.outbox) == 1
assert mail.outbox[0].to == [member.email]
@@ -376,8 +368,7 @@ def test_handle_2fa_required__no_user_email__ok(self, email_log, auth_log):
)
@mock.patch("sentry.tasks.auth.logger")
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__no_email__warning(self, email_log, auth_log):
+ def test_handle_2fa_required__no_email__warning(self, auth_log):
user, member = self._create_user_and_member(has_user_email=False)
assert not user.email
assert not member.email
@@ -392,8 +383,7 @@ def test_handle_2fa_required__no_email__warning(self, email_log, auth_log):
)
@mock.patch("sentry.tasks.auth.logger")
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__no_actor_and_api_key__ok(self, email_log, auth_log):
+ def test_handle_2fa_required__no_actor_and_api_key__ok(self, auth_log):
user, member = self._create_user_and_member()
with self.options({"system.url-prefix": "http://example.com"}), self.tasks():
@@ -408,7 +398,6 @@ def test_handle_2fa_required__no_actor_and_api_key__ok(self, email_log, auth_log
self.is_pending_organization_member(user.id, member.id)
assert len(mail.outbox) == 1
- assert email_log.info.call_count == 2 # mail.queued, mail.sent
assert (
AuditLogEntry.objects.filter(
event=AuditLogEntryEvent.MEMBER_PENDING,
@@ -420,8 +409,7 @@ def test_handle_2fa_required__no_actor_and_api_key__ok(self, email_log, auth_log
)
@mock.patch("sentry.tasks.auth.logger")
- @mock.patch("sentry.utils.email.logger")
- def test_handle_2fa_required__no_ip_address__ok(self, email_log, auth_log):
+ def test_handle_2fa_required__no_ip_address__ok(self, auth_log):
user, member = self._create_user_and_member()
with self.options({"system.url-prefix": "http://example.com"}), self.tasks():
@@ -431,7 +419,6 @@ def test_handle_2fa_required__no_ip_address__ok(self, email_log, auth_log):
self.is_pending_organization_member(user.id, member.id)
assert len(mail.outbox) == 1
- assert email_log.info.call_count == 2 # mail.queued, mail.sent
assert (
AuditLogEntry.objects.filter(
event=AuditLogEntryEvent.MEMBER_PENDING,
diff --git a/tests/sentry/models/test_release.py b/tests/sentry/models/test_release.py
index e2bbb457197611..bf30a3afe314f1 100644
--- a/tests/sentry/models/test_release.py
+++ b/tests/sentry/models/test_release.py
@@ -1204,3 +1204,77 @@ def test_follows_semver_check_when_project_only_has_two_releases(self):
)
is False
)
+
+
+class ClearCommitsTestCase(TestCase):
+ def test_simple(self):
+ org = self.create_organization()
+ project = self.create_project(organization=org, name="foo")
+ group = self.create_group(project=project)
+
+ repo = Repository.objects.create(organization_id=org.id, name="test/repo")
+
+ author = CommitAuthor.objects.create(
+ name="foo bar baz", email="foo@example.com", organization_id=org.id
+ )
+
+ author2 = CommitAuthor.objects.create(
+ name="foo bar boo", email="baroo@example.com", organization_id=org.id
+ )
+
+ commit = Commit.objects.create(
+ organization_id=org.id,
+ repository_id=repo.id,
+ author=author,
+ date_added="2019-03-01 12:00:00",
+ message="fixes %s" % (group.qualified_short_id),
+ key="alksdflskdfjsldkfajsflkslk",
+ )
+ commit2 = Commit.objects.create(
+ organization_id=org.id,
+ repository_id=repo.id,
+ author=author2,
+ date_added="2019-03-01 12:02:00",
+ message="i fixed something",
+ key="lskfslknsdkcsnlkdflksfdkls",
+ )
+
+ release = Release.objects.create(version="abcdabc", organization=org)
+ release.add_project(project)
+ release.set_commits(
+ [
+ {"id": commit.key, "repository": repo.name},
+ {"id": commit2.key, "repository": repo.name},
+ ]
+ )
+ # Confirm setup works
+ assert ReleaseCommit.objects.filter(commit=commit, release=release).exists()
+ assert ReleaseCommit.objects.filter(commit=commit2, release=release).exists()
+
+ assert release.commit_count == 2
+ assert release.authors == [str(author.id), str(author2.id)]
+ assert release.last_commit_id == commit.id
+
+ assert ReleaseHeadCommit.objects.filter(
+ release_id=release.id, commit_id=commit.id, repository_id=repo.id
+ ).exists()
+
+ # Now clear the release;
+ release.clear_commits()
+ assert not ReleaseCommit.objects.filter(commit=commit, release=release).exists()
+ assert not ReleaseCommit.objects.filter(commit=commit2, release=release).exists()
+ assert not ReleaseHeadCommit.objects.filter(
+ release_id=release.id, commit_id=commit.id, repository_id=repo.id
+ ).exists()
+
+ assert release.commit_count == 0
+ assert release.authors == []
+ assert not release.last_commit_id
+
+ # Commits should still exist
+ assert Commit.objects.filter(
+ id=commit.id, organization_id=org.id, repository_id=repo.id
+ ).exists()
+ assert Commit.objects.filter(
+ id=commit2.id, organization_id=org.id, repository_id=repo.id
+ ).exists()
diff --git a/tests/sentry/notifications/test_notifications.py b/tests/sentry/notifications/test_notifications.py
index 885a3b5aa6d664..b1ed9179e29741 100644
--- a/tests/sentry/notifications/test_notifications.py
+++ b/tests/sentry/notifications/test_notifications.py
@@ -6,6 +6,7 @@
import responses
from django.core import mail
from django.utils import timezone
+from sentry_relay import parse_release
from sentry.event_manager import EventManager
from sentry.models import (
@@ -219,6 +220,7 @@ def test_sends_deployment_notification(self):
"""
release = self.create_release()
+ version_parsed = self.version_parsed = parse_release(release.version)["description"]
url = f"/api/0/organizations/{self.organization.slug}/releases/{release.version}/deploys/"
with self.tasks():
response = self.client.post(
@@ -228,10 +230,10 @@ def test_sends_deployment_notification(self):
msg = mail.outbox[0]
# check the txt version
- assert f"Version {release.version} was deployed to {self.environment.name} on" in msg.body
+ assert f"Version {version_parsed} was deployed to {self.environment.name} on" in msg.body
# check the html version
assert (
- f"Version {release.version} was deployed to {self.environment.name}\n \n"
+ f"Version {version_parsed} was deployed to {self.environment.name}\n \n"
in msg.alternatives[0][0]
)
@@ -239,7 +241,7 @@ def test_sends_deployment_notification(self):
assert (
text
- == f"Release {release.version} was deployed to {self.environment.name} for this project"
+ == f"Release {version_parsed} was deployed to {self.environment.name} for this project"
)
assert (
attachment["actions"][0]["url"]
diff --git a/tests/sentry/processing/realtime_metrics/test_redis.py b/tests/sentry/processing/realtime_metrics/test_redis.py
index fe510ddafe29ef..b40bdaccf69215 100644
--- a/tests/sentry/processing/realtime_metrics/test_redis.py
+++ b/tests/sentry/processing/realtime_metrics/test_redis.py
@@ -4,8 +4,13 @@
import pytest
-from sentry.processing import realtime_metrics # type: ignore
-from sentry.processing.realtime_metrics.redis import RedisRealtimeMetricsStore # type: ignore
+from sentry.processing import realtime_metrics
+from sentry.processing.realtime_metrics.base import (
+ BucketedCount,
+ BucketedDurations,
+ DurationHistogram,
+)
+from sentry.processing.realtime_metrics.redis import RedisRealtimeMetricsStore
from sentry.utils import redis
if TYPE_CHECKING:
@@ -46,6 +51,13 @@ def test_default() -> None:
realtime_metrics.increment_project_duration_counter(17, 1234, 55)
+# TODO: group tests using classes
+
+#
+# increment_project_event_counter()
+#
+
+
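+# Counter keys have the shape
+# "symbolicate_event_low_priority:counter:<bucket_size>:<project_id>:<bucket_timestamp>"
+# (inferred from the assertions below).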
def test_increment_project_event_counter_simple(
store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
@@ -80,6 +92,11 @@ def test_increment_project_event_counter_different_buckets(
assert redis_cluster.get("symbolicate_event_low_priority:counter:10:17:1150") == "1"
+#
+# increment_project_duration_counter()
+#
+
+
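+# Histogram keys have the shape
+# "symbolicate_event_low_priority:histogram:<bucket_size>:<project_id>:<bucket_timestamp>",
+# a hash mapping a duration bucket to its count (inferred from the hget assertions below).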
def test_increment_project_duration_counter_simple(
store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
@@ -112,3 +129,440 @@ def test_increment_project_duration_counter_different_buckets(
assert redis_cluster.hget("symbolicate_event_low_priority:histogram:10:17:1140", "20") == "1"
assert redis_cluster.hget("symbolicate_event_low_priority:histogram:10:17:1150", "40") == "1"
+
+
+#
+# get_lpq_projects()
+#
+
+
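+# LPQ membership lives in the Redis set "store.symbolicate-event-lpq-selected";
+# these tests seed it directly with sadd/srem.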
+def test_get_lpq_projects_unset(store: RedisRealtimeMetricsStore) -> None:
+ in_lpq = store.get_lpq_projects()
+ assert in_lpq == set()
+
+
+def test_get_lpq_projects_empty(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
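+ # Add and then remove a member so the set has existed but is now empty.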
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+ redis_cluster.srem("store.symbolicate-event-lpq-selected", 1)
+
+ in_lpq = store.get_lpq_projects()
+ assert in_lpq == set()
+
+
+def test_get_lpq_projects_filled(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+ in_lpq = store.get_lpq_projects()
+ assert in_lpq == {1}
+
+
+#
+# add_project_to_lpq()
+#
+
+
+def test_add_project_to_lpq_unset(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ added = store.add_project_to_lpq(1)
+ assert added
+ in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert in_lpq == {"1"}
+
+
+def test_add_project_to_lpq_empty(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+ redis_cluster.srem("store.symbolicate-event-lpq-selected", 1)
+
+ added = store.add_project_to_lpq(1)
+ assert added
+ in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert in_lpq == {"1"}
+
+
+def test_add_project_to_lpq_dupe(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+
+ added = store.add_project_to_lpq(1)
+ assert not added
+ in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert in_lpq == {"1"}
+
+
+def test_add_project_to_lpq_filled(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11)
+
+ added = store.add_project_to_lpq(1)
+ assert added
+ in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert in_lpq == {"1", "11"}
+
+
+#
+# remove_projects_from_lpq()
+#
+
+
+def test_remove_projects_from_lpq_unset(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ removed = store.remove_projects_from_lpq({1})
+ assert removed == 0
+
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == set()
+
+
+def test_remove_projects_from_lpq_empty(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+ redis_cluster.srem("store.symbolicate-event-lpq-selected", 1)
+
+ removed = store.remove_projects_from_lpq({1})
+ assert removed == 0
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == set()
+
+
+def test_remove_projects_from_lpq_only_member(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+
+ removed = store.remove_projects_from_lpq({1})
+ assert removed == 1
+
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == set()
+
+
+def test_remove_projects_from_lpq_nonmember(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11)
+
+ removed = store.remove_projects_from_lpq({1})
+ assert removed == 0
+
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == {"11"}
+
+
+def test_remove_projects_from_lpq_subset(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11)
+
+ removed = store.remove_projects_from_lpq({1})
+ assert removed == 1
+
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == {"11"}
+
+
+def test_remove_projects_from_lpq_all_members(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11)
+
+ removed = store.remove_projects_from_lpq({1, 11})
+ assert removed == 2
+
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == set()
+
+
+def test_remove_projects_from_lpq_no_members(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
+
+ removed = store.remove_projects_from_lpq(set())
+ assert removed == 0
+
+ remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
+ assert remaining == {"1"}
+
+
+#
+# projects()
+#
+
+
+def test_projects_unset(store: RedisRealtimeMetricsStore) -> None:
+ candidates = store.projects()
+ assert list(candidates) == []
+
+
+def test_projects_empty(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set(
+ "symbolicate_event_low_priority:counter:10:42:111",
+ 0,
+ )
+ redis_cluster.delete("symbolicate_event_low_priority:counter:10:42:111")
+
+ candidates = store.projects()
+ assert list(candidates) == []
+
+
+def test_projects_different_bucket(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:5:42:111", 0)
+
+ candidates = store.projects()
+ assert list(candidates) == []
+
+
+def test_projects_negative_timestamp(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
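+ # Negative timestamps are not rejected; the project is still reported.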
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:-111", 0)
+
+ candidates = store.projects()
+ assert list(candidates) == [42]
+
+
+def test_projects_one_count(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+
+ candidates = store.projects()
+ assert list(candidates) == [42]
+
+
+def test_projects_one_histogram(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111:0", 0, 123)
+
+ candidates = store.projects()
+ assert list(candidates) == [42]
+
+
+def test_projects_multiple_metric_types(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:53:111:20", 20, 456)
+
+ candidates = store.projects()
+ assert list(candidates) == [42, 53]
+
+
+def test_projects_mixed_buckets(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+ redis_cluster.set("symbolicate_event_low_priority:counter:5:53:111", 0)
+
+ candidates = store.projects()
+ assert list(candidates) == [42]
+
+
+#
+# get_counts_for_project()
+#
+
+
+def test_get_counts_for_project_unset(store: RedisRealtimeMetricsStore) -> None:
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == []
+
+
+def test_get_counts_for_project_empty(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set(
+ "symbolicate_event_low_priority:counter:10:42:111",
+ 0,
+ )
+ redis_cluster.delete("symbolicate_event_low_priority:counter:10:42:111")
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == []
+
+
+def test_get_counts_for_project_no_matching_keys(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:53:111", 0)
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == []
+
+
+def test_get_counts_for_project_negative_key(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:-111", 0)
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == [
+ BucketedCount(timestamp=-111, count=0),
+ ]
+
+
+def test_get_counts_for_project_negative_count(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", -10)
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == [
+ BucketedCount(timestamp=111, count=-10),
+ ]
+
+
+def test_get_counts_for_project_multiple_projects(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:222", 0)
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:53:111", 0)
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == [
+ BucketedCount(timestamp=111, count=0),
+ BucketedCount(timestamp=222, count=0),
+ ]
+
+
+def test_get_counts_for_project_multi_metric(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222:0", 0, 123)
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == [
+ BucketedCount(timestamp=111, count=0),
+ ]
+
+
+def test_get_counts_for_project_different_buckets(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+ redis_cluster.set("symbolicate_event_low_priority:counter:5:42:111", 0)
+
+ counts = store.get_counts_for_project(42)
+ assert list(counts) == [
+ BucketedCount(timestamp=111, count=0),
+ ]
+
+
+#
+# get_durations_for_project()
+#
+
+
+def test_get_durations_for_project_unset(store: RedisRealtimeMetricsStore) -> None:
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == []
+
+
+def test_get_durations_for_project_empty(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset(
+ "symbolicate_event_low_priority:histogram:10:42:111",
+ 0,
+ 123,
+ )
+ redis_cluster.delete("symbolicate_event_low_priority:histogram:10:42:111")
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == []
+
+
+def test_get_durations_for_project_no_matching_keys(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:53:111", 0, 123)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == []
+
+
+def test_get_durations_for_project_negative_timestamp(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:-111", 0, 123)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == [
+ DurationHistogram(timestamp=-111, histogram=BucketedDurations({0: 123}))
+ ]
+
+
+def test_get_durations_for_project_negative_duration(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", -20, 123)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == [
+ DurationHistogram(timestamp=111, histogram=BucketedDurations({-20: 123}))
+ ]
+
+
+def test_get_durations_for_project_negative_count(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, -123)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == [
+ DurationHistogram(timestamp=111, histogram=BucketedDurations({0: -123}))
+ ]
+
+
+def test_get_durations_for_project_multi_key_multi_durations(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, 123)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 10, 456)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222", 20, 123)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:53:111", 0, 123)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == [
+ DurationHistogram(timestamp=111, histogram=BucketedDurations({0: 123, 10: 456})),
+ DurationHistogram(timestamp=222, histogram=BucketedDurations({20: 123})),
+ ]
+
+
+def test_get_durations_for_project_multi_metric(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222", 0, 123)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == [DurationHistogram(timestamp=222, histogram=BucketedDurations({0: 123}))]
+
+
+def test_get_durations_for_project_different_buckets(
+ store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
+) -> None:
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, 123)
+ redis_cluster.hset("symbolicate_event_low_priority:histogram:5:42:111", 20, 456)
+
+ counts = store.get_durations_for_project(42)
+ assert list(counts) == [DurationHistogram(timestamp=111, histogram=BucketedDurations({0: 123}))]
diff --git a/tests/sentry/rules/actions/test_notify_event_sentry_app.py b/tests/sentry/rules/actions/test_notify_event_sentry_app.py
new file mode 100644
index 00000000000000..b4e29647eeac9b
--- /dev/null
+++ b/tests/sentry/rules/actions/test_notify_event_sentry_app.py
@@ -0,0 +1,91 @@
+from sentry.rules.actions.notify_event_sentry_app import NotifyEventSentryAppAction
+from sentry.tasks.sentry_apps import notify_sentry_app
+from sentry.testutils.cases import RuleTestCase
+
+SENTRY_APP_ALERT_ACTION = "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction"
+
+
+class NotifyEventSentryAppActionTest(RuleTestCase):
+ rule_cls = NotifyEventSentryAppAction
+ schema = {
+ "elements": [
+ {
+ "type": "alert-rule-action",
+ "title": "Create Alert Rule UI Example Task",
+ "settings": {
+ "type": "alert-rule-settings",
+ "uri": "/test/",
+ "required_fields": [
+ {"type": "text", "label": "Title", "name": "title"},
+ {"type": "textarea", "label": "Description", "name": "description"},
+ ],
+ },
+ }
+ ]
+ }
+ schema_data = {"title": "foo", "description": "bar"}
+
+ def test_applies_correctly_for_sentry_apps(self):
+ event = self.get_event()
+
+ self.app = self.create_sentry_app(
+ organization=event.organization,
+ name="Test Application",
+ is_alertable=True,
+ schema=self.schema,
+ )
+
+ self.install = self.create_sentry_app_installation(
+ slug="test-application", organization=event.organization
+ )
+
+ rule = self.get_rule(
+ data={
+ "sentryAppInstallationUuid": self.install.uuid,
+ "settings": self.schema_data,
+ }
+ )
+
+ assert rule.id == SENTRY_APP_ALERT_ACTION
+
+ futures = list(rule.after(event=event, state=self.get_state()))
+ assert len(futures) == 1
+ assert futures[0].callback is notify_sentry_app
+ assert futures[0].kwargs["sentry_app"].id == self.app.id
+ assert futures[0].kwargs["schema_defined_settings"] == self.schema_data
+
+ def test_sentry_app_actions(self):
+ event = self.get_event()
+
+ self.project = self.create_project(organization=event.organization)
+
+ self.app = self.create_sentry_app(
+ organization=event.organization,
+ name="Test Application",
+ is_alertable=True,
+ schema=self.schema,
+ )
+
+ self.install = self.create_sentry_app_installation(
+ slug="test-application", organization=event.organization
+ )
+
+ rule = self.get_rule(
+ data={
+ "sentryAppInstallationUuid": self.install.uuid,
+ "settings": self.schema_data,
+ }
+ )
+
+ action_list = rule.get_custom_actions(self.project)
+ assert len(action_list) == 1
+
+ action = action_list[0]
+ alert_element = self.schema["elements"][0]
+ assert action["id"] == SENTRY_APP_ALERT_ACTION
+ assert action["service"] == self.app.slug
+ assert action["prompt"] == self.app.name
+ assert action["actionType"] == "sentryapp"
+ assert action["enabled"]
+ assert action["formFields"] == alert_element["settings"]
+ assert alert_element["title"] in action["label"]
diff --git a/tests/sentry/search/events/test_builder.py b/tests/sentry/search/events/test_builder.py
index 41101d7475c606..31c0cc8c29081e 100644
--- a/tests/sentry/search/events/test_builder.py
+++ b/tests/sentry/search/events/test_builder.py
@@ -6,7 +6,7 @@
from snuba_sdk.column import Column
from snuba_sdk.conditions import Condition, Op, Or
from snuba_sdk.function import Function
-from snuba_sdk.orderby import Direction, OrderBy
+from snuba_sdk.orderby import Direction, LimitBy, OrderBy
from sentry.exceptions import InvalidSearchQuery
from sentry.search.events.builder import QueryBuilder
@@ -85,6 +85,19 @@ def test_simple_orderby(self):
)
query.get_snql_query().validate()
+ def test_simple_limitby(self):
+ query = QueryBuilder(
+ dataset=Dataset.Discover,
+ params=self.params,
+ query="",
+ selected_columns=["message"],
+ orderby="message",
+ limitby=("message", 1),
+ limit=4,
+ )
+
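+ # LIMIT BY caps the result at 1 row per distinct "message" value.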
+ assert query.limitby == LimitBy(Column("message"), 1)
+
def test_environment_filter(self):
query = QueryBuilder(
Dataset.Discover,
diff --git a/tests/sentry/search/events/test_fields.py b/tests/sentry/search/events/test_fields.py
index 53508f59a87d0f..727c8dc0e21d2d 100644
--- a/tests/sentry/search/events/test_fields.py
+++ b/tests/sentry/search/events/test_fields.py
@@ -169,6 +169,7 @@ def test_get_json_meta_type(field_alias, snuba_type, function, expected):
r'to_other(release,"asdf @ \"qwer: (3,2)")',
("to_other", ["release", r'"asdf @ \"qwer: (3,2)"'], None),
),
+ ("identity(sessions)", ("identity", ["sessions"], None)),
],
)
def test_parse_function(function, expected):
@@ -360,14 +361,15 @@ def test_field_alias_with_aggregates(self):
]
def test_aggregate_function_expansion(self):
- fields = ["count_unique(user)", "count(id)", "min(timestamp)"]
- result = resolve_field_list(fields, eventstore.Filter())
+ fields = ["count_unique(user)", "count(id)", "min(timestamp)", "identity(sessions)"]
+ result = resolve_field_list(fields, eventstore.Filter(), functions_acl=["identity"])
# Automatic fields should be inserted, count() should have its column dropped.
assert result["selected_columns"] == []
assert result["aggregations"] == [
["uniq", "user", "count_unique_user"],
["count", None, "count_id"],
["min", "timestamp", "min_timestamp"],
+ ["identity", "sessions", "identity_sessions"],
]
assert result["groupby"] == []
diff --git a/tests/sentry/security/test_utils.py b/tests/sentry/security/test_utils.py
deleted file mode 100644
index 18cd614a76a5f1..00000000000000
--- a/tests/sentry/security/test_utils.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from sentry.security.utils import is_valid_email_address
-
-
-def test_is_valid_email_address_number_at_qqcom():
- assert is_valid_email_address("12345@qq.com") is False
-
-
-def test_is_valid_email_address_normal_human_email_address():
- assert is_valid_email_address("dcramer@gmail.com") is True
diff --git a/tests/sentry/snuba/test_discover.py b/tests/sentry/snuba/test_discover.py
index 665bf243711ea1..372ecf5635a2d8 100644
--- a/tests/sentry/snuba/test_discover.py
+++ b/tests/sentry/snuba/test_discover.py
@@ -2871,7 +2871,7 @@ def test_conditions_with_timestamps(self):
for t, ev in enumerate(events):
val = ev[0] * 32
for i in range(ev[1]):
- data = load_data("transaction", timestamp=before_now(seconds=3 * t + 1))
+ data = load_data("transaction", timestamp=self.now - timedelta(seconds=3 * t + 1))
data["transaction"] = f"{val}"
self.store_event(data=data, project_id=self.project.id)
@@ -2879,8 +2879,8 @@ def test_conditions_with_timestamps(self):
results = discover.query(
selected_columns=["transaction", "count()"],
query="event.type:transaction AND (timestamp:<{} OR timestamp:>{})".format(
- iso_format(before_now(seconds=5)),
- iso_format(before_now(seconds=3)),
+ iso_format(self.now - timedelta(seconds=5)),
+ iso_format(self.now - timedelta(seconds=3)),
),
params={
"project_id": [self.project.id],
@@ -2937,14 +2937,49 @@ def test_count_with_or(self):
assert data[0]["transaction"] == "a" * 32
assert data[0]["count"] == 1
- def test_access_to_private_functions(self):
- # using private functions directly without access should error
- with pytest.raises(InvalidSearchQuery, match="array_join: no access to private function"):
- discover.query(
- selected_columns=["array_join(tags.key)"],
+ def test_array_join(self):
+ data = load_data("transaction", timestamp=before_now(seconds=3))
+ data["measurements"] = {
+ "fp": {"value": 1000},
+ "fcp": {"value": 1000},
+ "lcp": {"value": 1000},
+ }
+ self.store_event(data=data, project_id=self.project.id)
+
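+ # array_join is a private function; functions_acl grants this query access to it.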
+ for use_snql in [False, True]:
+ results = discover.query(
+ selected_columns=["array_join(measurements_key)"],
query="",
- params={"project_id": [self.project.id]},
+ params={
+ "project_id": [self.project.id],
+ "start": self.two_min_ago,
+ "end": self.now,
+ },
+ functions_acl=["array_join"],
+ use_snql=use_snql,
)
+ assert {"fcp", "fp", "lcp"} == {
+ row["array_join_measurements_key"] for row in results["data"]
+ }
+
+ def test_access_to_private_functions(self):
+ for use_snql in [False, True]:
+ # using private functions directly without access should error
+ with pytest.raises(
+ InvalidSearchQuery, match="array_join: no access to private function"
+ ):
+ discover.query(
+ selected_columns=["array_join(tags.key)"],
+ query="",
+ params={
+ "project_id": [self.project.id],
+ "start": self.two_min_ago,
+ "end": self.now,
+ },
+ use_snql=use_snql,
+ )
+
+ # TODO: test the following with `use_snql=True` once histogram is using snql
# using private functions in an aggregation without access should error
with pytest.raises(InvalidSearchQuery, match="histogram: no access to private function"):
@@ -5665,6 +5700,101 @@ def test_aggregate_function(self):
assert "count_unique_user" in keys
assert "time" in keys
+ def test_comparison_aggregate_function_invalid(self):
+ with pytest.raises(
+ InvalidSearchQuery, match="Only one column can be selected for comparison queries"
+ ):
+ discover.timeseries_query(
+ selected_columns=["count()", "count_unique(user)"],
+ query="",
+ params={
+ "start": self.day_ago,
+ "end": self.day_ago + timedelta(hours=2),
+ "project_id": [self.project.id],
+ },
+ rollup=3600,
+ comparison_delta=timedelta(days=1),
+ )
+
+ def test_comparison_aggregate_function(self):
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(hours=1)),
+ "user": {"id": 1},
+ },
+ project_id=self.project.id,
+ )
+
+ result = discover.timeseries_query(
+ selected_columns=["count()"],
+ query="",
+ params={
+ "start": self.day_ago,
+ "end": self.day_ago + timedelta(hours=2),
+ "project_id": [self.project.id],
+ },
+ rollup=3600,
+ comparison_delta=timedelta(days=1),
+ )
+ assert len(result.data["data"]) == 3
+ # Values should all be 0, since there is no comparison period data at all.
+ assert [0, 0, 0] == [val["count"] for val in result.data["data"] if "count" in val]
+
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1)),
+ "user": {"id": 1},
+ },
+ project_id=self.project.id,
+ )
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=2)),
+ "user": {"id": 2},
+ },
+ project_id=self.project.id,
+ )
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=2, minutes=1)),
+ },
+ project_id=self.project.id,
+ )
+
+ result = discover.timeseries_query(
+ selected_columns=["count()"],
+ query="",
+ params={
+ "start": self.day_ago,
+ "end": self.day_ago + timedelta(hours=2, minutes=1),
+ "project_id": [self.project.id],
+ },
+ rollup=3600,
+ comparison_delta=timedelta(days=1),
+ )
+ assert len(result.data["data"]) == 3
+ # In the second bucket we have 3 events in the current period and 2 in the comparison, so
+ # we get a result of 50% increase
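+ # ((3 - 2) / 2) * 100 = 50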
+ assert [0, 50, 0] == [val["count"] for val in result.data["data"] if "count" in val]
+
+ result = discover.timeseries_query(
+ selected_columns=["count_unique(user)"],
+ query="",
+ params={
+ "start": self.day_ago,
+ "end": self.day_ago + timedelta(hours=2, minutes=2),
+ "project_id": [self.project.id],
+ },
+ rollup=3600,
+ comparison_delta=timedelta(days=1),
+ )
+ assert len(result.data["data"]) == 3
+ # In the second bucket we have 1 unique user in the current period and 2 in the comparison, so
+ # we get a result of -50%
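+ # ((1 - 2) / 2) * 100 = -50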
+ assert [0, -50, 0] == [
+ val["count_unique_user"] for val in result.data["data"] if "count_unique_user" in val
+ ]
+
def test_count_miserable(self):
event_data = load_data("transaction")
# Half of duration so we don't get weird rounding differences when comparing the results
diff --git a/tests/sentry/snuba/test_tasks.py b/tests/sentry/snuba/test_tasks.py
index fa3f061b22e2ed..80b3a3c275d25e 100644
--- a/tests/sentry/snuba/test_tasks.py
+++ b/tests/sentry/snuba/test_tasks.py
@@ -199,6 +199,42 @@ def test_simple_transactions(self):
assert snuba_filter.conditions == []
assert snuba_filter.aggregations == [["uniq", "user", "count_unique_user"]]
+ def test_simple_sessions(self):
+ snuba_filter = build_snuba_filter(
+ dataset=QueryDatasets.SESSIONS,
+ query="",
+ aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
+ environment=None,
+ event_types=[],
+ )
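+ # percentage(x, y) expands to a null-safe divide; the extra identity() aggregation
+ # carries the raw total alongside the rate.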
+ assert snuba_filter
+ assert snuba_filter.aggregations == [
+ [
+ "if(greater(sessions,0),divide(sessions_crashed,sessions),null)",
+ None,
+ "_crash_rate_alert_aggregate",
+ ],
+ ["identity", "sessions", "_total_count"],
+ ]
+
+ def test_simple_users(self):
+ snuba_filter = build_snuba_filter(
+ dataset=QueryDatasets.SESSIONS,
+ query="",
+ aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
+ environment=None,
+ event_types=[],
+ )
+ assert snuba_filter
+ assert snuba_filter.aggregations == [
+ [
+ "if(greater(users,0),divide(users_crashed,users),null)",
+ None,
+ "_crash_rate_alert_aggregate",
+ ],
+ ["identity", "users", "_total_count"],
+ ]
+
def test_aliased_query_events(self):
snuba_filter = build_snuba_filter(
QueryDatasets.EVENTS, "release:latest", "count_unique(user)", None, None
@@ -210,6 +246,52 @@ def test_aliased_query_events(self):
]
assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", "count_unique_user"]]
+ def test_query_and_environment_sessions(self):
+ env = self.create_environment(self.project, name="development")
+ snuba_filter = build_snuba_filter(
+ dataset=QueryDatasets.SESSIONS,
+ query="release:ahmed@12.2",
+ aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
+ environment=env,
+ event_types=[],
+ )
+ assert snuba_filter
+ assert snuba_filter.aggregations == [
+ [
+ "if(greater(sessions,0),divide(sessions_crashed,sessions),null)",
+ None,
+ "_crash_rate_alert_aggregate",
+ ],
+ ["identity", "sessions", "_total_count"],
+ ]
+ assert snuba_filter.conditions == [
+ ["release", "=", "ahmed@12.2"],
+ ["environment", "=", "development"],
+ ]
+
+ def test_query_and_environment_users(self):
+ env = self.create_environment(self.project, name="development")
+ snuba_filter = build_snuba_filter(
+ dataset=QueryDatasets.SESSIONS,
+ query="release:ahmed@12.2",
+ aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
+ environment=env,
+ event_types=[],
+ )
+ assert snuba_filter
+ assert snuba_filter.aggregations == [
+ [
+ "if(greater(users,0),divide(users_crashed,users),null)",
+ None,
+ "_crash_rate_alert_aggregate",
+ ],
+ ["identity", "users", "_total_count"],
+ ]
+ assert snuba_filter.conditions == [
+ ["release", "=", "ahmed@12.2"],
+ ["environment", "=", "development"],
+ ]
+
def test_aliased_query_transactions(self):
snuba_filter = build_snuba_filter(
QueryDatasets.TRANSACTIONS,
@@ -360,6 +442,15 @@ def test_event_types_no_discover(self):
)
== "release:123"
)
+ assert (
+ apply_dataset_query_conditions(
+ QueryDatasets.SESSIONS,
+ "release:123",
+ [],
+ False,
+ )
+ == "release:123"
+ )
def test_event_types_discover(self):
assert (
diff --git a/tests/sentry/tasks/test_low_priority_symbolication.py b/tests/sentry/tasks/test_low_priority_symbolication.py
new file mode 100644
index 00000000000000..86a3063bacaabf
--- /dev/null
+++ b/tests/sentry/tasks/test_low_priority_symbolication.py
@@ -0,0 +1,24 @@
+import pytest
+
+from sentry.processing import realtime_metrics
+from sentry.tasks.low_priority_symbolication import _scan_for_suspect_projects, calculation_magic
+from sentry.testutils.helpers.task_runner import TaskRunner
+from sentry.utils import redis
+from sentry.utils.compat import mock
+
+
+@pytest.fixture
+def redis_cluster() -> redis._RedisCluster:
+ return redis.redis_clusters.get("default")
+
+
+@mock.patch("sentry.tasks.low_priority_symbolication.calculation_magic", lambda x, y: True)
+def test_scan_for_suspect_projects() -> None:
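+ # calculation_magic is patched above to always return True, so any project
+ # with recent events lands in the LPQ.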
+ realtime_metrics.increment_project_event_counter(17, 0)
+ with TaskRunner():
+ _scan_for_suspect_projects()
+ assert realtime_metrics.get_lpq_projects() == {17}
+
+
+def test_calculation_magic() -> None:
+ assert not calculation_magic([], [])
diff --git a/tests/sentry/tasks/test_sentry_apps.py b/tests/sentry/tasks/test_sentry_apps.py
index caf63fdb9f8849..ce62dde8fda1e2 100644
--- a/tests/sentry/tasks/test_sentry_apps.py
+++ b/tests/sentry/tasks/test_sentry_apps.py
@@ -177,6 +177,40 @@ def test_send_alert_event(self, safe_urlopen):
assert requests[0]["response_code"] == 200
assert requests[0]["event_type"] == "event_alert.triggered"
+ @patch("sentry.tasks.sentry_apps.safe_urlopen", return_value=MockResponseInstance)
+ def test_send_alert_event_with_additional_payload(self, safe_urlopen):
+ event = self.store_event(data={}, project_id=self.project.id)
+ settings = {
+ "alert_prefix": "[Not Good]",
+ "channel": "#ignored-errors",
+ "best_emoji": ":fire:",
+ }
+ rule_future = RuleFuture(
+ rule=self.rule,
+ kwargs={"sentry_app": self.sentry_app, "schema_defined_settings": settings},
+ )
+
+ with self.tasks():
+ notify_sentry_app(event, [rule_future])
+
+ payload = json.loads(faux(safe_urlopen).kwargs["data"])
+
+ assert payload["action"] == "triggered"
+ assert payload["data"]["triggered_rule"] == self.rule.label
+ assert payload["data"]["issue_alert"] == {
+ "id": self.rule.id,
+ "title": self.rule.label,
+ "sentry_app_id": self.sentry_app.id,
+ "settings": settings,
+ }
+
+ buffer = SentryAppWebhookRequestsBuffer(self.sentry_app)
+ requests = buffer.get_requests()
+
+ assert len(requests) == 1
+ assert requests[0]["response_code"] == 200
+ assert requests[0]["event_type"] == "event_alert.triggered"
+
@patch("sentry.tasks.sentry_apps.safe_urlopen", return_value=MockResponseInstance)
class TestProcessResourceChange(TestCase):
diff --git a/tests/sentry/utils/email/test_address.py b/tests/sentry/utils/email/test_address.py
new file mode 100644
index 00000000000000..f456f46038d920
--- /dev/null
+++ b/tests/sentry/utils/email/test_address.py
@@ -0,0 +1,22 @@
+from sentry.testutils import TestCase
+from sentry.utils.email.address import get_from_email_domain, is_valid_email_address
+
+
+class GetFromEmailDomainTest(TestCase):
+ def test_get_from_email_domain(self):
+ with self.options({"mail.from": "matt@example.com"}):
+ assert get_from_email_domain() == "example.com"
+
+ with self.options({"mail.from": "root@localhost"}):
+ assert get_from_email_domain() == "localhost"
+
+ with self.options({"mail.from": "garbage"}):
+ assert get_from_email_domain() == "garbage"
+
+
+class ValidEmailTest(TestCase):
+ def test_is_valid_email_address_number_at_qqcom(self):
+ assert is_valid_email_address("12345@qq.com") is False
+
+ def test_is_valid_email_address_normal_human_email_address(self):
+ assert is_valid_email_address("dcramer@gmail.com") is True
diff --git a/tests/sentry/utils/email/test_backend.py b/tests/sentry/utils/email/test_backend.py
new file mode 100644
index 00000000000000..2c319161645052
--- /dev/null
+++ b/tests/sentry/utils/email/test_backend.py
@@ -0,0 +1,17 @@
+from sentry.testutils import TestCase
+from sentry.utils.email.backend import get_mail_backend
+
+
+class GetMailBackendTest(TestCase):
+ def test_get_mail_backend(self):
+ with self.options({"mail.backend": "smtp"}):
+ assert get_mail_backend() == "django.core.mail.backends.smtp.EmailBackend"
+
+ with self.options({"mail.backend": "dummy"}):
+ assert get_mail_backend() == "django.core.mail.backends.dummy.EmailBackend"
+
+ with self.options({"mail.backend": "console"}):
+ assert get_mail_backend() == "django.core.mail.backends.console.EmailBackend"
+
+ with self.options({"mail.backend": "something.else"}):
+ assert get_mail_backend() == "something.else"
diff --git a/tests/sentry/utils/email/test_list_resolver.py b/tests/sentry/utils/email/test_list_resolver.py
new file mode 100644
index 00000000000000..145a81e6398458
--- /dev/null
+++ b/tests/sentry/utils/email/test_list_resolver.py
@@ -0,0 +1,28 @@
+import pytest
+
+from sentry.testutils import TestCase
+from sentry.utils.email import ListResolver
+from sentry.utils.email.message_builder import default_list_type_handlers
+
+
+class ListResolverTestCase(TestCase):
+ resolver = ListResolver("namespace", default_list_type_handlers)
+
+ def test_rejects_invalid_namespace(self):
+ with pytest.raises(AssertionError):
+ ListResolver("\x00", {})
+
+ def test_rejects_invalid_types(self):
+ with pytest.raises(ListResolver.UnregisteredTypeError):
+ self.resolver(object())
+
+ def test_generates_list_ids(self):
+ expected = "<{0.project.slug}.{0.organization.slug}.namespace>".format(self.event)
+ assert self.resolver(self.event.group) == expected
+ assert self.resolver(self.event.project) == expected
+
+ def test_rejects_invalid_objects(self):
+ resolver = ListResolver("namespace", {object: lambda value: ("\x00",)})
+
+ with pytest.raises(AssertionError):
+ resolver(object())
diff --git a/tests/sentry/utils/email/tests.py b/tests/sentry/utils/email/test_message_builder.py
similarity index 76%
rename from tests/sentry/utils/email/tests.py
rename to tests/sentry/utils/email/test_message_builder.py
index c17cb538bcaf5b..5eecad9fbcff77 100644
--- a/tests/sentry/utils/email/tests.py
+++ b/tests/sentry/utils/email/test_message_builder.py
@@ -1,44 +1,13 @@
import functools
-import pytest
from django.core import mail
from sentry import options
-from sentry.models import GroupEmailThread, User, UserOption
+from sentry.models import GroupEmailThread, User, UserEmail, UserOption
from sentry.testutils import TestCase
from sentry.utils.compat.mock import patch
-from sentry.utils.email import (
- ListResolver,
- MessageBuilder,
- create_fake_email,
- default_list_type_handlers,
- get_from_email_domain,
- get_mail_backend,
- send_mail,
-)
-
-
-class ListResolverTestCase(TestCase):
- resolver = ListResolver("namespace", default_list_type_handlers)
-
- def test_rejects_invalid_namespace(self):
- with pytest.raises(AssertionError):
- ListResolver("\x00", {})
-
- def test_rejects_invalid_types(self):
- with pytest.raises(ListResolver.UnregisteredTypeError):
- self.resolver(object())
-
- def test_generates_list_ids(self):
- expected = "<{0.project.slug}.{0.organization.slug}.namespace>".format(self.event)
- assert self.resolver(self.event.group) == expected
- assert self.resolver(self.event.project) == expected
-
- def test_rejects_invalid_objects(self):
- resolver = ListResolver("namespace", {object: lambda value: ("\x00",)})
-
- with pytest.raises(AssertionError):
- resolver(object())
+from sentry.utils.email import MessageBuilder
+from sentry.utils.email.faker import create_fake_email
class MessageBuilderTest(TestCase):
@@ -115,8 +84,10 @@ def test_with_users(self):
user_b = User.objects.create(email="bar@example.com")
user_c = User.objects.create(email="baz@example.com")
+ alternate_email = "bazzer@example.com"
+ UserEmail.objects.create(user=user_c, email=alternate_email)
UserOption.objects.create(
- user=user_c, project=project, key="mail:email", value="bazzer@example.com"
+ user=user_c, project=project, key="mail:email", value=alternate_email
)
msg = MessageBuilder(
@@ -155,7 +126,7 @@ def test_fake_dont_send(self):
assert len(mail.outbox) == 0
- @patch("sentry.utils.email.make_msgid")
+ @patch("sentry.utils.email.message_builder.make_msgid")
def test_message_id(self, make_msgid):
make_msgid.return_value = "abc123"
@@ -180,7 +151,7 @@ def test_message_id(self, make_msgid):
"text/html",
)
- @patch("sentry.utils.email.make_msgid")
+ @patch("sentry.utils.email.message_builder.make_msgid")
def test_add_groupemailthread(self, make_msgid):
make_msgid.return_value = "abc123"
@@ -211,7 +182,7 @@ def test_add_groupemailthread(self, make_msgid):
assert thread.email == "foo@example.com"
assert thread.group == self.group
- @patch("sentry.utils.email.make_msgid")
+ @patch("sentry.utils.email.message_builder.make_msgid")
def test_reply_reference(self, make_msgid):
make_msgid.return_value = "abc123"
@@ -327,47 +298,3 @@ def test_stripped_newline(self):
assert len(mail.outbox) == 1
assert mail.outbox[0].subject == "Foo"
-
-
-class MiscTestCase(TestCase):
- def test_get_from_email_domain(self):
- with self.options({"mail.from": "matt@example.com"}):
- assert get_from_email_domain() == "example.com"
-
- with self.options({"mail.from": "root@localhost"}):
- assert get_from_email_domain() == "localhost"
-
- with self.options({"mail.from": "garbage"}):
- assert get_from_email_domain() == "garbage"
-
- def test_get_mail_backend(self):
- with self.options({"mail.backend": "smtp"}):
- assert get_mail_backend() == "django.core.mail.backends.smtp.EmailBackend"
-
- with self.options({"mail.backend": "dummy"}):
- assert get_mail_backend() == "django.core.mail.backends.dummy.EmailBackend"
-
- with self.options({"mail.backend": "console"}):
- assert get_mail_backend() == "django.core.mail.backends.console.EmailBackend"
-
- with self.options({"mail.backend": "something.else"}):
- assert get_mail_backend() == "something.else"
-
-
-class SendMail(TestCase):
- @patch("django.core.mail.EmailMessage", autospec=True)
- @patch("django.core.mail.get_connection", return_value="connection")
- def test_send_mail_with_kwargs(self, get_connection, MockEmailMessage):
- patch.object(MockEmailMessage.return_value, "send")
- send_mail(
- "subject", "my_message", "fake@example.com", ["a@b.com"], reply_to=["emusk@tesla.com"]
- )
- MockEmailMessage.assert_called_once_with(
- "subject",
- "my_message",
- "fake@example.com",
- ["a@b.com"],
- connection="connection",
- reply_to=["emusk@tesla.com"],
- )
- MockEmailMessage.return_value.send.assert_called_once_with(fail_silently=False)
diff --git a/tests/sentry/utils/email/test_send_mail.py b/tests/sentry/utils/email/test_send_mail.py
new file mode 100644
index 00000000000000..a113ca9d258823
--- /dev/null
+++ b/tests/sentry/utils/email/test_send_mail.py
@@ -0,0 +1,22 @@
+from sentry.testutils import TestCase
+from sentry.utils.compat.mock import patch
+from sentry.utils.email import send_mail
+
+
+class SendMail(TestCase):
+ @patch("django.core.mail.EmailMessage", autospec=True)
+ @patch("django.core.mail.get_connection", return_value="connection")
+ def test_send_mail_with_kwargs(self, get_connection, MockEmailMessage):
+ patch.object(MockEmailMessage.return_value, "send")
+ send_mail(
+ "subject", "my_message", "fake@example.com", ["a@b.com"], reply_to=["emusk@tesla.com"]
+ )
+ MockEmailMessage.assert_called_once_with(
+ "subject",
+ "my_message",
+ "fake@example.com",
+ ["a@b.com"],
+ connection="connection",
+ reply_to=["emusk@tesla.com"],
+ )
+ MockEmailMessage.return_value.send.assert_called_once_with(fail_silently=False)
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats.py b/tests/snuba/api/endpoints/test_organization_events_stats.py
index b6622df018627c..f2356f786d70dd 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats.py
@@ -17,6 +17,8 @@
class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase):
+ endpoint = "sentry-api-0-organization-events-stats"
+
def setUp(self):
super().setUp()
self.login_as(user=self.user)
@@ -737,7 +739,7 @@ def test_multiple_yaxis_only_one_query(self, mock_query):
assert mock_query.call_count == 1
- @mock.patch("sentry.snuba.discover.raw_query", return_value={"data": []})
+ @mock.patch("sentry.snuba.discover.bulk_raw_query", return_value=[{"data": []}])
def test_invalid_interval(self, mock_query):
with self.feature("organizations:discover-basic"):
response = self.client.get(
@@ -754,7 +756,7 @@ def test_invalid_interval(self, mock_query):
assert response.status_code == 200
assert mock_query.call_count == 1
# Should've reset to the default for 24h
- assert mock_query.mock_calls[0].kwargs["rollup"] == 300
+ assert mock_query.mock_calls[0].args[0][0].rollup == 300
with self.feature("organizations:discover-basic"):
response = self.client.get(
@@ -771,7 +773,7 @@ def test_invalid_interval(self, mock_query):
assert response.status_code == 200
assert mock_query.call_count == 2
# Should've reset to the default for 24h
- assert mock_query.mock_calls[1].kwargs["rollup"] == 300
+ assert mock_query.mock_calls[1].args[0][0].rollup == 300
def test_out_of_retention(self):
with self.options({"system.event-retention-days": 10}):
@@ -869,6 +871,61 @@ def test_without_zerofill(self):
assert response.data["start"] == parse_date.parse(start).timestamp()
assert response.data["end"] == parse_date.parse(end).timestamp()
+ def test_comparison(self):
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
+ },
+ project_id=self.project.id,
+ )
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
+ },
+ project_id=self.project.id,
+ )
+ self.store_event(
+ data={
+ "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
+ },
+ project_id=self.project2.id,
+ )
+
+ response = self.get_success_response(
+ self.organization.slug,
+ start=iso_format(self.day_ago),
+ end=iso_format(self.day_ago + timedelta(hours=2)),
+ interval="1h",
+ comparisonDelta=int(timedelta(days=1).total_seconds()),
+ )
+
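+ # With comparisonDelta set, each bucket reports percent change vs. the comparison period.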
+ assert [attrs for time, attrs in response.data["data"]] == [
+ [{"count": -50}],
+ [{"count": 100}],
+ ]
+
+ def test_comparison_invalid(self):
+ response = self.get_error_response(
+ self.organization.slug,
+ start=iso_format(self.day_ago),
+ end=iso_format(self.day_ago + timedelta(hours=2)),
+ interval="1h",
+ comparisonDelta="17h",
+ )
+ assert response.data["detail"] == "comparisonDelta must be an integer"
+
+ start = before_now(days=85)
+ end = start + timedelta(days=7)
+ with self.options({"system.event-retention-days": 90}):
+ response = self.get_error_response(
+ self.organization.slug,
+ start=iso_format(start),
+ end=iso_format(end),
+ interval="1h",
+ comparisonDelta=int(timedelta(days=7).total_seconds()),
+ )
+ assert response.data["detail"] == "Comparison period is outside retention window"
+
class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
def setUp(self):
@@ -1155,6 +1212,35 @@ def test_top_events_with_issue(self):
assert other["order"] == 5
assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
+ @mock.patch(
+ "sentry.snuba.discover.raw_query",
+ side_effect=[{"data": [{"group_id": 1}], "meta": []}, {"data": [], "meta": []}],
+ )
+ def test_top_events_with_issue_check_query_conditions(self, mock_query):
+ """ "Intentionally separate from test_top_events_with_issue
+
+ This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
+ the interval since we'd cap out the max rows. This was not caught by the previous test since the results
+ would still be correct given the smaller interval & lack of data
+ """
+ with self.feature(self.enabled_features):
+ self.client.get(
+ self.url,
+ data={
+ "start": iso_format(self.day_ago),
+ "end": iso_format(self.day_ago + timedelta(hours=2)),
+ "interval": "1h",
+ "yAxis": "count()",
+ "orderby": ["-count()"],
+ "field": ["count()", "message", "issue"],
+ "topEvents": 5,
+ "query": "!event.type:transaction",
+ },
+ format="json",
+ )
+
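+ # The issue condition must be forwarded to the second (per-interval) query.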
+ assert ["group_id", "IN", [1]] in mock_query.mock_calls[1].kwargs["conditions"]
+
def test_top_events_with_functions(self):
with self.feature(self.enabled_features):
response = self.client.get(
@@ -1808,8 +1894,9 @@ def test_top_events_with_equations(self):
assert other["order"] == 5
assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]
+ @mock.patch("sentry.snuba.discover.bulk_raw_query", return_value=[{"data": [], "meta": []}])
@mock.patch("sentry.snuba.discover.raw_query", return_value={"data": [], "meta": []})
- def test_invalid_interval(self, mock_query):
+ def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
with self.feature("organizations:discover-basic"):
response = self.client.get(
self.url,
@@ -1825,7 +1912,7 @@ def test_invalid_interval(self, mock_query):
},
)
assert response.status_code == 200
- assert mock_query.call_count == 1
+ assert mock_bulk_query.call_count == 1
with self.feature("organizations:discover-basic"):
response = self.client.get(
@@ -1843,9 +1930,9 @@ def test_invalid_interval(self, mock_query):
},
)
assert response.status_code == 200
- assert mock_query.call_count == 3
+ assert mock_raw_query.call_count == 2
# Should've reset to the default for between 1 and 24h
- assert mock_query.mock_calls[2].kwargs["rollup"] == 300
+ assert mock_raw_query.mock_calls[1].kwargs["rollup"] == 300
with self.feature("organizations:discover-basic"):
response = self.client.get(
@@ -1863,9 +1950,9 @@ def test_invalid_interval(self, mock_query):
},
)
assert response.status_code == 200
- assert mock_query.call_count == 5
+ assert mock_raw_query.call_count == 4
# Should've left the interval alone since we're just below the limit
- assert mock_query.mock_calls[4].kwargs["rollup"] == 1
+ assert mock_raw_query.mock_calls[3].kwargs["rollup"] == 1
with self.feature("organizations:discover-basic"):
response = self.client.get(
@@ -1882,9 +1969,9 @@ def test_invalid_interval(self, mock_query):
},
)
assert response.status_code == 200
- assert mock_query.call_count == 7
+ assert mock_raw_query.call_count == 6
# Should've default to 24h's default of 5m
- assert mock_query.mock_calls[6].kwargs["rollup"] == 300
+ assert mock_raw_query.mock_calls[5].kwargs["rollup"] == 300
def test_top_events_timestamp_fields(self):
with self.feature("organizations:discover-basic"):
diff --git a/tests/snuba/api/endpoints/test_organization_events_v2.py b/tests/snuba/api/endpoints/test_organization_events_v2.py
index cd74b27372fb14..83744174a021d7 100644
--- a/tests/snuba/api/endpoints/test_organization_events_v2.py
+++ b/tests/snuba/api/endpoints/test_organization_events_v2.py
@@ -5,7 +5,7 @@
from django.utils import timezone
from pytz import utc
-from sentry.discover.models import KeyTransaction, TeamKeyTransaction
+from sentry.discover.models import TeamKeyTransaction
from sentry.models import (
ApiKey,
ProjectTeam,
@@ -3914,178 +3914,6 @@ def test_compare_numeric_aggregate(self):
assert len(response.data["data"]) == 1
assert response.data["data"][0]["compare_numeric_aggregate_p75_equals_0"] == 0
- def test_no_key_transactions(self):
- transactions = [
- "/blah_transaction/",
- "/foo_transaction/",
- "/zoo_transaction/",
- ]
-
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
-
- query = {
- "project": [self.project.id],
- # use the order by to ensure the result order
- "orderby": "transaction",
- "field": [
- "key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- response = self.do_request(query)
-
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- assert data[1]["key_transaction"] == 0
- assert data[1]["transaction"] == "/foo_transaction/"
- assert data[2]["key_transaction"] == 0
- assert data[2]["transaction"] == "/zoo_transaction/"
-
- def test_key_transactions_orderby(self):
- transactions = ["/blah_transaction/"]
- key_transactions = [
- "/foo_transaction/",
- "/zoo_transaction/",
- ]
-
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
-
- for transaction in key_transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- KeyTransaction.objects.create(
- owner=self.user,
- organization=self.organization,
- transaction=transaction,
- project=self.project,
- )
-
- query = {
- "project": [self.project.id],
- "field": [
- "key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
-
- # test ascending order
- query["orderby"] = ["key_transaction", "transaction"]
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- assert data[1]["key_transaction"] == 1
- assert data[1]["transaction"] == "/foo_transaction/"
- assert data[2]["key_transaction"] == 1
- assert data[2]["transaction"] == "/zoo_transaction/"
-
- # test descending order
- query["orderby"] = ["-key_transaction", "-transaction"]
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["key_transaction"] == 1
- assert data[0]["transaction"] == "/zoo_transaction/"
- assert data[1]["key_transaction"] == 1
- assert data[1]["transaction"] == "/foo_transaction/"
- assert data[2]["key_transaction"] == 0
- assert data[2]["transaction"] == "/blah_transaction/"
-
- def test_key_transactions_query(self):
- transactions = ["/blah_transaction/"]
- key_transactions = [
- "/foo_transaction/",
- "/zoo_transaction/",
- ]
-
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
-
- for transaction in key_transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- KeyTransaction.objects.create(
- owner=self.user,
- organization=self.organization,
- transaction=transaction,
- project=self.project,
- )
-
- query = {
- "project": [self.project.id],
- "orderby": "transaction",
- "field": [
- "key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
-
- # key transactions
- query["query"] = "has:key_transaction"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["key_transaction"] == 1
- assert data[0]["transaction"] == "/foo_transaction/"
- assert data[1]["key_transaction"] == 1
- assert data[1]["transaction"] == "/zoo_transaction/"
-
- # key transactions
- query["query"] = "key_transaction:true"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["key_transaction"] == 1
- assert data[0]["transaction"] == "/foo_transaction/"
- assert data[1]["key_transaction"] == 1
- assert data[1]["transaction"] == "/zoo_transaction/"
-
- # not key transactions
- query["query"] = "!has:key_transaction"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
-
- # not key transactions
- query["query"] = "key_transaction:false"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
-
def test_no_team_key_transactions(self):
transactions = [
"/blah_transaction/",
diff --git a/tests/snuba/sessions/test_sessions.py b/tests/snuba/sessions/test_sessions.py
index c7048106ec3a20..e6ca0595e0fb85 100644
--- a/tests/snuba/sessions/test_sessions.py
+++ b/tests/snuba/sessions/test_sessions.py
@@ -11,7 +11,6 @@
_make_stats,
get_project_releases_by_stability,
get_project_releases_count,
- get_release_health_data_overview,
)
from sentry.testutils import SnubaTestCase, TestCase
from sentry.testutils.cases import SessionMetricsTestCase
@@ -362,7 +361,7 @@ def test_get_release_adoption_lowered(self):
}
def test_get_release_health_data_overview_users(self):
- data = get_release_health_data_overview(
+ data = self.backend.get_release_health_data_overview(
[
(self.project.id, self.session_release),
(self.project.id, self.session_crashed_release),
@@ -416,7 +415,7 @@ def test_get_release_health_data_overview_users(self):
}
def test_get_release_health_data_overview_sessions(self):
- data = get_release_health_data_overview(
+ data = self.backend.get_release_health_data_overview(
[
(self.project.id, self.session_release),
(self.project.id, self.session_crashed_release),
@@ -569,6 +568,101 @@ def test_fetching_release_sessions_time_bounds_for_different_release_with_no_ses
"sessions_upper_bound": None,
}
+ def test_get_crash_free_breakdown(self):
+ start = timezone.now() - timedelta(days=4)
+ data = self.backend.get_crash_free_breakdown(
+ project_id=self.project.id,
+ release=self.session_release,
+ start=start,
+ environments=["prod"],
+ )
+
+ # The last returned date is generated within the function and should be close to now:
+ last_date = data[-1].pop("date")
+ assert timezone.now() - last_date < timedelta(seconds=1)
+
+ assert data == [
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "date": start + timedelta(days=1),
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "date": start + timedelta(days=2),
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ {
+ "crash_free_sessions": 100.0,
+ "crash_free_users": 100.0,
+ "total_sessions": 2,
+ "total_users": 1,
+ },
+ ]
+
+ data = self.backend.get_crash_free_breakdown(
+ project_id=self.project.id,
+ release=self.session_crashed_release,
+ start=start,
+ environments=["prod"],
+ )
+ data[-1].pop("date")
+ assert data == [
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "date": start + timedelta(days=1),
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "date": start + timedelta(days=2),
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ {
+ "crash_free_sessions": 0.0,
+ "crash_free_users": 0.0,
+ "total_sessions": 1,
+ "total_users": 1,
+ },
+ ]
+ data = self.backend.get_crash_free_breakdown(
+ project_id=self.project.id,
+ release="non-existing",
+ start=start,
+ environments=["prod"],
+ )
+ data[-1].pop("date")
+ assert data == [
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "date": start + timedelta(days=1),
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "date": start + timedelta(days=2),
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ {
+ "crash_free_sessions": None,
+ "crash_free_users": None,
+ "total_sessions": 0,
+ "total_users": 0,
+ },
+ ]
+
def test_basic_release_model_adoptions(self):
"""
Test that the basic (project,release) data is returned
diff --git a/webpack.config.ts b/webpack.config.ts
index 50e2088cf365f2..8324073d75ce6b 100644
--- a/webpack.config.ts
+++ b/webpack.config.ts
@@ -60,6 +60,7 @@ const WEBPACK_MODE: Configuration['mode'] = IS_PRODUCTION ? 'production' : 'deve
const SENTRY_BACKEND_PORT = env.SENTRY_BACKEND_PORT;
const SENTRY_WEBPACK_PROXY_HOST = env.SENTRY_WEBPACK_PROXY_HOST;
const SENTRY_WEBPACK_PROXY_PORT = env.SENTRY_WEBPACK_PROXY_PORT;
+const SENTRY_RELEASE_VERSION = env.SENTRY_RELEASE_VERSION;
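+// Exposed to the frontend bundle via the DefinePlugin entry below.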
// Used by sentry devserver runner to force using webpack-dev-server
const FORCE_WEBPACK_DEV_SERVER = !!env.FORCE_WEBPACK_DEV_SERVER;
@@ -335,6 +336,7 @@ let appConfig: Configuration = {
DEPLOY_PREVIEW_CONFIG: JSON.stringify(DEPLOY_PREVIEW_CONFIG),
EXPERIMENTAL_SPA: JSON.stringify(SENTRY_EXPERIMENTAL_SPA),
SPA_DSN: JSON.stringify(SENTRY_SPA_DSN),
+ SENTRY_RELEASE_VERSION: JSON.stringify(SENTRY_RELEASE_VERSION),
},
}),